From a3cf34ac2df0c2c342299d134a4d9161342d6a5e Mon Sep 17 00:00:00 2001 From: eb8680 Date: Thu, 9 Oct 2025 15:36:40 -0400 Subject: [PATCH 01/39] Staging branch for LLM module --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e8f23b69..fd887562 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ version = "0.2.1" description = "Metaprogramming infrastructure" readme = "README.rst" license = "Apache-2.0" -requires-python = ">=3.12" +requires-python = ">=3.12,<3.14" authors = [ { name = "Basis" }, ] From b57a18d717e8e0b291e1455dfd4fc3dd83ecf633 Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Thu, 9 Oct 2025 19:36:02 -0400 Subject: [PATCH 02/39] Move LLM interface code from `robotl` (#358) * import llm code from robotl * make optional deps optional * replace template with Template.define * bring in tests * typing bullshit * disable incremental type checking to avoid crash * format * remove anthropic and cache modules * fix * remove dupe * rename * fix not handled * reorganize * wip * wip * wip * wip * restrict python version * specify python version * rename * rename --- .github/workflows/test.yml | 2 +- docs/source/llm.ipynb | 322 ++++++++++++++++++++++++++++ effectful/handlers/llm/__init__.py | 25 +++ effectful/handlers/llm/providers.py | 103 +++++++++ effectful/handlers/llm/structure.py | 41 ++++ effectful/handlers/llm/synthesis.py | 83 +++++++ pyproject.toml | 3 +- scripts/lint.sh | 2 +- tests/test_handlers_llm.py | 149 +++++++++++++ 9 files changed, 727 insertions(+), 3 deletions(-) create mode 100644 docs/source/llm.ipynb create mode 100644 effectful/handlers/llm/__init__.py create mode 100644 effectful/handlers/llm/providers.py create mode 100644 effectful/handlers/llm/structure.py create mode 100644 effectful/handlers/llm/synthesis.py create mode 100644 tests/test_handlers_llm.py diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 6319c5cc..e0c65890 
100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["'<3.13'", "'>=3.13'"] + python-version: ["'<3.13'", "'3.13'"] steps: - uses: actions/checkout@v4 diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb new file mode 100644 index 00000000..ddea9724 --- /dev/null +++ b/docs/source/llm.ipynb @@ -0,0 +1,322 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "5aaf649f", + "metadata": {}, + "outputs": [], + "source": [ + "import functools\n", + "import os\n", + "from collections.abc import Callable\n", + "\n", + "import openai\n", + "from IPython.display import Image as IPYImage\n", + "from IPython.display import display\n", + "from PIL import Image\n", + "\n", + "from effectful.handlers.llm import Template\n", + "from effectful.handlers.llm.providers import OpenAIAPIProvider\n", + "from effectful.handlers.llm.structure import DecodeError, decode\n", + "from effectful.handlers.llm.synthesis import ProgramSynthesis\n", + "from effectful.ops.semantics import handler\n", + "\n", + "provider = OpenAIAPIProvider(openai.OpenAI(api_key=os.getenv(\"OPENAI_API_KEY\")))" + ] + }, + { + "cell_type": "markdown", + "id": "2f9e861b", + "metadata": {}, + "source": [ + "## Interface\n", + "\n", + "The `robotl.ops.llm` module provides a simplified LLM interface that uses algebraic effects to provide modularity. The module interface consists of:\n", + "\n", + "- A decorator `template` which creates a prompt template from a callable. We should think of the prompt template as an LLM-implemented function with behavior specified by a template string. When a templated function is called, an LLM is invoked to produce the specified behavior. The `__call__` method of a template is a handleable operation.\n", + "- An operation `decode` which parses LLM output. `decode(t: type, c: str)` converts an LLM response `c` to the type `t`. 
It can be handled to provide decoding logic for particular types.\n", + "- Interpretations for LLM providers `OpenAIIntp` and callable decoding `ProgramSynthesisIntp`. These interpretations can be composed to handle a variety of template behaviors." + ] + }, + { + "cell_type": "markdown", + "id": "c1c639d3", + "metadata": {}, + "source": [ + "## Prompt Templates\n", + "\n", + "This template function writes (bad) poetry on a given theme. While difficult to implement in Python, an LLM can provide a reasonable implementation." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "1e832675", + "metadata": {}, + "outputs": [], + "source": [ + "@Template.define\n", + "def limerick(theme: str) -> str:\n", + " \"\"\"Write a limerick on the theme of {theme}.\"\"\"\n", + " raise NotImplementedError" + ] + }, + { + "cell_type": "markdown", + "id": "f2ca6919", + "metadata": {}, + "source": [ + "If we call the template with a provider interpretation installed, we get reasonable behavior. The LLM is nondeterministic by default, so calling the template twice with the same arguments gives us different results.\n", + "\n", + "Templates are regular callables, so can be converted to operations with `defop` if we want to override the LLM implementation in some cases." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "634f6533", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "A fish with a wish in the sea, \n", + "Dreamed of climbing a tall apple tree. \n", + "Though it lacked any feet, \n", + "It found life quite sweet, \n", + "While swimming as wild as can be.\n", + "----------------------------------------\n", + "In the sea swam a fish full of flair, \n", + "With scales that would shimmer and glare. 
\n", + "He'd leap and he'd dive, \n", + "Feeling fully alive, \n", + "Turning flips in the salty sea air.\n" + ] + } + ], + "source": [ + "with handler(provider):\n", + " print(limerick(\"fish\")) # type: ignore\n", + " print(\"-\" * 40)\n", + " print(limerick(\"fish\")) # type: ignore" + ] + }, + { + "cell_type": "markdown", + "id": "2e59acbc", + "metadata": {}, + "source": [ + "If we want deterministic behavior, we can cache the template call." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "706ce53b", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Gliding through sunbeams, \n", + "Silver scales dance in currents— \n", + "Whispers of the deep.\n", + "----------------------------------------\n", + "Gliding through sunbeams, \n", + "Silver scales dance in currents— \n", + "Whispers of the deep.\n" + ] + } + ], + "source": [ + "@functools.cache\n", + "@Template.define\n", + "def haiku(theme: str) -> str:\n", + " \"\"\"Write a haiku on the theme of {theme}.\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "print()\n", + "with handler(provider):\n", + " print(haiku(\"fish\"))\n", + " print(\"-\" * 40)\n", + " print(haiku(\"fish\"))" + ] + }, + { + "cell_type": "markdown", + "id": "13adb300", + "metadata": {}, + "source": [ + "## Converting LLM Results to Python Objects\n", + "\n", + "Type conversion is handled by `decode`. By default, primitive types are converted. `DecodeError` is raised if a response cannot be converted." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "2c766859", + "metadata": {}, + "outputs": [], + "source": [ + "assert type(decode(str, \"a string\")) is str\n", + "assert type(decode(int, \"123\")) is int\n", + "try:\n", + " decode(int, \"not an int\")\n", + " assert False, \"Should have raised\"\n", + "except DecodeError:\n", + " pass\n", + "\n", + "\n", + "@Template.define\n", + "def primes(first_digit: int) -> int:\n", + " \"\"\"Give exactly one prime number with {first_digit} as the first digit. Respond with only the number.\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "with handler(provider):\n", + " assert type(primes(6)) is int # type: ignore" + ] + }, + { + "cell_type": "markdown", + "id": "36d78a71", + "metadata": {}, + "source": [ + "More complex types can be converted by providing handlers for `decode`. `ProgramSynthesisIntp` provides a `decode` handler that parses Python callables." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c83bbdc0", + "metadata": {}, + "outputs": [], + "source": [ + "@Template.define\n", + "def count_char(char: str) -> Callable[[str], int]:\n", + " \"\"\"Write a function which takes a string and counts the occurrances of '{char}'.\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "try:\n", + " with handler(provider), handler(ProgramSynthesis()):\n", + " count_a = count_char(\"a\") # type: ignore\n", + " assert callable(count_a)\n", + " assert count_a(\"banana\") == 3\n", + " assert count_a(\"cherry\") == 0\n", + "except DecodeError as e:\n", + " print(\"DecodeError:\", e.response)\n", + " print(\"This can happen if the LLM generates code that cannot be parsed.\")" + ] + }, + { + "cell_type": "markdown", + "id": "c9634e1a", + "metadata": {}, + "source": [ + "## Multimodal Prompts\n", + "\n", + "Prompt templating is largely the same as standard Python templating. However, special case behavior is provided for image template arguments. 
These are added to the prompt in a provider-API-specific way." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "72614579", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Testing image: _static/img/cat.jpeg\n" + ] + }, + { + "data": { + "image/jpeg": "/9j/4AAQSkZJRgABAQAASABIAAD/4QIoRXhpZgAATU0AKgAAAAgACAEPAAIAAAAGAAAAbgEQAAIAAAAcAAAAdAESAAMAAAABAAEAAAEaAAUAAAABAAAAkAEbAAUAAAABAAAAmAEoAAMAAAABAAIAAAExAAIAAAAlAAAAoIdpAAQAAAABAAAAxgAAAABDYW5vbgBDYW5vbiBFT1MgRElHSVRBTCBSRUJFTCBYVGkAAAAASAAAAAEAAABIAAAAAUFkb2JlIFBob3Rvc2hvcCBFbGVtZW50cyAzLjAgV2luZG93cwAAABeCmgAFAAAAAQAAAeCCnQAFAAAAAQAAAeiIIgADAAAAAQABAACIJwADAAAAAQGQAACQAAAHAAAABDAyMjGRAQAHAAAABAECAwCSAQAKAAAAAQAAAfCSAgAFAAAAAQAAAfiSBAAKAAAAAQAAAgCSBwADAAAAAQAFAACSCQADAAAAAQAQAACSCgAFAAAAAQAAAgigAAAHAAAABDAxMDCgAQADAAAAAQABAACgAgAEAAAAAQAAAUCgAwAEAAAAAQAAAOqiDgAFAAAAAQAAAhCiDwAFAAAAAQAAAhiiEAADAAAAAQACAACkAQADAAAAAQAAAACkAgADAAAAAQABAACkAwADAAAAAQAAAACkBgADAAAAAQAAAAAAAAAAAAAAAQAAAPoAAAAcAAAABQAA+/MAAB+hAAEPKgAANo0AAAAAAAAAAQAAADMAAAABABcimwAAAVYABpeAAAAAYf/AABEIAOoBQAMBIgACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2wBDAAICAgICAgMCAgMFAwMDBQYFBQUFBggGBgYGBggKCAgICAgICgoKCgoKCgoMDAwMDAwODg4ODg8PDw8PDw8PDw//2wBDAQICAgQEBAcEBAcQCwkLEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBD/3QAEABT/2gAMAwEAAhEDEQA/ANCBRbyTeH5JfPeOWSZ2k5WNV5C5xk5P3RV7SdJGs6e4m2rdQXJMZcjEiEZKuMdMgHpzXYzW1rq0Es1kyFnlMjSGMlxsGRnPbH
fIrGm1CyudusWQjkt4UIlA52lDsIAX1z9ea/mSlmEqC0Wr/r+vM/JE7O4zX9K13UNYvNXF0WhtIAp38ZkkwxK+3XgGo9V8RaXbeH9JMM/nzwQvDNKE5IR/nBA7jgA+laN34guJbLSyAkFpdFE8xjt3yK3zAg9OBj9DWV9kim8Uz2W+KOMRySeTgBVRTwMdMd/9o8V3RzWcVNyW6XzejuHNZtHNa7cLfSKk8WbbYJgCdg3LjHPrnsO1a+h+HpNR8RtfJFgHPkKpICkKd0px6Ekrn61Yn0yaZ1h04NIIHWOXsEMzfMpzyNo6e3Suoj8O3ulohtUEc9zLMVWTcGaNANoGPugKCT3Nb4bFVIvlWt2Qm0zn9U8N2NhfwrakCePYVvCMSgBvn2k7iCeRkc89RXH6L4M1m8uDr2naO2mJZy3UsbTEtJc+bkBlJyOAxJbqAK9N1jUXkKNEY5obnaIywDGIYAxnsAe3pXY/8JF/ZV1HoU8McjRFiHUlQgYAttBI3fLkn9K7cLiHHme629G/zNqUt7nzvd/CW/8Ah/pDT304Z5pJDBbwHzZpnYAB2PTknhM8AZPJq9Z/DFtVvVstQSRlgthKIN5Ry7nceOwP3ST07V9Q+HtZ88rrEXlSxW6jbCD+7lMwZEYlhuG3dnH59K47S72ys7i+n09IUlKMsjyMwOQuQBkfNhjjr06dKeKxVPRRWr/Qpu54a0upWx+0R20ISCPE8TjzAc4iVEyPvDv0IrJup7aTUIhry7bTSx5U0cZCuLg4LR7O2B1Jr25tJ8OXlvq7XU0ks9/BHG6wOGieZeUdARhWLEg461wt34Jv/EGnaXpzKlrIkKXF4srAuZmUhllcgBn4U89wQa4KMYKnyxl1td+fr/wxMYx7nn3i/wAa7tRjt9RLKJoFNmgUbLeBsDmMcFuOp4HWl07XvMtcW8rtLcxkCNhlGSPksPfHBH0r2fxj4V06/wBFtrOxiS01S2VTBL5I+dYuqEjHysOmOM1iHSLOO30s/wBn/Y5rZVS6YDDJ5vUqpAyXx+RzXbUw8Km8v6SO2XJyJrc53RJL/SbaxljIYTN5pYkkKrc5fPoBnJ/Cu40uJ9XuJ5Li+VFtm8xChI6k5AHfIwQcdqt+I1g0fw9b32mZluFXbJASrA7WPl/N/ESDyuOBxXBX2s2fkxSpcC2iubLjGE8tmA3KAecqc1z6pzl9lNfj/X3nG37zPRGVrkRRlgkoVcAjIJHO4cDIzySfpXVW01hfW9pfXU7ic7oQrOBuVcKGwPXPTp0ryKTxXfa3ZJF5DbdNhRIYE/dSSpg+b8w+8RnemevPtWbqE2oabplrqWlxyX/mKs6SIpMoCEEK69AwJHHsa3+r1pR5os0g5PY+1LbxRPZWiWFtdpbwwqEJ4EjAKxO4k7uMYqpf3Oh6i6aX4j8t3mcSqoY7gr4KZOc9Dn8u9fESar4vkGpX08N2QqsZreZCkjPIS2cZ6HnA4r0HRfEf9q6Bpl3fKYbt1yu/hlKZRA7DgjC8AdK4cV7aNNSrfj/kdyxs4r3j36fQdCu7OYWBaCXT5niOQFBV+Mnbwx+bj6ZrB8N/DMRWRuZ9TjkNtKzW6sfnWJjkLnueDn1zXEw+J20CCSW/lIaeYSLESCxP+0DzjPTNezeHdcN9JBqctnEVlk4J+VUDHJODgE/j1NbUsTOzc/8AhjSnWjUa5t2Yz+B9ImaXWJUEl6HVDjLI3y53bTjgdO/IrkB4Z8Q2aoiS/wBoQQz7pwqk7YmJK/L04JGRXsl9qUMF9/xLlMM0oCbG+ZWzznOeNwyK870jVb0R6pHOBFK0wnQ+YE8yI5bGTxhc/wAhVU8znFqLWi/H+tCXTtJI5uI654diTVJrFQqMH2KDlFbgsp7uCcn069aoanNqeq22lABhc3EhdZ2DYXbnAGMZABr3DxDf+HzBbQRzx28kjyebC7BiQuPlC5zk9SawtFv7eFmgnaKNmba8bsvYHlSeCG9scg+1enVrwn
PlkrFRqcjM6JLu6hlhvoRcTiA7JwQofIwSc5+6O3eue06ya61SC7a6+xzSJ90ngNGvbIzyuMA9K9Is7rSbz/RFiP2p4pC8CepGNzDHOMjnj9aw7qXSrKOJJomlyZNjYG7duxuY+yjGOldkqkZRUYu3Lq/kY1q/M+ZHL3nhyzNwupzOJbe+mC+WVyq+X91s54x2ArO1jXtLE8+ovFIJYWELAcqUVCA2eCcnGSelbjxMtozLdB4rl/3abcujFd4w/TB6AY4xXG3uiRSWXn6gJLYS+bvLR7WVegYkn6getePjsc3Jrl0tc5JVDktU8ZH+zxHPN5c5cMxAweORj0GD1ruLLxFqSWcOjSQb1mzMrOdoJ2kqMjnIboK406Npxvo7WW3+3zFAnmTEKqyMOPungkDO7J6YravdJu7rULKys5Wa1sGiY7xhmeJy23cOrH06dq85OFOKqSaME2tjt/CmpT6xFI99+6mjIa5jjYDzFY7Rtzk8cbueM5p1yBFIJGhkVA3dvlbJ4Jzzx2+leRLLrdlNcarOpkmLNcbPmG0ysMowA5wVPB6Crlv4wupNctoLhXaSFfMK8uuSu0ED27ehrCpOTcPabdynVurHpN3AzRS2ss5uHnDEEDDouMqD7ZPFQ+EY5ryKNL6QpBDI2EUb8bflJ9ACR07n86xLi8ezuU1W2KkSuC4zz/d4HX71M1i+06CzuZJ55re7ljSWNSP3bHeAykcH7pLAjPNdNGlTdVcvwrX9SlUuzvdMsf7I1m4tJrkXcJw01s/yRzCTKkZxs54I5yCKyvFIfT7KSXTbJreFLi3aVCuZE8olSeOoIYA//qrI0/xJp5EdruWaOAiWRZEGWCDLQ4J6Hue1YmpeJo7+WbY/kqrhgwyVQM2GTd0w3SirVoqk0t+y3W46lTTQtXAu7DR47i1UGB3c3KHAdZUYlGC9DjoRwK5DTfE8r6pqXmq0QjjeaLgj+DfuX2OePpTLzxOmpRyTTWuLq4YyiJWJysaiM7kGOWCt+NXNWji1NtPR0S2mvYjbW52HquQGbH8KL26mueniEpxjbS6Xz8znk0z/0PQZ7uzt9BmnjdbZ5jGVMh3B3xkrz2PTFQaDqujRW+l21kDbRTSFJEC+m1lyMclvX/61c/4hsdR13WLCx+1fZ9IvZCUCICVIUbVdTgAncB16mse4W0vFaTSmmWEeYAWHz70UIAOT93HUd6/mmplsowlLmtqfkbV0ehvocWs3NtpsONsW54UeQReSfm3Hn73OB83rUAsZoZ/tjWoTXXj8oOx+/FnLqF6fdPOe9cZozXGrQQ3b3MzyR7xcRRYVzApGHDDpl8cEZxk+lesyaloGka7Nd3FiX+yq0gLsQ284LvjOCcjkUoQ5LKq9+v8An2JfdklvFpiRW1i0DGa8hW6Zc7fOMQz8zHnoABVCDVFvBHqF9G7mIsY+cbHb92RkHkHP5Vh61qPmeIdL8bpcmd7dTKYWOfMwpBIYcbcEcdu1bmqW0cbWGyRbNbl4HkZxk5lQsiheCQd3XpgZNYYjmjGMqXxJ/wBX/ATl1Rlf2NcyX11bwSxW2941hjxjy3ABznONpzzj2qzrFlp+p/Y7yS5k+2adcMGkCYWaQgqwKkEnn3/CptI8H3ulaHdweIbz7XO0cbCNSN+YzvbBPOMEDt0xXI6DZXGg2riRpZoGlWaIswZ1RwDv2ngMN3OD0z9K9Og5SheKW+77f11+4rzO28M6dJo8FzHOQ11eFRCEGRHsTG5EPyg8nqPU9sVgTw395rMFnp5dLuFzvjlwqOiKd5Pbk8DFR+ItfaW7s72S6VWtJVeQ7wGMQ6AkHvz/AJNdVJ/ZGsahBc2MpitZkZ45Bl34Rd6O6424yeQPetZpVUlB7dP1ByuYvhjVo4b+aGJhBbQIV2qu5MhjjJPGVNdfrUVvd3trZaGhvFyBcvt/1WB83mN14Jxu6E5ArndN01LZrw2du8drduSxL7+NwIOecbRnBPXNTaZrUVnrss
hzOLlH2mEFcxy/KN+PQgn61hGjCT9nPbTXs9dvwBWuXIr25G1r+MPDp8ghjJJKyI3IXnuoGSKxpNYXVL+6XWLnybGYC5kuY0Lu24YCRrkAMBkcnAxzS+NtO1IapZXBAltbRIguxvmYn5jvA43ADbu9hWHrMWn6xbSaFpl0baWwUu0bgMrKTllyBnbu56Zxx6168YQjU1V21fXZu39f0y+a7szm/Gmt2uqLZ/ZtPjh0e22pGsbsZ5jxgu5xukAOSAMnkAY5rglk8P8AiKGSPXrSfyoyUtZCpODKpHz7cMoUZ655967GXTtO8QQ2fh+21SFVtpUEhjUbQW+6/OOV/DtXSWHgO/01zaahPFdSRy/vJmXaWtcht+BwcckE13X5o6rbyOlJO1jnGm0uySHT5HQiG3idfJflJIwDt5w2SnBz3Fd/o2pmzisbiFhAk4hjhjlcZUHowZj8zckZOa5K/wBDgtlk1K2hiiu3UyxCUnKqW2kgMfnwox2+bgVyF7o+sXjWOpaOJLkS3EM21gdkUUfI75O7rjoB715qvCdpN2EotOzZ67d6zYWgn1TVD58Fw5fcWJ3gyAeYXX5hznPQdTjtXlPifxXpWn6iTJOYdPVkIkH3MEZ47kk8DHpWz408NXLsyW1z5ljqcnmPByDDtHKADoo5bPf615TD4R1Lxrrdnp5YRC5k8hIXBaIRklEdm67V6nAxnv1roxOHlXSTlddF+ptKneyOq0nx1c6lFqSRQJM0217c7ssSnUN398dK9DtPHd7dWulR6q8saRtJLcBy0aMN2QGOOPmIwOpA968L8HeA/EGl67JMkKXBguT8ijiWOP8AjjdcFSOCwI7kV7zBpdo2qXXmxGWCZtyhnyGZgPkYH1YMOelcGJw0afuzldPy/rsa2jTSZ6cfF6adaXmna9dMuoT7Z7fbFkNEqkblkU7FHOAOoIPSu60O2EWiWl6jLe6bOhMbRjEoaMDfCxGSCG5OeqngV5wnhRdZuC9w6ppduFtrNrYhmctkPww5fflSCemMetdPDHfWthLb2rTzPsHlCQGI5jTA3A/Nux6jOeOQBV1aHLJ06b+HZfibSl7zkem3WmaHrF/FfLbRjYNzCONWLMgzJJkYO7rnJ7VxN7a+GJSuq2TnT7i2utyM43eeh5YbDhi4ZcjIHBxUvhu0e4vrSTRb9o18ly7My/uZJPkfBOFy27HP4V32veEtI1G3uUDrbXkUMM7NjcFfndnuTg4GO5rGhSm5yrSd7/dsbzqKWltTw7WLfxTcXl74n02eeCKSVGtXSB0ljCj5146E44B/lWlN4mk1jTFv7w+dNKpkAZRGx3qVGBjIODkY46+9dXp179utSzzbEDARjzAxaXlS21snIAPArVv/AOxYWuJtPt1uLnyhsyEBVkX5jnGFBY5/Ksljoyju1c4qlJNXvueZraJYpbx2sjxQ2aMyoQCHYDDNn1Gfoazo9XHiBnsBL9qjCqHEh+5Ggz94+rL9AK5SefWNY1oxadILmKPL+XGckwIDvOR/CuMHJ69PfsbM6bonlx2TRRx3X7uWSRQ2dnDIq99rHAPrmvGrzxEHqraaf8P5HnVFrrojK1OPUYtFujAmLi4KrHHwSF3KAxI5xkd+3pzXLajr0+hz3FzqLJNLJLkANypZdyllAAAAGRnrivZYrG6uSbbSGggvZDm4luNu7yl5xgDK/Lk9s5rz3xH4LXUp4NHsUL28l07Xc7sVDRyJ1LdW2rheOhye1dKoxdNQ6L87Gfs3YyPCviKHxPZSXclwbNbtZY4t6jErodu6Jm6Y/Iniqdz4T1XS/F+r61JIHga2X7Pg5abzBlHVR/ACcHPcYrtYdO0LwrYafocdhbQyGSR7IshcLLgsXGSxIPc+vYVL4fvJ7uyinWH7O9kjRRXUrbi8TfM4ZMtgZJwCcgHsa7aeGiotRlp+Q1Dojy7VNUutC09Nbvm+0XMr5UBRJGpjzznodp+o7etcpr2o+IL0XXiSSOSYW8
AFyq8yRsyqeUByAe57V9D2uk6fPJY6lAgk0lLeSS3XB2LIuUckkjAPRBzg5JrJtLXSbq+8uyQvLC8jFUYeVM5yil8ZDYzgD2zWmDUIWu2xWS3Plc+INaN3bxWkUhkuQWdSrAlWUERg9MkcnHetTV/GOoadqM/h/wAQrNbqLdGhtVQojMhBBdPl69WZj05r0rTtC17xB421S9tvKCWxe2i6eYsgGCoZs7dnByMVK/w7vW8NX2ia7Zya1rIUQ3OpywCeZbVSCYnIlRgxX7rA8gAEnpXoYfCRqSlFqzaXqzSMUzN8P2dxq1vHrR1GON5XiaG4jyCxXIaP1whHHryfQ12LazrOp+KbqzucO1nKsZjVfnRzFuz7buefUV0PhfSW0zw7pugpBblNOmAj8oMh3/KC8gf5wfLGeeo47Ve8P2tlqXiCXyZJFE8nm7cHIcHa0T5JbH8Rz9R3FefUwcW7bK/5Ezs37p//0fQode/s67CTCOSK8KyydPldOD5fGQVBFc3oksVg17NqMPnCc+ZDKrAlWWTeGx1BPv1ryqTxRcxwwRz6Ufs0K4iZlPIYqAzDGe/BPYVg/Ef4gweGtAkeznjZ4Y3YDoxkYjJGcnCjt6V+AU4VK0o0rayf+Z+W08O52jHqzpfH3xe8NfCSLz9Lm+2apqZmmWN1URxJIACMc8K3QdevavmbVP2ntZ1KYuJ1QsCrBR94P1yfTFfG3jjxlqesapLcXcxmXJZSxJ+9ziuCjurm5kCqTufGMZ5Pav1rLeGsPSpxVWKk13R93g8mo04pTV33P2N+Hfj9/GGjaTqU0ayeSkkHloOOXyuc8HPA+te43OrQWl1YS3y/aJWQzIjLs8t1OFOehUANjFfG3wi8H694d8BLLrs0VjFYol05lfa7CcYVdoDP8pwfu9a+jdY1iK+8LaJfwXCzT20HkxFiwSUIeAGKhiMnBGM5r84zjKYKpVnT26W9T4PHUYqpPk2udQPEUuteIDpkN2JZ4ApYBAMg5cMXJxhOMD1NZd34xuF8QNY6vL5kjNI0ip9xl2nJ+g28VmeHfD8GlaTJdtfRW2oXtxGJi7hvs8ceAVLn5evy4zycZrF0y3sbzWtQ07Urb7FdwySO55LbEI5BxwJc8r27V49HAqELR20u/PzRzKlZXNW9ttMv9PsLq5uI7SPWPIgMTEmeRVfcSoQEjcvAY4HH1r0jRoT4b0w6VBcyFmmWQI+0EQugbbwST0X+tcLf+HLC1lbxRNqLSyj9x5bKBHHKQCVTPLLEpOMcZyO1bmox22j6xLdXMm+4xGznPG0DeB7HacEdq6ZUlSVkt9L/AIA42Rl2t7qk0FzG906EtKsyFjhmAD7R3UHocenvWrod3PqonntMJZ2vlRHaf3qlRuKnocNnj0wa5ptWK30WsRsqx3khZ4EUZSN+EwOACMnLf1rqfDWiDTfD/iO5tpgsjzid2ZgMpK+1AvOTjv7V5cIu8/K+3kS4lhNclg1ifSNemIWWXdtJ/eiN1G0qR056A5GKuaTLGNfS8XM8su9WC5ybaMfN15579q5vUNAPiG3kutJ05pb1MJaujHfI6Zwpz13ElePoe1UNMaWx0yxnnYxapFb3SvABsOXCsSM9MYIxjiuyrCryU6vZ/wBL9QcWlc9Rj0vwjBdTTabEn2ed2bzSoxsUZUN39/Wtpb+VNO1LTZFz5lqN8pQMTbMd7hT1JAxj6gDpXnQv9NS7s7SOERWc0cauAAzKZABjk8MpO4+3arK+IbHw74lMBeZ4bfIWVceVuYYBJ78jpjpzXrRbcoX6uzsdEW7plG8ZrbXdV0I4migYC3uJmKiGdV3v1HzKwIOM4GM1B4Xe5n8RyaMkUkZMKi0mjbKPI7bPLbqwyAQpHQ4zxUPiqxvvE9xJqlxbu/nzGV5jykTRwlbgRr1KAnPtketbOjaxo+n3cGtaVOu3TJM/KuS9ynKlvbsQDwR7mm8NH2vPL4b7d0a769Dvm06PT9LeS+VJXjk8tf
N+8sbuM7h3IbGfaqGtxQ6dqcb2CQxRWY2S9MuXYMPLPUfMeQBgmub/AOEkn1u0ik8yKa6jLSMvBaU7sqCB09TnnApbq6tYLuxt7rzZhZL+8ljXKSOWyAHxgDAySc49M4rCUU/dj8jXnaV0UtItfGia/faroFrCdNUZu87zxK23amOEI6luvGOlep6folhDb/2tdZl3K0X71gpBySpZkPODz05zzXNXuuXq6dLHdK9vKJNsUcSkRIQTuKoPvNzndnJPJrLMraZYlufKny8JdzuJbgSFTyBu5II5J9K87EU7Num727/1uKTVlboavhq71PRrK/0Sdg6RzecsIGAEVccYJOcMDkYB9K7S08STW9p5s0g8zcZP3nQKGxu+bp6HFecaQk80sMlisktu8TB5ANu0bMYznpn5uenrV3xRJqGoeHf7LkJW+liaNY7dhICvDBhg8AgE59a4ZOo6XPH4l19SPaato3Ndn1nRLaHU7UR2yTuQI2cNlSx3ZGeA2SRkDrx2NauoeKtX/sKI2km29uEaNUYbnYRg8HvkAbuvSsWxWa5a8OpSR3OZI5o42bdtVFwo7HAAwfSvNYbfxDKmr2MCzJMX8uBmJQJK0IBfJ5C46+lcGAxkZyfNG3z/AKsNVbSbLt/rxi0uz0/TbiNdRl3zM/3slGw6gHvg/XiumtNT1W20udrlg1w0CTfvCNyAzfKoIHIKAE+neuG8P/DibUbax1Xw/qEU76S72xFweEdhgumPvB2Bxn+I/SvYdH8Dz2sF7pf257vU4hFFc7jj92Uw0ijnkO3IJxgEV0001fkaaX9aehHLK9jkdespbCDUPF/gt/Pudd0+eWFIvmVLmCA7lRVGAA4zt6HNfDnwm/aD1PQ4dP0j4qWyzLNNvtr91wqMzfvFlUd+4Ydx61+nEHgvQtM0xPDty01vgSRweW7bmZypOxuB90c46Zr8dPjJo114f8WeIvBQha4t7TUpgrgc7Jj5i564OH4r7LhyNPGVatGrHdJ+ato916ep9LkWFo4h1KNbe10fqjpfiXS9S1eOfw6JLvSpIhJc3Eaght5Kbie5IGQDwMDpzW7rA02Gb7BHqJuLeCOUYVdrB1wWRgOjKOcd81+KXgr45/Ez4MXsthpl7JLYSDDwSjehXuRnO044NfpD8AP2hfCXxivbPQptGnsL0M081xDtkRrhicuUI3YYHDHPYY6Usy4HxFO/sLSi9ujXy2McTw5WXuUlf8z1bXYNRv57D7JDLdLcXSC22fu3Uk/Mu09OB+Narx3Vi0WnJeDyjKIoRs2riTPzZPcFMEdAe9fRdx8G/G1wv9p6dbRag5maZGBAfawOCnOQ2D1/CuJ8TeBtS0vVrC11ZDGsLu0jSKVMcUgOQpPU7icEdM9K+Sq5RiaFOUqtNq2z3W+x4lfAVaUXzxa9Ued2ya5ZaZb6dfRAxNty7kCJo1fkDsASSSBgkV36t4cmlttS0C0SC3VTsSFDiERhlAVR1O/JUn1z1Oa5e31aaw0e8XxFdRLpNlMyiV5Fi2xqS3mEt91cjB/XrXzl4n/ai+CuhS3Wh6drt1c2cg8me60u1eRFU53bZflX5uhIzxnFenk/tG3SpQvqr6X6/wDDkYfC1amlOLZ9ZalrVtfXFpqhmCrKm4xxkIFk2AsxA6s5JCk5x25rgdfTUII7RtLuFtJ7mMkOymSEbmyockj5hnkvuycc8YrE8KvoOseH49c0K9+16XJZxXdtNnIfyTsCpjno549ifYeiWt5capbNa21sWjWOMAspfYSpfK5BB7EDPQ5961ljZOrLmev63189DnvJXvucXq/iC70SS1nkiS4eYiKRdyhhgDLSdcDBAzkcCqB8aKb5p9GVY13lZECkOgQj5/MxhwWHOTnselGt/DrVNRtba60CVbq7GPtAkkdtz8uJCVBBznkYzlQPemWPhGTTbh2trc/aZh5kgDEcbCzkK5/i2gheufxFYYiTgm5vWWi7v9ROL5ea2h//0q3hHw
3JZ+Cp4rkPfya2heG4YSASBGRiwDHcjkMy7TwcE+lfE/7U99YrLHY6PffaE08mNkZQrLlcOGIxu+b9K/VXw1f6bqmjkzzsbeSQ3Dg5WJ0UEbQB930IHrnivxP/AGnbNLf4m6wdNJhtpZmZYj/AD2Hqp7e1fl3CyhVxXN22PjckjGdfm7HzFcNI7HcdwNewfALwbD4y+K3hjRLxysFzfQhzgHA3ZPB64xXlVtAHkWGTgSHCntn0/Ovq39nYR+GvH+g61dbTHbXSM3ujfKc/getfp1WMnTly72f5H1WKbVOXLvZn6d3Xw9lv7HWtJhULctM0QyeSu9dy4yP7vAwBg8Vv6Z4Bb/hG00qeQ30unXTeRDHGQyj7yqgwMlWA6dckk166lnpevx/a7S4mjnRFCsQrR7ckAhicsw4zkHOOCKwIfEgvNWOjvdNNGgCyyKShOzKvGSeMA8cdehzxX4QsRampW2ae/mflspW1uea6D4UOmXUUniPKLctOrW9yUG4k7vmCk7XYtgZ47YqrLaW+p+IX8QadF5E0FrLDdLLlCwi/1PB75+UkdSB2rsPEESTB7CBCHUmSRQwxhtoXr6ccf1rsZfC91Oj3AWNIb+NJMs4d3lRhgHCkqoZfu+ue5pLFc0m1H3SoyV/I+dbk6vP4fm1PV5DavFL+5WXgoO6oTxjac8cAnmub1u71K40iTxNBYzsHRiYmiZ1ZYv8AWTkAZ2ouCSOPWva/GfhO/vY5NR/0XVdT09GEdhJLiCPrhtnBkIPUYHpg4rj/AA9F4hvNWh8VPNcx3mlvAWgcZys0YHk7DtCxsPl8vG1lOCKxVByftKjdn26evmacqer2Pnf+3NRtPEzyIJDFLDG5DDcVRscHsNrEfh+Nd14Tg8W3unWd/Av+j3TSRSruByC3yEqPu/j2+ter+NvCuh3mrwanpVqsUsyy2kkSEAbZEIZdpOAAw4OMgD8a3rCC0stSmu7PbDDpohlkPLBgCsUagDGSCozxnA5PNa1cLTvanrf+vyMny9Cra2+q+D9WutAa8lulDKJ442BEM0aceVwCcHg9PTtWMdPsdQsn1kXYZpSkgkK7nidTg85xtbccjnpV3WdQtLO3l1CQujw3MFwzoxJmLH/WEEEhSdxwD1IFc7J4e1vW2B+Htnc3y3M2GjgiYiMTgbwwx05ySfeuqpUk06UVft1tfb7jVU5S92Kuc54isbm0vXuLMO7SvvIzwxdyCQAOAoGP90V0Os2s1/oS2RUnWZJBcRqucPFFIFBI6bn9SOAOa+s/Cv7PGvaha203jmWGwtYHeXaH2zKTjaQBkDuTnOc16hbaN8KfDOpyPFp3+mP+78+YGRQB/dBGACewr1cuyjENptWXW59TlnB+LrJSl7q8/wDI+DrzUdXGlxaYka72MymRuGUyhRIpK8lWCdPXPrWDbwalLdNpGk6aWnVEP74bULkl8tuIAGTn8K/TO11zRvs921gtqbi3xIpMCoTt6rnHJ9Oa7iy8QaFdLZ3Gr2NrLFeIAd8acEjkcg9R2z2r1YZF/NP/AIbse5LgKXL/ABbv0/4J+S0drcXniWyuYLkwwhFd5o1KhpSCxCgD72wFVzgDj3rbn1ww8afqH2jypBMNi4YrJx+8P3cZAzg9Tj1r9gh4E+H2vQqreHdOlCfdHkRqQvTIK49f1rkG/Zq+A1/K5l8LW8UoI+SKSSNcryMKGAGPpVPhqTuqclr6nkV+Eq0dFJM/LM+Lru92R3SeXcBlYKgyqtJ8xwT7AA/U0+/1O/vA08TiKcJtQDkOGIUbQcghcZ9jX6eT/s6/BNp/tL6NJbShBEds8m1lByMjOKw7z9mL4RX5KW63lgIxtUxT5ABJJ27lOOa8OpwrXi21NNv1MHwhi7br8f8AI+HrNJ9Q0/StHWIXE0oMnl84CE7WkcDjIxwDwK8/8V3F9ol3Nq+jPJcvYMsdwzE52OwkHTA43YHt75r9DrT9mDwtpWpw6vomtXMM1oAsSyIJMA
cjJGMg4+YY5rzXxP8AsveLAsb6HeWt89xuW7eQmIMmCVGwgg/N156UsbkWJ9j8F2uz/wCGOWtw5jILSF/Q+VvCl2NatJJ9M1OOO6u0eOKUhShKBmOFY5Y8YIxx3r0q21Kxl0t7S6t0t7hEG5nUmNVUL5nHXYe3t0rZ0H9lz4jeHbaC6j01ZbuGZ3S3hmj2IW4LFyeQRngDvk9K07v4O/Fa01EbtBunhUKHKhZFA6bhgkHA4OP6V8VLIcdTbcKL+SPNeW4mO9N/ccLcaEugpFPZx29rJPNInkQsQkkAAXJXrlcbgxIxziurggtraymub8Fbm7dbdJ1kEbDyxncWbPDHkA5Peu4uPhV410+4i1O/0V57vZNEjhc7UkHykjptUZ96868TfDf4gP4e+wrY3T+a6yrIiOTk9VAz1AyeSM9K6YZZXpJXpSu7306v/ggsLVv8DDV77X7/AF+H7KwuY9KDxxgKZAFmxljjIVlK5zxgivyj/aS1651P4o+ILrS9nkrKlvM8Rz5ksKBS5A+n14r9Zb77T8NdN1zxHrNtcR/Zo1IZkaPznQAc5HST65HNeVWvh/4T+JvCd3q+s+H4bK6vJDdMVwzBn/5aK2M5yeR6+xr7fgbATpSnXrpqW2vy/wAj6nhXK5uc6sla2h+A3iTVbl7zy5o1DoWDbSct6E81+rP/AATLuvDeitq994rjiga4TMc8oGViDAEDPdmwBivkv42ReAvC+uzXmkaUslxvZdr8ocHg/WvNdF+K2vaQx8lxAkZV2WMY6D5VH0/nzX6PW5qsLU0fa4VwoVeabP7ENB8UeE4NEbVIrxFtreMO8shCgJjqc9B9a+EfjP8A8FJP2bPBdrfLYX0PjDULN2hSytIvN3yDPHmuAgUEcsCfxr8dfBH7T3irxJ4c1H4danqkkMOoRlXcuT95CiAD/ZB4Hrz2r5Bl+D3i+x1qXTp7WRw8mFlx8roT95T3z6dayw8JyvTqO1vxLxXs/jgua/fofQPxu/aW+Jf7SmrSan4ge00PQoJGNvpNjGIIFQnP71h80zDuW4z0FVvBHhHVdStXvJ5CbOFCWZAMAAZ5rzKLwbe6BfRw3tnOYQdu4KFYAcc57j3r0Gz8U2lroDadczOk6ytEnlfeaHrl8dTngVpisFDktFWZz4FqnK9j9DP2PdTaHwFf6KsyPBpOo3Vrbb93EFwomcKPVS5AA9TX1JdeMNP0/StN8P2bS2kKptaSID5iIwnzHPJx1GRXx/8Asv6Ovh74X2/iSHMd3q+oGWUyAs0Ue5lGPcooz719JXevaUNOEqQ/bRp8eVjAKygMM4Y9SQO55AGehr8IzfGRWMrUFo7v81f77H5Lnk4vF1eXTU6XSdYh03QWsI33XeokkRCNmcRAqScD5skFsA+/HFcZdeJU+1zWsUOBuQSIUJB2HahBz04yB64rC8MeN4NatH1C1BtruSZ1jX5gqRgbVLH0z0OerECsrWoL211dbzTbRtQTaGIaTyrcMhBYyOMAE5JUFgTjAHFclSaquKirNaedjyuZvRH/09yC9E32i0Aa0tdPwJFRvmbzM8J2YZGDn1z2r8z/ANqDw5eWnis6t5Spb3pLrtb94oB6OnOD7jINfp5d3kElno1lYWj7o53UhV2PJIqhRubjP7xgoyeOSa8W/aR+FWr+KPBx/sKa2kv9CeRZbNguSAoZ3RznndnHIBAz1r8L4RzNUcYnUdovTX5H59k2K5K12z8nLK2hLDAIif14HBzxXsfhO5e0kikt+iYZT349vf8AmK8n+zXVtdvbXf7ho2IIbj8MV2Gjasumyxs/72InqvX9eua/oCi1pY+7lqfsd8G/HEGuaDYamJSJbeZTKT86xSrtCFQeobrjPrXpGuWFr4lmn1GBtt+oEcsUGBGzuAULA4CliSWb0Hc1+Z/wx8fw6FeRNb5MCsryRtyDg5zjrx/+vIr74uvFmi+IvDhu9In864vo3BBIiEe3LYcKuTxwNpwMdecV+R
8U8Kzw051aEealJbdn/l2+4+FzfK5UpSnBe6/wO2h0XRYtQOrRyS3U2o5ijUZeNpYyAzkBgcr/AADHXHU5rrfDFjri3EOnxTefLZXDRIkeXeSJhv8ALYDndnn8cHNc/wCAYb3QvBUMepTQSfZp0killwfn5Y8EbsfMBg4zyeleYW2v2+l6hfazFqUouoHja1NuxBmnUqpIbAwpdhjuecYr4rD1I80YR1S/p/8AAPEjG2x6B4p8MRXesP4j0yykRbvy43LuBGGViQQoOY2Qgnn8OuK4fWvGEFneXFvbQs76fMwt53ZxIMsASWBy5PJXdkDHFdNFZz32rJ9r1KGzmu5cxxhd/lzRgtI2MiMfOTjJyMDIrG/4QhdWu9cvJFnnF02QWYAyfZgHJ45VSRgK3JHJ5JxtGNRttaNu3n93TuXFcz33OGiuoZZpdZgLSR6ltCEud4niJVsgcEEEZ967TSIT4x8QaoLXT/NWCSM4SItGwiQbTtGFJBPAHcknvW1f6HodhYWt1lh5c3mbZmXc6SJyiIMErEygBsAE5xkVnaNrKWAij0mWSzErEOfNKhi/PQYz9ADivpclyOcuZzXuXTPpsh4ZqYqV5u0O57ZoHwb8NCSHXPF9/Je3CiUJaRBYUCyDkOPYAAY6dRX0PpWtWOi6ImneE7FLe0thhUiVido64I+9XhXh/Ttfht1uzJ9tG7GyST5M+rFuR9AMmvc7K/1y6sVlhVLdYAAWaLC++C5zjPfA9hX19HA04fAj9ZwWV4fCx/dQs+/U5fU77W9QuJLsXZgiYco6AflkEGuJutGF5OV2vNGPmG0bdp64Jzgn0Nem3PiKBovLi3ahcqD8kSmIBh3JJx+Veaah4s1GxePECRwsdsoOCFBwMF9zfj8tdWi1R3TkKsNna3wnhI82MAT284yzAjsWVWPHpkehr1DQotKigXTbdFu4GffHCgJ2H0w2CCORxnNYSppV5Cs1nNhCCU3qJIwQOiPjj6V0WjfbSI7tR5DRNyYzwy9tw5/P9aFvoTJu2p6TZ6ykd2IYX+yzRplYyACQvbLc5HpXdWOv3d9AGitsvjgrgn/JryzWYGvJ4btNyblGAGyGOO2OT+ZrS0OW3t4Wtrm7Z5s/KiIYyuemNwBNWvdkc9Smpq9tTq7jXIp4ZbbVEWFwSMN8rfj2/SsWbzblQ1ndCFoRlRkEEHpn2/WrFxZxSWzi+kkMbjJ81QGUduRxXmNzePpV5PaW85nTZtB2/Ng9weB096xqrW5vRSWiO30q71zVbdt6C2vYHAGDuV16E+v9K7Gw1K9tR5V8oM8J5weGQ9COteP2us3yoiGXGOQQpBAzxg5HPrXTHVtRneK3m+RCRiYHHPp3/nWSlbY1qUr6PY9kTU3wH2743PUcEVrW105Z1YllAyG4Oa4G01SGARoshmKjnI5JFdDaXQnPmgBGzwP4a6KdZt2R4mIwiS2OqIaVcFin061zN+1u8ThZnDLn72c5/MVzXibxI2lWrzx3XkurYLHGGPoBmuAuvFGo/wBnSahcAx5YbSUIyp/n+lXOqnpYvC4CXxXPmD9sfxGLXTtO8O3EhmjnzO0bHKllOFxn8a/Jb4lfHrWNAhn0+2t1REG3cQQpGOhA/SvrL9vLVr/UZtOjeeWO1MIdJUJUltxBAIPr2rxj4UfDD4c/FX4Ja+uo6VLrHiGzhLRfZ5D9rOzdgjJwSTwSRjoOO/vZZlkayitrnFmmYuk5eR+ZHjvxhF4naKdciViXfJyNx7D2FeaNcO2FLHPbPtXoHxL+GevfDXx7deCtYH72ARyoSMZimG5c+4zg+4NedarCLK9Nru3FOpFXOg6bcGtjzPa8/vdz0TwPpr3Wo2lyzmOISli2fQ45r9lfgN4p0FILOHWbeK9I2/vJEUyKc7cAkE4XqP8A61fjj4E8PeNNekz4b0u6vLZGBPlws4PPAyoPevqDRfFXiLwv9n0TV7OeweBdxVw0TN3BIcLx/OuXE4SppVSPQw
eJgk4SZ+rvxP8Agp8L/idoV7b+HIE0vUrgFmuXZjGec5OP7x7jNfiT8TvD958HtYvdJ1tPPnndvs8sbjY2w4EgGSRx6j8M1+hvhv49aOuk+S19LPcOm2UbgOnpz1/SvhH44+Gb7xt4qfX9FMs1u4wVm+Yox5HcjB9RWUcQ5StPc1r0LRvA/UPw1Yt4C+EHg61vbpWl8Q6bY39sBhzL5vzgYz8oOeT68DvWV4h8VSJb20scYiuQyrIqx4a4ETbG3+mcYwRjbyea9Q8b+C9Qsv2T/gXqEdlJqOpWejQx427lDInmASN1CLngdMDAr5ul8R6toccfh/xLBDfz6ixmLtHFI6XBUAIkZG9Ii2FcsFzn5cYr8izvh7/bpz3T1+/U/JM2wkvrdRf13K+h66ra9Do1u32WCS/Rpo1J3qZCcHcAAVXBAB4B5619d2+m2Ftp9vFcX8jtAsU7RRxCURhwNrkYOWLc8rwDn6fL/g/wFeDWoVntjperi43ahaTK72c0Dv8AM2/5tjDrgNhT82R0r7QN5op1QX1xLHGmpwW6RyJtaBY1jXjGT5gLJww6YwQa8nGYWnBrmdn/AFucStCSZ//U6Kz1P/ibW19qDeTHBGjRxZDBQ2ERFT+HPJGec81H4onttZsH0OSykbzbjLyuDJ5sJIGDgYIC52gY6Y61z77pPESJbs0dtKVW3bITbsG47uvO3AXrycivTUltrbUpdT1BpYrS8tvO2K+1UkACkKoAHzklh3G4n2r+XYynffb/AD/4Y/LXUSakfjz8YvA9x4E8YXdlcxhI5G3x7G+Xa3OADnGO4NeXWMZUGSMbM+nI/Kv1O+OvwmuvHdnI1sN8sMSSrKyssY3IpMe7kEbmCjODx0r8yfEemat4cvWsNRtzAEYqCpyCRxkY/Sv6A4U4gjiaChUaVRLVfqfdZdjVVil1NzTdRW3kTdMu1SCAOuf0r3Dwj8TrvQrqKSyv2UowLJIpwfpjg/XOa+X7W4nGCQHwPvHrz2zmt631Se3+VQuMjnNfaQnodso3P1G8O/GHwzrWg2uia5B9ltw6O7QyPtchg3K5Vj9N2D3ruta8c+C2jEfhi/tWt1LeUk4cPGf7z8KD1yFT86/J+18Q3CHCTnJGMK5x+Wa1oPE2pL8qSMieqjH155NfLYngzLqjk4wcebfldv8AM8qeSYd6pWP1E0j4s+FdLQS6rILmYx7EijQjywvIAIwAGOC2DyO9eSeO/wBoq/uzdW2h7LJLiQSu5kzI0hGPkXoi+3JHqa+ErnxVeSfIbmRQePlyxwfVj/So7LXD5+XChF4yQC5PY1WA4UwOF1hG9nfV3sbYTJsPTlzJX9T6u0PxlrOoXj3er6hJMXAB3OSxUdlA5x6Z/CvXtJ1fUYru3mbbDPcH/R7cNmUp2bOcIO5zzXzd8Pml1e5WScKYrZScvjBbqM49Pr9a+lvA2m6xr/jjS2spTcTTN5asIt0eFGeE6tj1bj1zXRipJuyPt8ArQR9m+BfD+uwC11C8vWLv0jhKYPGWx5nU4/iJA+te0ar42a3t49FgiaJ2AzNLudmP1UBfqSce1cboejN4XkmeSZbi+nHz3F1MXdB3JxuwM5OB19BWnd393NMbW++y3FmwHzIj7iccHGTj6k/hXntLZHo8zOG8T3upXyCCyti8QOXn3lXI68ck/wDfI/GvKbu4lt4ZjpcrQqjfPFdbmOSR8ylxz7g5r2PUvCUk12uoaWrFkGEVW+X2wOv51wes22qXdy9tf6cJrnnLA42qufmPPb2FYzVma3utDtvDWujykh1K82zsPlMeEQA9iFyD+PFev6V4kl0G1WaV2njkXBlwQrKfTbn8OTXyBP8AENPCzfZtSRZ2tjhGETfKvryOoPcHn612XhbxvD4jjk06KPzguZI9ykAIe4B5z+fvTjJPczlofUknjXT44FgvrZmilBaGQLu2Z7ElQB+NcwfEOtWd2RZpELaXDA7d/wCBJHb2Nc5oeuai+nyWTS
21zbtlfIDFJF+mc/j0pJvI0uSKRI18mVgN+fnT23Ac/lVN9bkJ9D1O28Wfa7RreeaNZIud6LtOOmAMkZ+prJ1h7jUbMSaW8UNwp+RJj8sh9sgYz6BsVSSy0Z0a7tD9sWZQNrEHLewz269Kn060s7G7NzeKt1gdH+YAdwN2OlZSnfcqnoNgsvE8lkH1GxWHad3mRvtXI7kc8HHYGvSPDeoubbybl1jDjnbtII/DB/T8Kyry4lhs0urWLbDj5VOGU46+lc7b6daanOrzXMtlJH8zLH8sbA84G4jk+oqbJPQ25rrU9wsn0a82RqS8rcg4HOK7COFbeFirYVB0zxn6f0rxjRpDaXK29j++cjnceiDrkNjpXr1oY5bMQxKDuB7dPU5ropa7nBi422eh51e2n9pajNFc/volww52gHt2z+RrkPERne4WCVSVtsdGcE8ceufzr1CGLbPLIeevyj275HevMvFt5fQOyxIEQN1DKmOOvX+tZSWh0wqan5//ALY9iNc+HFnrK2+0WEzJjacgHBwSTzX5ReFvE/iPwjqxl8N6lPZThiqSQuUIBPTIr+grW/BI+IPhHW/DOtrHMLuNmiLuHcSDkYx0NfhX8S/B9h4G8W3Hhz/lpYyN5iSgBkB9SOD7EV9BlFa65GeDnNJ39ojwj4m6hrHjnxZe+KbmWW/ntFitZZ3YyF3iB3MTzxk4FeN2mh2mo+JoYNWuBBDM2ZHOcKPfj8K+gryfSbe2k0nRL5FjnleW5dvkw7n7vrgCvGptLuo/ElxdSSRyQxEr8p3BgR616NaFzyINpts/Sr4P/Gj4D6J4U0PSPGYv9JufDcqyxfYEJju2jVlj80Ag8buh4J5r5l/aQ+PUfxi8YTXOmwGz0q3CRwAqFkKx8KWwccDsPWvmq81zaWhJwwPUc8dq597m+uX3RRSMW4yFODW1bGycFTb0RFOglJzS1Z2Gl61exXaRrIXUEdOCPxr7C+Gd5BeajZw6gBJZzEA5BJBH16CvlDwV4f8AtbqLpCrN0yOpr9Rf2O/gSfHvjfTEujIbezcSSAIvlMiHJG4nP6V85jJpuy3PawN1q9j9TPij4dQfs9+CtEt/PimhNoIzD8sgjCZcEj5gpXAO3B9Oa+Qp/AdvqWmyXp0+a2sGjRjcWDjz5LwlsNPDKA77Uxh94OMg5JzX29+0d4tuNB/s/QNAQyzW0IkaFAp2Rk43FSR91VOMAnt618j6L4h1HVL66N9DbNYgKIjAxjvcZO5ZVI2fewQNqfLn5q/PeLJyhiYy6RSW2/5nyGZuM8RK0dWc54m8MWv/AAljXEt7NZ2ErxvEsDlPtReMeb9oi+Uq+CSASVHHUjNHjDWfD4is4oHC/wBnlFt4NgBeJSwIwpADen8JP6zfEHUdS0jz9AuZVvLi13iESKdzKyhvlkXJzgjjPUVymkTaHrVnb67qEUcEkmLeUldzjaQDjPUZbJ/M18LjJ+2m51F9/Tp/wVufIYqUYv2drd/U/9XldK1O0u9Eea5aQs5AXd8vlTyx7W2g8jaOcZODgZqK91G51TT9Huz5tulu0haKRjuzG2xdxJxgqA31rC0+wTVNgTyo50mwMkhfNfO4xgnBYAAc5yfSudbx3plrbas64le3QiNrkhsKGVSNo+6c8Z4Oc4FfzRVpScG4rd/0j8sqQeuh6lpvjjUby9VIMCJisWxlBBWRcA4GfkY4689xXzv44+FPhfxat/qHiSWS0vnkmFsFkA3yR5Z/lxyowQDxXqvhjx5c6xo+pauunWUN9ZziXCKwkaFUCtMY3JVkQEZIwV4OCCcTW+oxa7420DSzYQONRjnLvGu4u+wkKN3Kjd1I9xXsZZSrYetem3DT/h/+CdGHnOE7xdrH5teJfC0nhCb5rdvsr7tkrL94qcHoeo71y0VwJ1DqhweAB05+tfpp4n0bRNTtLXwzLoMcl3ogknkE4kmW4aU75QgXgHGAuFO4qfauA1P9nzw54ntwLJX0S7eQxQL5bI
lwwK5ClgoBXeufr68V+sYDilOC9snfv0PqaGbx5V7Tc+GbZoYwTKhGD0+UEfkDVh7twwVAVQf32wMfnX0VP+yn49OkXGraY8dxDbhpNiSB5igbYSUXLjntjpya425/Za+MdnFaXUulOiahEk1up+aWRJPu4XnGfQ9B1r3o53hmrqaO+OOov7SPIZb6Ifukky/seB7jj+dQC9MAXYcuDuGfX1P/ANevZvC37K/xV8Qam1g1ulgqhjJLPnA2jOF7E8fT3rstS/Y/8T6VDBcaxrEMaTyeXtjVmc9yTkAdPX6DvXHW4mwUdJVVcKmaYeLs5anS/s8T2l9ZXv8Aal4IoiVGAC8kpzkhEHOPpiv06+H+l2ei2Vtf38kem3d0oWGNh/pMcDcjKrnaX/ug5x96vm/9nP8AZj03wTqMPisaib5rxSkFuevmIcMxJAx17Dr9K+69JtPC1lq9zcRsr3Ucjpv/ANY7SJw20nIUD1GT/KudYunW9+m9D6fKcdTrUVKDN5fDWh3Fz5Mk6JBHh5zM23GemT3JPbHNb8kenWk2LBUuFdQFKDDbe252OBn0ArJGj+HrmE3UULTPuLuXY7Ceh+Zsc1kK8S3wiCvbQEkAK272HsB3NM9ZM6610uWNA32nygSxRBtKLn1LHPH4Vx+uaBo11DLbz3IguJDgMG2kt7cfkBxXXWcVoqKskiSuTgOCcjHuM4rY1PwxaahY5Voi7HarqQC2O/OMDtyealxK52fDfjXS2G/RzO006sfLMyBnDeik9T615hZ6pd6Tdx2LEyTwkEoAQuVz15GAfTnPavpn4g+Cr64iSWEmWeJjvQDYWA+8Mp0yOgx9a+LfF93faHdtr9jDNCtvKVe1zghFGM8559OPxrlaaZo2mj6t8GeKZ767i3sYoJl2sgxHh++ASWIz7/j2r2yKdtMiktjEGUAuFyZCxPPQHI/Svgfw78SINSsFWYSW88JV9zsBI3cLwMDHsDX0N4c8QyTSJqoz9kmAR/LBw2fvMOMnHr/Kr5u5DifSnhizsNaUXNy6I+RhQ2HLenB4+telxWTWsm+7RZFGAgJbccdDkZH1ryHw3aojGOxibaoDeaAPl7/MetdpcaxeWkCw3EjO7jC/N69tvU/55qugky9q2sGJxNetsjbAUOdoHb7uAPy/OoEnmvX3pKI7aHbkg4DL+ZP5GuO1TVkuZy2ouZBCMKgwGY9gcHOKbpRRpLaZW3SsdzRltoHPGev61mWpHvPhtBETJHtVz1IIJXd06gda95tIVisPMkbccYzx1P0ArwPwxfJPcwqgTzfunAJDH1B6V735TrZLAW2gDkD1NdWHWjZwYt3sjhDeOb2ZUZkAxkDq3pz/AIV5j4u0tzuliwHwSCfm4/XBJru9VxaOPMQMjcE5Ixj1NcN4g8VW7QGytXVHQAbwOQx9Cep/CsVJPRnTHTVHGeEriTT9ReW9QKzALtX75HckcnHvX59ftr/sx63468RTfEH4UrFc3t7tS609WCEkDmRWYhckjlePWvsW/wBX+yags6XIinLc8HDAHgZ6k+prpF1ePWbRJEESyRkljGBk5+np3rehiXTd0c2KpqatI/mL8b+F/FPhjX5NG8UaXNa6laNtuIpRsYDGPofwzxWBLfgIIIgAF4CKOlf0b/Fb4Q+Afi9aQL4r0xLm4tgwSb7knv8AOuCa+X7D9jL4V6bfm4g0zzSDkNLI0gP4E16EczXY8Krh7bn5KeE/Bmo65O1yLf8AdwnJJQnOfbHT36V7NafDKVseaph3AAHyI2TPYhlwR9RzX6uWH7PujQPE9laoiKOFAxxXouk/s/6ZJMkjxKhI+bb8ox6HHr3rmqYipJ6IUJR2Z+aHwr+BHjLxRqsNjo1jBfxuyg43jZz1IkY/Xoa/f/8AZy+B2lfB/wALRosKf2pdLmZ0HAz/AAr7VzXw2+HWkeGHQWVskJI5KADJ9TX1QJI7DTWnkbakMbOT6BRkmtsNSu+aW5rKuuXlifnT+0
XpY8WeNG1uDXE0oWm63hkkUNMkyAgNCpIHA9PmyffFfL8Xg7SfBIT7dqxm8Q3MZc3zSS/aDGGDKJFQld3O4kdvl6Zr3Lxx4on8Tm91WxuoZlimkXcFT5IWYMrqrhR5nXGTzgEc8182eJbe40gxauGUz3DRokmWLKjbt33iRk5BP0PrX5Xn2MhPEyl5/l/wD4zMce6ey3Om8SCG78O31zdMby8uTu+9sZYUO0tuPIbB47YHSvO7UXGk6bNo+jk6xejaxWQ4bYrLINmf41RT06gmub/4STUdR/s4RwtcxyTTJIRzGtsvlgscAd89aka7i8Pzz3whlCHZJJcRnc6JCGRNhJBAZSwJB6tjqBXz9KEn79Rab/f0PlJScpOUj//W+cbOx1W70KC91a5j/tIPuC2sNxJNcjzN+wOF2KScAEHBAOTWB4rgkg8YeJP7T01tM0yV3G0qw82R2DooLZySSTxxkYrLt4rrQZ38W6cJtSvI0Xfa3cihI42+85VS4ZQBwoPTOGxX0HpnjHQtS06w1j+w7K9ntbiBH3KyuisQCV+YhSF+6ccgcHnFfjTilppZvddz881Tuc1oLX73+iRzxNbtpMwtkYIJDPYXG5JVO3+ESArkjjLdgK7vwN4fNv8AECCzvL2a1OnSMkbSAxJIysEB3spUbs8HcOvNaOl+JrHTPFNlZ6HZo1xa3TC6E8f/AB8xo7B4VJABC7lJKnnr3rX8ReD9T074q6t4zgluJfD2pacjmKXj7NcFo/LAZhyMgsQMYYHI5GcKy5FeT2Tv9yucs52un2Ga94H+JeieK7nVdE1R49Ht5QY7S1w0/mbw2xw43rz90kkY6elcP4yvvEmu2Oj3C27WypqE8cgWJmWHzXiBMnVhkjJI6+tfQeneJ1TRLCXfFdanbhybiURsrxZCjg5y0aKPvHoMAE4NeX6BrfiTSb63057pHXJubm4kkdwzOSxEMe5lVlyMNgZORt2803jE1JqWy+/y9SYzvq+h6D4Y8Ka2t1qgiSS0R7aW3mEqERL8oCSo7DDoXIHy5JYkH7tdLqfiN77W76SBULaXbq9rAPuyvHEbdgB2KnlT3z0zVaa+1yLS4XggWAyoNksCqiXDoC/zImAruCeMAFuR3rkLN01TTW1i4kNjb7SG2LsYXEbHlC3A2bNzLjkk4wOa8qWLg4pU3pHd/f8AgcnOz02xufC+laBZaKhdnlikvZpURppi0Iy7ZBCjHQDvgYHU1z2gWGla54i0+fVpZYdPm84mZ/vSxgjkKBhCu4gnsQO9bt+1tqXh2y8Y2pWyu9GWVZzbsESeDYQ0owCA+SpJPGD0PbLsdJ0yOKabTLiaV2tgjJ5wLRrckhoQFAVWAYbvUDI61GAl7PEc8kv67m0G9JMLySHSbqS7sTdCLT5W2lsIVXdI4G0j7hPy8Ek4zxg1vazqWspf6VD4ctftFxqA8yWQHMcaP8zKD0wPzP41yet+F5LfXTHq85gtNWaRYpoi0q+Yi7oFkAwRwCGXqT6ivc/CXhNH0aKysLpZBbxLLBKM8o4yrHOD3wAQD2xX2GQ41xrOjLQ+t4UzB06jpS2ZopN4m8lYfMZY0T5iFAyT1256Z745+grFvEW0gZooZ5i5O9QBuY/px64HNdNPf31nYxWUm17ljtDEEYUdScYHPqe9chq2vx6WsjyCN2Kldmw5Hvlc7s/UV9k5JM/TKexqabKbGJnLNCoX7jMMqc52nAI/rW/c6pBcRgXayiJcEvkeX07DP5ZrwjX9TSW2Wa6lkv5c4WNIxCsXoeDu4/lTbPV9Ys4UJi+1uEDmF+vOOUIJP5j8qOe5pynoHiKA6yI0sPOhjACo5XbGF67ie/6181/E3wa8TJfpIGll+Rdy4ikb0I45Pb1NfQttrlpqUItA0lnKcbSzAtkdQT0UY7/gK57xNYQXFg1owW6UKcbM5yP4sn0/XNYVLbouL7n50XOoXGheJPJuITEoDZwAzEvxnJP5DoK+uPhb4lgu7J
LDVHMMU/yqCw3AjoBjOfU5A/rXg/xM0F9Ouftfk7PNBlEh+VhxyuMnGeMelJ8MvEEQdILwjdJgKzDJUg9mOMjpQpBY/TLwk8Fg66fPK+x/usfkwoHqB1PsM+9dpqWoW5aSDytoj6YOGHv615Z4Ue4utNRLqUXMoClWVQMqegBPfuTXQ6pC8Stbq3lb8bsjaWJ7cDJrR3sZ31OA1CXS/MluixLsSdyNuz9cgZJ9Pxrc0e9ubkSzcRwoAqrtGDxxgAdup/Wucv8ATbexmVoJzLcpgjn5QxHTHH0rS03WbeCxM00fmsnzPk7QADjn1BOcCsr9yrn0v8PrydrcG4QcEEFV79DkD096+iIZ0SywCDgZJ/Wvkzw5400+fYkpNupH7qNfmyfXjsT3r3L+03bSpCjB28sZ74z+lddGehxVndnA+L9XurlpmRj5KOFC45I/D1rx7X/FEe99K0+Ly5OhdlwqrjkA55arnibxEunQ3dvcSNvVm27WIwMf5714611YXMu1zJNIiAhk5KcDnHI/nXMdl9LI5bX9bvNGla1gZbmNgdzNwqqOv454zV7wV4rS3vBbswWKXhiDwu72rIvNLtb6VLm2cyu+TulPOe2MYGM+3NMs4AqSFLTa7Aq0g2gcdcEc/U+tIclpqfUMcURtYo1VWjmxg5JP/wBalTRlmcRxhUXbkFugPv2rnvAGrRzWsNjcPtByUZjkH257gV67FpkhVYWj3RsfvAHoPvfpTg+p5tel0ZQ0vwr5aK4/eseuD+XFer6L4VIG5v3fsefoayLazEN1DZWwbJCvk9VAr0vTbeSCQvKWKtgD616FConsjz54axp6XoUdn15bAOf5Yrgv2i/Et54Y+E2rNpNzJbajeottbvDsMymQjcyCTKkquScg8djXuECL5YyuDj8a+Xfjaq+KfEunaFaXj2Z0dXmmYoGicMATGQ7BSWXjHJxnFdk5csG2ctS6VlufmIbG9vdOuLzUL1J9TS3+04aWa4CNFIdzc+XbrI0bAlVQHIbAOKyL1bjU/DjSSpI0Xl/uvkba5kO0qGA+8oB459+tfXPxA8J2Gq3NwfDem/ZBDlmiMShJJAcFZNrJhlQjaT8pGCVJrwzxV4eur6KTSdPkZ0togpe3OI0VgQdsigj5DkHHQ9TX5jneDUopx18/n1Plc7w/Nyzir/oeX3ui/Y/D1toehXMVrLBGHmgWMOfKBZ5MM25mfbyffgelY8ty2rG1t9NhUQTJJPEWUjZDEAhRscAYbec+ld/caBDrNxDZRXv2CW4ZDC6oDcSQR4+Xg8BsD0z+VelTWuhaRcWGj6ExsbdIR58SKo3MnyOJMbuCOWwf1rw6cqbXPLaLfzPmU48rZ//X8607S01GO619LjVZ9L0CKS4F5fLiNoERo08i1UGJTNOBGofd8vOORXk3goDQPFNnrVndrMbjy1l066wkTMcHGCSsi88YUFRjHSvsWC5tzpmq6vpULXVsLeG1eIl2V0UBnZQ+R8oTjnnNch4k03Q7y1tZ9Tt4Zbi2ijeCOeLygsRlM0TgJ82/Dgc9hggCvyp4yitbf8G5+cfWFs1Yr6jpsH9uXd8LWD7JbzJeQSXcqLaqLoFjkEqRkZAK5G9eRkVZj8a6mbyKOz1Cb+ycqLhXla5ZIg5UOuSVli5znr/DxxW+2mza/ZmzvIw1vbtHDcbyD5Rl5RgPu/I7ZA/SuKstYvdMUQeHoP7QkuL3y/NlgVEjh8tgcBVRiH3sztkg7jtAxmvFUY4ine7SX4/l/XU5ILmiel3Wl6ZrFxc+HZbg6CZSpeWMRtbSqcSB0YsCImAycknaeh6VJd+Ere1aeC5mittSt7FnLxPkMwZX3qibg+9AQe3OR1rIbXfDC6JZWWnyvFDakRwNLK0nlCYkKC25XRdxwp+bBx15p+k6tGt++n6hGk15NBN5DlzMD5R2yr1IfcOPmz7d65akqTS73/PQyndHY22px6ZJeW0Un2iCNYowwH+t/dcKocEZJ5
U44NcxqEBuPBsOpTMLmJbg7ZFAgf8AfKFDAKCN5xjlWA6cVxOm3Vza3Nwuo7xb2XkwqE4Z3C7nC9srnn3Irsv+El0y2sQtkyCF7c/aI5SpicjBKlXwCygj6YBGMZrw6M/ZLklF8r/BGUYrqd54MttFhgktobgbEhcyxTfKzCQDClGZ8MMjqSCens6xgsrGRLiSKO5tYh9nk+QhpkHKjGAS8Z+6wBORwe1eIarrkMmttc6aAJk8lZijZby3wB8gIDEfyNa/ifX9bsLtJNUT+zRM4S1lf93HcKvH7sng+o25GDzis5Qr1LSUXtt8/wDg/wDDG8E5O8Ud/rjyjU1urK7MUjy+ZJazMGilGcA704Y7s5IwMZJIr0Xw74sj1C9j1DR2jj06xSGDHmZRWQFXJ5zjC5wcnkAetfIMU9zc3i6mim5sLW0aZLlW/dlpGJMDdmdVU4TqQSw4Bq9beN430uLTLCfyvtb27I6nOWJLONvGCGHzE8ADPQCodTEQqRcdNmv+Cb0pTjNOJ9maob/xJfvPp16VjkZVIIBHzc7hg5bjoOPfvS/8IrILWSy0y5Es8OQ8smMhz1y3Tj0UcV8qfBb9oTS9S1OXQ7+Tdd+Y4AD/ACoASADjJ+p7n86+mpfEc11qFroOhRsXc5MxxsZzgjbySQM8np7mv3SnCapx59XoftOAqOUI8z1MifQpLGeOLUsIv/PVYuTgZIIz0PrWZqmlfYrhLiCbcFG5Hxgp/skEgsD9K991Xw5qsUUFxdXgPyAFACpz/ewx4/EV5p4lik0xFjKmeOU5dlI+UfUg1TutD0Fqc7o8VjqMoW5EYxgBuUQsf7wY4J/X2rfj0e5gUm3lLLGwyMAbvcHGcc9651JLeFh5rx4Q7gAMHdjqCADuOOwrYfUZLdVSOITNIuSd2NpP3ufp14NKSGcZ448A2usK7apC7Da4UryqjPJI68nv7elfDMdjP4N1z7Jd24mRJmWNmBxszwTkcen0r9DLjXr2e2lJUMxDg7ckAEcY6YwM4/A18m/FK0nv5ZGhgIC4k3H+8ACFJ69P1pJLoJytoz3/AOHnihp4o40YRRxYC5dVC5Pf0PoBk/oK+gX+yeIlcOpV4wMliF+YDn5upNfnv8Mddm8Pu+jtbLNk+YJOu1sZABPPqM9c19taJrGm3yJJ5rRSYWMRxkYY/wAQ3Y59z+HatIvQxqN7oi1Cz0vTrYLZXIlkJCqv3ifp/UmuHu1F1I7vKEjLL5mRw3sB6Z9Ote16fpmnTl5Hgyr7iwY8+v69gKhufDrTxrcJCEt1CtyB+HPpx3pTpPdGcaq6nPeHdetLCe2tY49iq4RDtzkY68DoP8+/1AlxZ/2VG7S5abORnHTpz7dq+WL3Omavbx2kQkkDptJG0ZJAJ9ec8e1emz39xoukOl4wMseCQedo/p9O1XSqWumE6Skro8N8T3a6l4wvNDLmV0Xed55wxAHGeePWuS1PU5rJpXtsKjHy4F2g5/vMTn8q4TwT4pk8U/FzxNNeMZbOGIIRGcE8n5Q3rjJ46VmfEHxn5M507R4Y0htUwqRkk475YdSKxdrnRDRI9PivN1s5ZUDuMFgc49sDsOuPWsq2uJBfRW0MjxwL8xeTgbz2x+uK8a8O6+0tykkjmZgoOASdvGcD3J9K+jfBcEuqS7p7dyCAoiIzgdzt/LNTLsK/U9M8K2MNqiXl9OrMkfbqxPJPt+Fe0afqt8iCeOYmQgBVbGxUb+tcb4e8KSWQVyF8lipVWyxDHr06fT/61dfrdrcWGlm8tIfIkXP7p+Ucjqvsx7VdKjJ7HnYjERW53Ona1fxNm4RQXCru3DJzx1rsl8R6mNQtLWTaIp9xGOSGTH+NeSeEbrStdtfOtWbaRkA8qvcEfQivV7Lw/HOsN3e3ICWwLvKx2qFI5ye1d9GjJq6PNqV0ek6l4kt/D3h+48Qamw+z28RfjksQPugDJya+EvEV3N4i1bVNa/te5sbkSMQVhMyTOI1YJ5LfKz
R8c4xx34ze+IPxxHi7UtR8HeGUkGn2JEUUg2+TcNtYOzyEfJyPlwcnn3I4ix8T6fpVsmheIruS6kbEgdCRawE5GAwPzSZA7FRjuea+ZzXiGm6iow15evS+1j5jEZsnPlitjrNO8R63c3sCxW8XmTRlXfyUjEoYbMOqsAGXbyR2ySc5Fa0+kvaaT9o1UW1pJYpclY97SSMkjKwIAAwSX4Jz8g6ivH9b+INpZ2ptY8T3lusyibaInWQqPLdSMF+vAJ7noKtT+Mo9Q0DT7bWL3ZcXaM8rxJJJI+6PcgIYrwTGOue3OK+X/tNXnTWt/lq9P+GOSrmMWpQtqeRaj4fuLa9XWtHeR0jma7MijMnyB42QY5XJwAOwyfTNREuNRsv7QuXewkEYDgoSS7A5YFiC2AB/Ks/U/GGk6foupz6DLJeT38iMm9Cs0ZXgkqC2QV64zyc5xV+88Qadq891omi3SC70xDIVkdkdjJgAFMc7do+ma+fWJSdpLXt/X9anzzgpuyP/0KEet2ui6NF4bnuhdWyTNLLJI6qJhG2SFK4/dqeg5HfnNalv4ke8vZrjW9PSIX0P+hpCi708uHdhiRwpXCn1H0qtaeGNK8Waxp/h27dLC6hmlt47mdR5Tvy0SSNhvLEzDAYggHbkc1syXV/PqF54cso/7PvoF8kSSlRcbo5CGtyQMFGC7e2cgdCMfgrw1SCvN6N/psfmMqbWr2Od8BX11dxXtzcSKTdnGxPn2GNyd5bjJAQ4HcgYNdza/ZBJbWL2qYmYySsvKrGx2CNCD3c5K54C+9cvp9vJpsr/ANoWkUUU/kvM0ZYq0Zb93tAAXcxYnjhVA7irunaoJNbsbN5Bm5R2KOn3NvzRlcHuck+2RzzXnYnFWajFnM+yLN58PrVtLjZrYWcslwD5qyDbHkgbSSANpUF1wCc546V57rv9r2mt6ffeTGsVrcXKTt/y0XZlgUKgcHjIH444r0lltbfUXhvJJD9mk2eZltygojuTG2MN95M4PTjrXN6jYvJaR69Zb5NJswUeSYBpCTL5eZgADnBG08r0yRyK1tOVpRV+/wDXqbyXNqc62vyy2mol490MUf2necJ5byI5BGehRx/KuCtL278V2k9haXEVtaPFLKsjINzSxKJFADABtygjf1we/Feu6VZrqdvNoms3SC6vVkihvwmDLgny1l6OrbFMeMDBOcnrVC28OmPU7O3vCX+yWsttBLK+8nKqWVig5wxPU9zgenRWSV5LWyLUeWLOT0bQLWHWrK3b7bqUEECy3MzEbJImGfKKKCDJGCAoGTnvxWlNqF5f+Fl/sy3nOnWJC3sDq0u+AnZJKXIxDJA5UM3AKsM52mvUrLTNZuNKutWuFCRzGa2iWNVRdxZpjJsQhuVXGQPnyR15rd+FHg2y0+31y6vJFEGpT+WLZCzRLHK5DwvnIwyn5mzuxjlSua7MvkqllPdp2N6Kje0j5vbwxb2Whz3nhDW5NY0vT9k6pLwqjMavHKRmIuUlJ3ZHABHHTUuvDtlaaA1zDY/ZZLm3QtbKWe42xqxV95+UKyyHkdcc5GDX00mi23hzSIlkS2tdNvBIJYordFSV8AAP5aKzYzwDxjPeq+taLBq8n9r3Ekc1tHELa8gjbY5ljxtTaeVyTsQk4Cg4xjB8+rOnL3oK/L12+/00NZ8jlzo/HXw1qMHg74gXUd+PKgtb51kTcWaU78gnuVxzjgfSv2h+Gfxd8F67o0d7pEZeWGIfMxEROBgYAJbn64+tfkz+1t4ZbQNX0/xjpEEcEeuKyOiNllktwu1mHUb0YdR1U1D8EPjNZeBItNl1KwN/dXLrlGZiDt4zsXGfxOBX7HgcTHFYWnXh1X/Dn6HlWLU6amup+2vhzxr4i8UXj2ttZrIFJaSSTeRgdhxjI+p9K7XxIBd2K20iBJGXlZBvJHfAwfxOKwvh/q03irwPY61p4itE1BA6pblWcr9VJA+gOBU8o1GwukN4siwysB5r5LMQegA57V
lONrJn0cNVdHms+lSRRmKZ38gLnCkl89cEAcdP8K47U9Vu9FsEUxsol+VRkhio/vDsGr6SvNOSS0a5U5jY7ixGDnuSDn8PSvmb4iaxDpMckVkfOLthmb5nBUE8Zz+HvUvTc1Tuc9c+NrVDHHLPh3BZthJzI/8AD6DC4z7Vyer3Mmq2/wBrZVU5HyEDAIAw2PrxXgPifxcEmMtnEVRZcfMcHJAGT0PFWfBXiHWtT1GF5EaSJcoSeSDggjpyOAw7j8K2UUtbnHUrqOjPX/CXh6NtQRHxGLpmRwRknPzADv1zk+1emaTb6jpLNplw7fbpMtCoBGxME/L/ALTd+OBmszwZp0s+ow313gi3AlBXqcZDZHrg9PrXU+KItQW+s9ZtFd5o1C7yOVRySfx24oUElc5ZYxW0PRtJ8RyNpQmgkMkiB4yTzhsANnsMDpXYWfiOe6tI7beQGRV2nkDrgnA5PHHbpXg2k2eoXhkSBf3E3zlFzgMDnt7Cr2rza7YRm6tsTZKgqmeQg4APT0zUOvTi9ZHnyxyva57jHFBd6/aHeG2AtI2ORkcke4OAK8K+P/ivUl0+XS9DmkidgQ5iPCLg7ie2ccZ9aveFPHWopfSy38XlSbPmbGQueigdz/L8q8C+Jniy01TU59Oid1lGWcE4Cjv9WPpRXimk4nrYCupKzMH4Z3+l+GdIvWhjLXTKZZwzfOc9QxPPP8vavIfGHj1ri5YKBAhIGIxnB756YA6AfjW5dPajSWMtz5S5JcMMNn1JHJ/lXzNrF1ZT372WnoT82d7uduPU+/PNZ048zuehW91H2H8Kp7vWr9GtgUKDCDczHdzlmxwOMCv0c+FHh2S1so73VFkd5AWMsfzbT6EdenWvzF+BXiaPw9Ct01wFgRgoAXcWLjngkcD16dK/UH4XeJV1adpdIuPKDrudAOOAM+3X+dEF79jlrytC7PoTS1sysTR3Csw5wUILgcZz0zVTxLCLiNrUHLXQDbeuJVPyke1ZHivX7jS47JrM7ZLnPybVAC4yT7EEg0zwoy38sWoSzgv/ABuTgAIAePzzXt06fKuU+ZrVXJ3Ot+HngoaTYy2Myqqxbk39AyH5sn0xkg180ftSftES+BdPs/DvhMmKzSZRf3bRhofLXBwzMCpRlzux0H449j8d/FCLS9DOnadGYIZy0DvJwztu2ZGcfLnGDnBBr4vkig1hZrzxPprSQ3IcTWMsqn99GxKqBsYcsOQrgkc84OPAzfPKdH9zT+b7HhYjF88uSO3U8m1T4m6XpmowvPMZF1yNrq3kjVGtnCMGDAbVPoAc/MOcANzdu/FlvPp2+CyuL+3ICeTGA8khZj0ZgnAyCXDE9sd60vHXhPR7/SJBE0OkmB/MVb633tArKMxIq7QYmU4U79ysOexrE0XwbpWieHJta0gMdH0qBJPLLl2KvJsU7CWO12PDZ4wSBwK+CeFw8Z89DVt3te663/zPIlSg9I6vsYh1K6ks7zTYRGbqVHdIiT5zRkkFh95iOMnLdB1PfvvBviKYyE+Kbkw6To7PG0gRWkKPGSltGSeWcZ2jkIOSMV4Z4717RNKuIYtPZvtyGVmmRo1VSWJXYuA67RuB3dSOgrlrS+8ReJ7WaWCdZdObN19ocCNNiHcIizEIWAyT1J2/NwK5aNCXtFOCenQ5p02pWPV/GmnaXZX82reHrpri1MruBc4JROiunlnYyAZAdcBehTHNeR+Hrq9h8Xap/aVkqyvARBdbuCj58tgy8NkcH3z+HS6D4q0+JAmoypvZmhttiNICzZD4ZiOMDt9ehrqvAviG0nuru4tAZ44z5MX7leAORsBySSTnIx3GK5qkdJOpFLm3tc4qsWm3Y//R4LWvGNx4V8TX2jpAgjllhCfNuLK29ccEFX+YEE+nvXR6ncad4g1AHUr2RbzTlivLmW2l8xm8rhSdoyHXaCVzjOMnvW1400Se6Z9Q0rSDfahqU6tI20IIyQUXac5DKNxIBGCBzyK43+x9c+
HetG4jia4e1IdZgrFmQkbg0jbc8ErtJJwMk9K/DKUZTXvRaWn4dT4SrTiqClp+p6xrGqaXqkVlqmiSGWG7AQocNCnnAzLtUcZZ1YMRwAQOK5vwhYSX80enXBEeoWASQqVY58wFmXd3UjC8Hpn1rsLTWNEga3+SG0F03mNGFfY6uPmAIyAY+pIA656iueOqm4vLW30+VVWyMiShUAzvxyhAOdxO7HRe2M14mKowd5RXXS3rrf5fieZUgr3RkarNbaZd3UlnZNNJCxUuF27CTllKD02naecAjPFdhpun3iajPrFhHNqiSWZhmAyvlIxUgyZYBgwwCwzlegB6c54d13RdG0nWtTmjZWy15PclWIjEreUIx94O7EZxnr2AqaTVTPbNH9ojjtbZ3AtwuxmRQuTwowVIYYHGBjkcV3UHKMuVLf7tjenRSZJJHDokzazp9lJNpiSyxyAk/NJ5YJB6jcrcKCBkck84OH4g1O8+y2mtF41gtGEUEkY2yASqc+ao6knAHOOPeuE/4TiPUX1bSLaR7Vrn9+DHDyYjtAic7h5jdguM5xzWC+sJYaVNZXuoK1zDbRxXcc+8wwFVPz5ZUCkcjGWQtjJJNY4LCT15dLmc6Ls7HqHhrxlBai80i9lIlvzutgSItu3cN7Nn5T+GNue1db4K8dQXWhRCyikgnhcuVL4OGx5hkTrtwM89gTwK+KbzVLrV7+1g0aFylztVGRs57yEOOduGA54Ga7rSvEA0q5McN6wkvlISOUAyhSFy0hwUwQfVjg4YZ4r18BhHS5ptdLffoKNI+ofGfj23hX/hHrGWXUrlzG7SeXviZZJVDiPcuVAA2Ef3jkda2ZfEhgzZz7LP+0LIzPJsB34BWSGYAZXa/wAwbA5PPPJ+f/EUviXXdMtdPmnsrYW1ukgWfUIxcXsseDHcMEOAqr92M8Lwcbia52W+n1vWvsc16tlpEMhnubzzUulij3HglCFLk8BMgFuuOxisnrOScF/X/DaHQ6LTsjr/ANoPR/DXiz4WajpNrb/atW0h0ujqMiYMnkjiIKFGAQWG7Jzxx3P5X2VhBc+JLOC+T92WUFRwFUEZznj86/UGP4ja7eXDWnhSaYWOQpgaaESSJg4Cbx5ZlKbvlxg9AO9fnZ8SNGTw74qmsbNJWt2YNC0sbQyPEeRvQ8gjoQOMjjivuOFJVIRnQm9tUfV5FVtBwZ+8nwi+IOjPomh6Bpka2ulQwxwxgY8yYqOfm4AH059hX0ZNLo+pP9siAgtoQSXHzlgOOD/Cv05Nfzp/Dn45zeFdbspry1a/aI+UqvKR5fYgZyFHrgZ+lfrn8L/jLB4t0OJGtjd7EV2WI/u1bA2ocbfy4Ar2MVTlDc++w1aM9j6E8Qa9bWcLizmYxDhG6HJ/h24zXx38RNYiuE81FjgmG5twHfBG5uecDOK7Dx2uoOZL+8uBpUkqkpGru77T94s/bPCjGO+K+R9Y+26tM8V5ejyF3BSpKtI3fPI4PH4Vyxd9zSraKuclc6TdeItZg0xiTGSjMQhJK9S2Oua9H8SaenhLTIZ9LJiutNKRXMSAtKmWBWR06rkEMG5XHHeuw0/wqdK8HXWraMWGpElRsXc6iBBKyZYgjKYLEHJA4HU10On+HtbudF0r4heEnWbVYFaBorhRGrWO4bmeQ7jKiSOUAJwVZRyBXzOOzKpOTjQei/F31/4B+c5tm8qlbkpytFficr4f+I+q2UjQWr/2i9zCJAFjIffuwwQEnci9SR69AK7+X4q3epSRaVpqNcXSbAybOW4ZcFc4U5GMkYHU01/hkNYfR5DNb2OoQzk3qWrkfZlkRDtTbtD8JnbgHLDOOSfm92vbHXr630u++VZ5YhcbiyGRZCrLIyfPECRgBuo5rgo5lWk7Xuvx/wAjxpZjVi7N3R6zf+O9Us7l7Pz7pFUu2W/d7E+7tDDGCN3QAY9aseFvEPiPxD4SntrKZ/trTtAztOfvQkFQRnAVwQMkZ6cnt5DrE+u2sc
2teJDFcSOkSyrHJvAVQQo3AjG5VH3uR+NdxomraToOnaXfxXRhtNV5VYogx8xt2AWDdsqpOTnGcdq8vGYmVScp38vmeVXxU6k3Js9j8M/FQRGz0nxDpq6ra6jC8f2of8fsEquA+SoKyEZAGfmx+dfOX7QVk3hbxzp0llOk0F4ufMVSActkAjsygjPNdlplxbX3irUNPBmtzZq17A8g/duu1WwiHcrDp14OCOtePftEeI9SvrfQdSuovJZJHCqUKAq4GAWIxyAOhx7CvZyHHVZTjCT0aPqeF8xnKsoSeltjP1nX7G3sfPZ3mlkHQJwPz5r5b8W6hHPqO9piIJDuYn5T7gAetel3WuXEunxq6Iu3+JuWwe3SvDvEshuSzxgOc4G3/wCua+6w0LM/QsXU909S8OeJriBre1ikyQyhfmxtXuPTPT+tfqx+z/4hsfDWkSa5eagFkltiArnA9en9a/G34ex3WpazZ2sNsW3yLj8epr6k8b6lrtvdW2i6XdG2W3hMLKT8rk4z/hVtKFRMwalUpNH6CfE79piLxHrVrpnhb95c7AGUHGHAwR7ZGP1re0v4l69oenLpKzLPfWwU3GHA3bz0TOOAp5OeMivhj4VeDZ9PsbvxNqkc11MZEVEt2HnAEkEqxDDKjk5HQH619AahLFp+lT502J7q4FvbQXiODGyNGp8qdMgBlLJgjO7Izk9PmeIc6qqfs6Tto2/w/wAz4zOa7hL6tT36/wCR63f6pc313e6x4ijluGURsnlThzbQblXdhwVBDcYB6HGckY1pILzV4bywl1OJ9PkhjKyWkhFwsikF3mQryXJOChIwcnGDnzfwVqNxp2pXGg6pJJPYXtmbYysyMx2gNE67RhVDqVGVxg4AwK6LSp0gul0PVbeGOC7gmMMlqrhdqNwJn+UfKwY5Uj+7Xx0MRKSu9ebdfr9x8snKL7nrn/EpsvDkGj39rHI2lI93NchASwUn5HkbgsTgEJjAGS3OK8K1vUdBvdOu9V1OeXQzIPOj0+LJM7RSDJc/LtRYlymMBSOhr15/EulxW/h+RLaa6/tiHz55XIEBkQYMYjPzbX3N04xz1wD59rPh3TZ3fWLIiSGJI4ozKBn9425olLHcV+YrwwzjnsBt9bSUYpbL+vzNfrLe/Y+UJfBNx4zN3aaTZ3VsSsk0Y8oq935odmDGYI/mMc7uocfdGeD2tn4bl8KWTLMqtNprSWgWaRdjT3sTW4O2TklE3g4HQ8YAOfpPRfDuq6cE8SXGoR3bXTQtcQvCqorctIUdm3spYIVO35AuQME1c8RaPo2qu2rtbL9s0+aFPs8LZDzhDGspJzxIoAY8Y7YJ3H1aU4JScZf1/wAAzlOzuz4TlbQNO1I+HdQSOS5kkFzaGG5Z1E0y7JcghfK3MoVMnhuRxXsuj/C2/sNKittK02S1MhWR5vtId1JGWXBBbdkjrgZ71p6B8Ebaz8TS6xqV0lxrbOFtIghkFioBZ5J2VmVydx2Keh+YjK4r6V0vTrG/so7pJWd7V/IWJ+JHCkfOcYJXHOcYrjr1ErQVncdWavpqz//Swr63iiF/p0VqLyVgZFuNmVcQ4Vkz2kTphhuPPJrptZ8HW8zaRqMcEWn+bawSgMoDM5/hyOcnbyT3NZ2j3k2gtNfS2aRxakDbxhZCSbgKvn5EnJClgVIOORnHf0CTTtQvryx0xlLR22wbgQ/zgZBGD65yAa/A1RmlKLV/mfmeKlJOzOAvLKW5jt7S2hkmvJhcAfKojmYy4yyN8oxlTlv4STkdotLstZ0qe5i1yAWt3cMwjtoG3xQhVYglwSgUj3bJIIzjNe1eH7YXsFw928kl3ZLLENoVXQyL88ig8tgcZ615frFn4n8S63BpcT+Rol7GJIvIdbcbY8rIZ5PuN0U7Twfu5BGT1YSh7vK9L/12NcJTc3yM4fV9d0zSvDi6feYsU02dDLHGwmluAojmWNeByzZ3H1J544o3Ov2WuWd1ZealhNqkYfaXJW
Bt37iMjPOSzBsHJBbJyRXA/EnwCmh6st/oJmms7WOL7bDEdqmU/McFmyGdhuIXOFyMevBW17eXN2ZBPHFqt6qmNZVeVkhBxuRRuVWb+DcwLcnIraWU6tp6P8DSaaemx6V4K8O2UmjXmoa7JLaRWW2FgXV3uH37ykSk/M7YGZD8ijnBwa5rVNKbxLJPonh/UBp0Wpzs0km1wkcRGETP3tiHGCcbiSe4A9n8O+EtDvtCuo7qZzDChZmMhBEsmCo28gDgkAkn0NJYaBoOn6hp8GoW8LW9rJiR1kZyxibcMsBg8Hrk4PpXQqsqSskYOu7tox/DHwD8P+HNLGrahrk89xbxmCeG1jAyrAbvLDZOXTKnjjJ6cY3dW/Z78LzXEWpW3iG8guL6NmUrFE0sbE7mGSDsAB7dSK9H0HUNXUL58BjR7iRYndlcbwMoufRkIwcckdqx/EGoXljqaapHI0spZGRWV8s27bnA+VVOcZJ/DivGx2Y4mneC3MJ1al99TwXWPh74t+HtjHqdgU1a0vI2B8+ES3AdiF3tv3qAScggYI6YxiuR8QXnxF1S0/4Ruxt1lsbcA3EI+S3ZiAW3Mu1c8tjYAcYwMZz9o6TqspSyEKmCcoqS7skrj5iFb+HgsPc1574h0HSDqNzr+lwPNBCjTB3lKqTgoxYNhSzO/C5GVUcnoPcyrNZyThM9HDYlbPc+RPFkGreELPRdPjs7wvqESXnkeVmFN8hCOxwqgttJBPPGe9eCfFaTWNX1m4lvBNG6zTLHNuzujVyoBKn5sYIHfA5r738eeCZfEthAvii5+y+IIbVhahpEKXNiqYhWKAMx8xPVDtIOch854mL4F6N4guo9F1OefS5dQgR5JZABHGUjBIGc7PM5AJ5zxnmvo8LjacasZJW+Z7uGnGFn1Py8Uz6ffq84YKTjnjDdj+dfWvwH+O174Y1uDRL2+NjDdEpuCgEH03nOP5+nNem+LP2YdBZIpNOmub+OHes7lishY5WIBJFP91uAwwo3Hivk74q/CXxH4ENteXyKoY7d8ZLBWHK5OByRz0r6inmGHxD9mpanvYLMU3eJ9vfFf9rowGTT9IihufLTYvJY7hwGcknOPSvHvgx4nu/iN4tutX8QhrhLdcLCj7FJkBAyewB+b8MV8OXM091me5cl/wBSa9f+DfjVPBHiOG9nTz7WRgJ4z0ZOueCOQeR71GLy1+wlGk/etudWYYirWhKNN2dtD9d7Txh4eu21Lw1fNG8OkrBGlxvAiunk3RyuoPCZfKLksdoxxkgew+JNUu7HwzpXgrw/bLC960YYxjZHCoIkdlbLkkbdgXA+8eRgV8Uab4W1S2jttUsmj1HQsfaoLuMhonkI2nI6r5Qc5zzu4HJyPT9O8WeLY9TgR4DJpdqJZTK8myRY0wFYZPygkkDA6cdev5ZWoOnUmoK11a3Ra/mflFSc6cnB7mbb6Lrel61dvKxZmkeUXEisWLEBMqwY4KgcEY3e1UdF0rwT/wAJPd6l4ks/NlggXdJ50sZklkICq6AYdQTypPFejaRr+nazJqes+elrDNL5v2cllaQBjGwBbvkbh2JB7DNR6r4cstftraO6ZLV4ZmuRPF914o33D5xksWBwF53NxXNOpOnJRn+H6maqOMrmH4g+GF14lW+vPBsotZJo49m/mKUwbvOjUnCxsCCBu65BHcVgeCYV0TS7PRb+CKeJLhljJO5FYsRINx9z6+vpWR4Z+IV/4e+2yX14ImWKaS3t5GKsr3O4ocD+LawY+mccd9/wfq2o+OdJuLvUHQxTTB4TEnyuHJj8xix4XcoAOeSKzrxnUh7ulv8AIJybjY9N0CG2sTYx3Kok62IiaVWIkADkkBu4bgYxnnrXzr+0L4fltPg3DFcZnvLW+MquSwbyURV4DdCMgNyeR9a9NhuNW07Vk02/gmeS5+eNdp/eKoU5HHquMfU1r61pmoWHhu80DxPAb4eJbe4heOQgyxJcJujkzztYtyAASBgkDI
NcOW4irQxMJtaJ6r87HTleJdGtCo9kz8srfXLibTo4pTyqkfMM8e+O9cxfeW53h1jyeo9fSsQXd3pd1eaTfjyp7SVonQjawZTg9aT7ckxUkbsHqTz+Ar949l1R+r+2UkfTf7P2nWk/iy2lvZhHbw4bLDjCgk5/KtTxR4h/trxZPZwxAxwT8FSCSN+Oo7VzHwsufsVtql5MnzLZTHPYfu2APrxVv4M+EtR1vxDHetHLcQQKLiYgYxGjAE/MQMc4rlrRUVKcuh01MSoU0+h91eDNZsPDfhyPQLWaa21KcB8ojPtTaAWJ3LxubbkHt9a+kLHRzc+H7OKWQXktzbSsJ7eFUeMReWjorHOMgg/OxA29sjHnNr8P/DGpTTi7huYtRljcwRlgINypvSNgoG5JWzsy2OeXwa9b8GeTpXgzT9C1HzLWxa8lZSEEJCqVyJOGHl72OFwASBxwCfzOvRlWm6tTZ3/r9D8txldVJSqN6tnlmn7NFtJptBuS+nxnDK8XnRRLGu9AhO5Y1bBOOTnKiuxtfGduskF3aXMTw3Vu9urgiV4iyMAnk5Llj0GQCDgjNW76CaPUry7svLksLpntrqJokVQPmMZZUUOAzLvA3YJUjHNeNaZo+tWPit7tLiGxOlwm4hjkQmJL6ZS0UazAbSsbbpf7w2cggg1xxoVHNSpv7+lt2crm3r3Or8VWwg07QVmureO602KJCLYgMQSSBKu4nkFhkDkBj0xXa6ra2TaVbTX0cxd7VFIVG8pUbcwIRjtyAfmP4Y4rw/wT4evmvLnW7+5S4urlCI5JQ5adISVyxeNVJY9GY7QBwTwK7M3OvXM+t6nDNcalpbRARpC27ypSSGMIXBACkFl3EkYJIzwpYaVnNrpb7mVyRaudAviOxnutQbSmeS1eFbaKKYsVHyceWGJO0Y56gjHtXaN9sg8ExajelEl2OmM5kK5ARcKM4ViNpGcg47GvKPClrbSaZa293FIt/qG+a3uonLxSvCwYR7doEUhVcgNyclc55LvFHiKKR0ga+eB/IeGOIo6KfMkLxkheF8ssSewAwDmpdVuTlFrZ6LRWZjLqXNNvU8GwQXuj3mb4xyee4zsZ/MJbYXVX+ZCNyEc+2BTR48mstSnux+8S5V45hnaGacD5V6HKsv06Z4IrHu/CmrS2s9+JZLu9GyQkjOd+0BueW474zya+evFck3h+G6uruOVra4n2lyNgiOcuM4O/kYUqBjqc9K8uFOpUny9P1MNZPQ//0/Lk1GW41QXVvOLoRRMogkYsq7jg8McAE43dST+del+CvE6XKrbXN2FktchBGdpCqCGwCTwGJABOcCs6D4ef2noMsllaM08O0RtgM77ywYsAc89AScZyTUuirb6FfLbXelJam3haRQQolkmEZUD3IbB59a/FJxkly2tfqfmDk2rM9Ju/EGmzi2vJb6SyEQjiV0JeVJug9vvZ479TVvxLJa2MzPpwS3eIJN5IJkWYKGySDwDuyAOmTxXD6Bqs8tpA8tmkUnlNIDJFgPkgOwHqjZz1PTiuT1LxBeaVqkskjfaDcblWMN8xVh2TsFOSM+leXjcTNPka1vchxd7I7eLVJ59Zu7bXLc2kM0SzxzNPv82J1y2IOAoBBBU8sPrxw+u2+jQ2Cww6SkQRyd8ZzcvHjhnfHPXOMAKMDtg9ydVtdQ1AyOqBSod5nTc0W1du5VIIABAwRzzTp9e0q3042SpDPLOoQvJH5k8qt9wsUwBnggdBnJGRmvVw2M0fOvkd6rpL3jzSax1rSrU2F7ZyiO32qZQMl8sQjMfdeenHtUP9s27yG2+xf6OG82adixyWVUK5JIbPfH9a9s1LxzY6VoxgvXDPdh4/mkMuwlcMWYKVByTjnjO3OSTXht9YWV5pvlaDdebcxzFvs+7eWQgE4AHbAOTwSMZqHG6fI73/AKscFlfTY9DtLy1hMt7BI09raBd8Z2kpLIn7pEXgjeepJz1I4FSeJNT1Kfzba4a3lur3mZ
C2XSRF+VV252IinCqBndz3zXOaQP7L0tNPMXlG+aJ53Z0ST7jIrtjDfdO3245FdFplvZ22pXMOnwPDa28yRQStFG6oCAI3LFTkvg4J4BxWinFRbXb/AIcKa5U7DpFeTQLS20y6a4aKWSO73jBWVET5P9pSHI65GKwW0W5sfE1nFcPNa2U8kn2SNC0yL8ofeUz91T3zkk9O43NYuZ9BhlkFyWjkYvJ5z52t3AVgdxY5LADGRmr+naimoWiXV/l3CyBYGDKMvlcKU7MMgjPQ9zWNLHUJbHZQVL4m9TX8Mm4Gjub3yLyEgGzQCMtIsn3NsTZw4XLNkLtIBzyRWHq9hb39/wD2PfoLqYR+dK3mFY0EeASdpMeVxhgWYcH8NW0K6trOn3VvZRyf2bbqvkGEqiSEsC4+7wqqDlgSTye9ZmoW+oR2slx9oWCbmI28QjA3OTgpvwArISMnheTgk4r0PrUJys2l8j0lXeij1I7u0n+3vqV5Y21lptghkgu43aOUALjzGL/JypPXJbOSeOeI+Imm+FfH+mzaXqavJBIAqvcGRpE2ZPngYwqjJHBGRjtyO1WXXL5La2sIRFe5D+ZEB5gTyiGd92UwdhUcEtwDjmueuLrS9TnkkWa2soLG2jEwME0jurnaWLHcyrvYNhWG0lcAAV0c0W1KMtfI3+uxg0on5Mr8M9b1nxjeaS6t5AaUm7jizANnCHIwoVnKLx03dDiuQ0jQJ5tHuNSdZI5Td21nbDGFaWYybskj+ERkcd+vSv188VaFp1vpP9kaHewzWE0kbs7r5y5d0G1Y5OAFChl3KQGOT1rwDxv4Vg1jxT4i1XUJZLrQLaTTprJ4yqhbi0WVFiyc5U+YZHYAAjKrzivtKWfRd1Lp/Vzup55Ft32L37MXxE07xH4DuvhrZ6jPZ+ItHaeWDcu6C7hEqsNhYqoZd+CGIyDn2r6R1ZPEGmXP2SJVRODdEKGMispdUUDjk85HTnPWvh3wz4V1LwR8KWGiu0esapZrdC4QgfZ0iugrSIWwBJKC2DnlFAIr3Xwh8RPiZb3/AIkjeIX+mT3dvard3a7lgfy2eUqQVJRWCR5GdrHJ4GB8VneVueIdfDyVnunv5taeeiZ8/mGEhVqupCVlfW/y2+8yvGkV94qKzfZX0+SSVbdWhcsJZJ18uPnPlgKqEseO4HJr2CTRbzS7PRPD1vGsen2OlS3ckcOSxvYIXyTkqV8tsBByNzM2PT1vQn0vWtL06RQl1DqCK0MOPKZAij5WPUOvIB6GszW5YdPsLuSQyaXrkBMNhPBjzYUZAFdo5CVkfk8cFeMZbbXjU6/PJRqKyT1/r0PKqU5Rbg+h4Tf+CNNlv7vWJYnt2aWR3tZtryMs6Y2PkbUePa33WOR8wCkivTfCulSS38cUVq0sKxiFVtiY4o0h+aONQ2drsy7nZs4zxnrXKeCNMkub2Ow1uc3s3nC4aVP3gl80Fy6s+SrEqQxzuJOMYNei6pqsGmRajH4fUx21zCPswcYdVUFSHBwOSufoeOenNXxFny037uhL5b3Keq2iXfj+DxPfK7RWUCC1tVYq88sZ3DzCSSAZMIQDnGcN3rbuNWXxcX1jVrZoLe9QXE8spaEo9o26Vi2cN8gZQBzlcHjFc94EsDNBcp46la+srsmPBjO0JEuZVjYHJON3flgMYNaXhLxjdeJfDz3ur6YLXFxeWHkH95stysY2nPG4qAjHvlsV3RwznByn3/r59DVJy1Pyb/aGtJb/AOLfifxHDbGG1vZ4pMkpndNEHGQvQkZ/L1rxKwWQzqm4jHQivsn4xaFPH4m1HUrywCWFx50rvEp8mQRASQsCepYZX2Oc44FfNWieHdX1K9huEtSovTIYWb5EkZMFkT1YA/d/DqQK/W8uqfuIp9EvyP0TAVYypRs9kj2j4cwS/wBi6yzksEtimcdPMwn9a+yf2cbuTwOuo6bqUUaB91nG80YeF5hmSAguNgBfBUk4IJBwDmvkbwml5pPhmW
+LCK31Gf7K5YZGFXLErg7kycN+nNfTnw98b6p4b0C28OWNmNVvUciG0aXbBJC2NyQyOduG3ZAbO0ds15WdVV7Nxi7PQyznEp0nTv2/M+29K8V6hc6Xa6hqFtHptuiCVo452edZFUsGCsxKqeMMJMYwNuQa53XvHbafb2V/aSCQWn/H3Y3DlyhJDrKrBSWDNgsB0JxjBrkLnXtP1XQdI0/U7OS1gnLwSocs7LeDcAXXLARSqVGQeCAa53Un8O22sanYDz7e40owRg+XkXEIYxqcgEP8gKkEnHGBzmvgcfUlZuMrnwU1J7HfeCfGqahfSul1nS7jLXkbP8k0KngDI4ZcYz6k+pr0vxLqHkT6WsNtDFA8P2q4jt1C4kuf3g2pjGBHwc53HrjpXy9Y+MbqXw6bOws4IIrpgCFHlrGwRgQxDAttG3oerE46Guzg8Q+GtUtbTXL1ruWO5WW0kYy8RSqfM8ss204y5CHA4wCOtcNOfLSUVpr9xnGbW50etXdzr93JpGmAWkTLIW8nIKAFQioeAgAJ+VuAPbrx1ze6ldSpDfyIXt3+ztbLMhhdUVWEk7r8scrDGTtJyoAFdRp+sWEcVvNDKXsGkjeNYyVllIwfKHlvl2Yr8w3HI4xwBXkfj7UrvS4NU1LS9KitRaLIlu8vLyAkDLB8FccbWYkdD6Y0wsFKHNN63N6DT36neeI/FWn6LI0XhecQ3tzIDbedKhZLp0+eE4VSVOARJjaAQcLtIrF1W+1DX9Is9SuLeIa0ygXNsx5XYcMUdeFz97A4znHXFfL3jOVvCWneH5dUlgVLy1+1x+czTupaRmC7ickBduRnb0NdnZ/EPSrK70rwtqeoXMmtukTzmO3Ro/8ASAH+fAVseWQDycYySMVrXyuTTq0477jrYZyXNE+oF1ZftdhpVzdbb2SCVSqkiPYio3GM+4I9s965R7uHxDrUFrdw7NPkuJmZgEkXDknADYHU4744OKztK8W6d4SsY7eVnv7i7e6MP2qLa6SIZFwCrZBZBgnJzgHjNY2teJYFIeyjaW0ilzIyDfH9rkYfKD/ePevIo0I+0Sin3+aOGCknoj//1NfTPHmi2EEeo6XdLcfa13DZJtDeW2WDDnnBzXL3OqjWtQh1bWLM+QJAY/JZjkPkMp6nb0O7tjJriI/A3gaK7EOlanMZVtJJLS2CCGBkyFYqRuG75v4m+gGK9G0SztNWv5vC+l3KpPDAXifBWG5ZX+aDjJ4XjPfIHrX4tBTT5k7pn5fKnyu8S7Yy29vaWl9PIk1xExWJHYtH5Rbc8khzwijhz1YkADmuF8QahNeSta2ypNc+Z5ouUfBJc4LYGAFAAVFAJCnirNpeXNzfvZG02zFjbv8AaC3l26wdIgoPzDG1855GPak0JHtjb3GlSjz53YXKpESHSEFSreYEBwcE8kqACBmtocvLr87msGlHXcsaxDLLBaW1jKLyxa3LSzQ5TzCzsrDjlWBTnPHOOlZKzpYWkn2dd6XodHZPndIygQsXOdm4ZXH58ZrPF1cR6ZBoWsSxoY5HDmMozQIQMIqDAdQfm6ZPueuHc6ZqRtja6IGu43QtuZtm5+M7F+UjgZyRjOKi0JzbiYaSZmWvia21LU4rGO3MGjwxb3JXzVEaDzNgPB+YkKXPOM4FO8Mz6bai6uIJnaWSSSV5n5yBt2mMZwiEEcAZ9TVax07U7SEyx2xs53QbUmKIcoQM7jkEYJznr071g6ut9aT3MVvAls8OHDxbZEixwD5eQU9TgbeOPSo5nyOENO5UZOzjE9ebWrGErZpHLJcTlGJBwieXu3OVxt5HGRgnPNdxp0sXlRRW9xvsNqPKrBisSISRIzZyw9Bjjrk5rwzS9RubzRXMbOz3DIsYOA0rtuDqO/3Tx7n1rs/D+qadpWm6jeMn2nUJIBFJEqgyhIpPMSLDHAAbjnrxUR952fVGbjzadT03WtanurVLmeyXbdxCOON2EZVPvKwHBLykZdj0XA6VnW322z
8N/wDHwkmovMiPGwxLCvzMuc/eBIAOM8t6AVf0HwhqN5oS65JOLa6kXzU3N5soVmIXJ+YZOeAoHHGOtbl/oOnalZHTLm8ujfQG1El20qiSXZIp3bsZCHvyOM9+vZOgk7S06Hd9Vco3R57FFe2ur2J0xJg5fdMd7uypycsCfukghSR14Az1zb7xBd3Gqf2VewebL5gBkVC32hFUujKACAcHlTjOMA113iF7+28QyWisY11CR3ZEO1Xw3BLAMWCr8oVQAOpI61xy+Ibqw1iyi0jTXa8tt4WaSZpkDTwsgLLiPhVY4znBwRxWTw/7tpCp1FGPLND73xJNcWiGYGwuVt47ZVBI2FUVlOOzFAWI5+83OTXmPiW61R7OC90q4ZSpWRjC3E9sfnO8AD7jHdtI4GMcdNy68KXEK3l7JdyO8S+UGcZWfzMpHKyBdwYcgY6k9COnD2ljfjTNT8Osrw3EUSSwYDC3IkdS37xnZgyKeV2gAZHXNcmFu4tx2X9P1GoJxbb1PWfh3rF3r3hXXb+ORLjVYxDZxquxBH9uCxuEEhADbEbnPOeDk15rr2vnwvrenaJ4ytfs1vbzF5DIjCMxSsd2wAdFyc+5x0rH0ez1ex8PaxLo1nOtwWgwzYl+0uxZEPyglVRs/mCetb/iSx1aygddclTUtK1TUxGNOYtL5FpMplEsbgM0eSrYGdoC5xlhXv0sN7VqNrPVBGhdmt4ju9POpHQjbxxWkcUltps0S4hMUOTk4yoZGztAxx9K0bV49D8NQSXUwkb7VKY7fhi6TKJJHkyBtwTlQB3Bz0rg00u1tPF2jaTrF09lp80l3asV3MfJ2jB2tx8qqTnHUHB5zWxLFe6h4stkjjFhpdjDIIlkHmOB5SmFpsZ3O4bcQOOQoHSuatFxh7zvbR/I5qito2ejeDc6FYSRm4meSRo5Blg8sySZkOdowuDkY9CQK3vFCzXtrZ6fNbw2yOZU+YHcoUEwYyWxtIUk14xr+sX2ia9ZGOH/AEG5tFuFZDvRZIvmbLgYwwHynvnFLqsuqzaI8tjO91M7hfL2llMUbN5jADsFTcxPABOeK4q01J6x3G8Q73avc6R9TuPD9t/omnPqGrNMWDRp5SSckqiDncSFOHwPyFdHqlzrd3Hb3GvxGC7kQFQy7owGUOY3bgMVDKCR3PXtXReCtVuNamnkv4jPavEouXaLESjYhyWDBh8j5HsPwrF+IHiDw617eTG2uNTvNInSGOG6YxWscmFZQgUjevIyH6YA6ZFeX9XTd5R0bM5Pne1jzw+KdbstZdradopZEba0p2qgDfeAHBznH04rq9K8VXtrpNrZQucPNJH5kCqmZHHmHbu55HXgdcdaoMuntJJfWUCX1zKsi25kZXWJkYBXbAGWG0ngbeOvavNfEei+KrTwpHaukTxrdR3cNxFIvmKJm2lgRltuQckjIx6V7GEVOEvZydzSk1fU6T4j6SNZvdN8Oz2PmyW9ltl52Sx20qJIQvBDN9oKDn1zxivBPGvgE+F7K1vbhmgsPD8ctsLgktGwiO/zPlOTNJMc7fRweQuD7fr+tavc+KbcavOLO1tBFcPMeC8dgvmSKmMFm+TvxnFYfioalceGPGt1rUOLRJrdtPiydqxxyYRtoIyQkg75O7OcnNe7leYThShF7f8AB/4J3YLGyppJf1qecaJ44j1vwhonipbdEitrqS0uEjjDIizEOsjqBglfmx/tc+men8LwT3k8kXmASQLIJmI+WCVGw7Rv1AwNwyeN1VdH1S2v77TtA0vSFvLTxLBK8cJAD7sjfGkwKlmSSPHzh3KnOTnnntS8cWOh2kuiaLp0tyNUCLqMrzPI4a3OFghZQihMgGQ4y7YPQVrXp+1k4x2e39feaVbzbhFeh9TeG9fstXZHiEd95RCt/pCxPHNZyiZTk4UbiMgsCByTjk1xet63DpWuFrm+F9aIB5bb926Engq6fMP7pC9wSM8GvB/hT4g0/UNS1nw/rOmyR22oQy
4VC0rrJHnZIFbnCsCeTg5I54x1GmweJtD8T6BpmsmTTbzUrW8ito7iB0mMx3Krg4OFVsrvB57CvIq5Y0vZ9jkqYZx909K03XJtRuNLs0WM2avN91RGpdSoUA5HVuMZzivSH8L2nh2xXQnzeTI0epbgGYqbgEY8sHB2DaQzDnrgdT51beCfFYt4tH1OJv7WljuGgvGZFlupRGrqBGSCY8sqxyN8xcHPykCtCyudQ1LxNqz+IPOjuk2wGQYCF08sxM4zuG5V2sD3U+tcOLy6VO+tupyODV1c9q8K6Vca94WhNpArXUN5JbjzNsSIz7DG8TOw2gYG0gHueMV2Nzpdx4ZkudK1O48/xIYY4XdjuaMRE7n3MgWR24UgAAjJG4YryTQ9UuLGzvDqVsxGnzWzuY/n8yyZnDTouBk4wxAPCkdMGvS/FN1J4h1MrcXSbr/Yk0sBHnzfIB+7fGETI3A56nGG4Fehh6EXStbVf8H8SqbWxn6/8P8ATvGM2gQ39lEIGEaTTSBW+zxeaoIh3g7VKDkcZP1zXy9feGb/AMN+MvEfiC701LeTS9QtrhY5Ufe6iRlnkUSYyjK5CY4PB7gV9fX2o2emxR6TbRLst7hrSZ5SJVMkQ2s2Rtyn8JPGOvOM1gXlhp3i2z1Xw/LppSC/dXmhgfJBmKv5sUsx+UEkkqevOB0raWNUYyp7u6+7qddGuoxcfM4rW4NH1rUtMswPKi04tIgG7BWRl27mwc43Y+Y/KD6dItIsorG6bwXqtoITPeJPKH/ghgbIkXGMkBcZz0Nej6V4ZjOnarp/lHfbTCfz9/Cum5CnI+UbG59SBUs/hT7Xf397aPFM08DQFpHwyEYX5gQThmUleoNfNykoRUqcveX4annKdtj/1fGvhiL3Wk1DQ/GEclnq8Npcw2hA3+cjIfllJz1X7pVhnoexrvvBElp4d1A6rZXUKI6KyQuGktYpYcCQu4O+NmwGHPLHJIFed/BaeZvFFrG0jFF+zYGTgbpVzge/evVPFVzc2dhotvaSvDFdXV7FMiMVWSNLmZVVwOGUAAAHgAYr8oxFG3Ny6WX4tn5rV1lY6Z9R0bxjNdaRNpn9kS6eJJJZYMjYWfBdg5O5BkZcE4ByBinvpmp28cd4zC50u1bzvPilVlLFQueOWLfdHGCcDrXNeBmYeJ/DqgnDX1zAR2MRCDyz/s4JG3pyeK9U+F8skWrWNtG5SG4lCSIDhXRoZmKsBwQSASD3ANeHg4Jy5X1bv8jma1seX6lBFrN/GH0swzXbBonRRiBUAxGOCWYA7nORk8cgVtW+imLQFug2bnHJiIjb5gQQm4Y3KoLY6HB9hVS3ZpvFCyTEu/2mRctydvzcZParMpP2HTo8/IWgbHbPnqM49cHFc+LxEuaLjonb8gqVLtJaHMvpE3h+5zcw/aWMaCFZ9rbiVLAqoAXIP3s8Z57V5ldarZC/TS7J5Tc2r72YhTcSMGXcEbnrg4XsAdx4Ir6Y+IKr/auhLjg23Tt/rGr42+IEMSBblUUTL5rBwBuDCXIOeuea9DCtSlyjhA721MninUprTRXh0O3jlw0Thy5Ibc0xDD5wP4sc+gxzWVbaF4mtPEflXc0ExjeQI/nBpJGXBU5X5Q464JPbivDr+6uf7Asb3zn+0HUmQybjv2s0eV3dcHJyK+gnd1YMrEFJIypB6Hyz09K6MZBUqff/AIY2qtRjZI9F0638TJoAuNdure/kkkJQnzFMYU/MDJ1b6kNirU/iBvDdtb38Flc38CszlUkSNQVZTgPsZjktuO084I681ymtzz/brJfMbDQpkZPO4c/n3qa6mllsrCKR2dFdcKSSBwO1ThsRbWSv/wAMKhinF3auM8QT2vxDeVNOR7C8swtyBC8TnzXjyY5g0olVmGNmOAR6mrvhTUJ9LjttF1wedc7TdIh/5bJgKcZ3LmMnDK4BAPfBFckIooW1yaJAkhu0O5QAcjgHIre1HUL9vFfh2wa5lNtc/aDLEXby5C
II8blzg9e9erHExlByUbOx9DheSfuteZ0fimS8j1+71D7ClxqUh8oLJIqyJAsR3qEU7DhWHzD14A7cVptlbpZY0e4CRWwdRbQZCGNFLrG6gBdoJZQeeRjrXs+i29uvgtrxYlFw1pI5kCjeW8sfMW654HNeLeFCf+Fg39hn/Rk0tJFi/wCWYcqfmC9A3v1ryoTcqc5yd+v9feY4qldPXYzLGRdJ07VdA06KW0WSOdXmgUxJKVkXfIzL8yj5j5eD0JAGak8baNcxeBdMgub2bTr2CTzJXtZvKuGjCny1J5yu0MBuwSAMd8++G0tD4t02MwJtmjDONow7bmGWHc44ya+X/j/cTvpugXLyM0s99F5jliWfZEwXcepwCQM9MmroVqnPG0utjy6cnKa1/qxx9q+jzeHNevLSNvtcf2eQMFPmN9qZ4yVDE5bHAJJ6Y6cVva1cyHXrLTIImjm+y25ui8hVhKEjDlzg7TsjQD1xjqa9A+DOnafdx38V1axTIJoRtdFYYEgIGCOxJP1rkfAVtb6h8SPEf2+JLnLIv71Q/wAplHHzZ44HHtXTWUlTbvvr9wqlJOTJbuS813TZ9NvQY7y0Mrrbtwv2WQbcIf7yM3Q4ODXMz+Irmw8OXOnIg8vW1+zTEE58nlpFA92Rc54PIPWu/wDE0EKWNrOkarI91KGYABiN+ME9eleY+IAE0ixVBtButaXA4432/H05P51xfV+SpGcXucdSCjJNHsvwz8cx6ZaX1hcRwtp0whiV0XEj+S0bSO7E42u+ARjhR611v2XwxeeF9YvLZEu/MujczSh1QyOzYYb+u7J4H90YFfPenxRDwuihFA8qwXGB91gHI+hYkn3JPWrXgNj/AGPbrng3k4I+ktx/gPyFbV8HzSab0TNIRu7X6ntF14cguNOFjY3BQSYtlkfA+ZxwB3OONvHrXlfiGCWz0+90e4jFtJZpaWdtcEfLkM8u7IySCSoIxnrjkmvoLxBFFHfaQiIFVrayYgDAJE4XJ98cfTivNPiqAfDugyHlpdR1Lee7eTbSGPPrsz8vp2rzo1HTrOC1M7csrIoeKbXSZPDEOowXdvFqF5pU9nbzXewBZTKS8xX1cr8ow2F4OareK9IKafe2X2lLq4u3RT5e5VkZwOTIwAIYrHu4wApOe1ch4yRPsmlLtGP3YxjsbhgR+XH0r0j4iO8Hhu/eFjGwEAypwceQp7V6FGs+WM0vIuMmeZWvhiXS9D07RPB+ipqN0JZI0uZRIjJHIqMwtsn5jIR8zHaOpOFBz6J4s+FiaTodn4q8VJZabdTGSW4tLd0gupmJTLO8e5Qo+bftDMSTjNerfDuef5f3jcRMRyeD5anP15rgTe3kmrWqSTyMrWcpILEglroqfzHB9RxXTl+ZSlOzXcdPESlK3Y5PV/M8K2NvrXiC6ttF8LiJfsWn2KOL28abIzdTEGWOLjk7wWH3QvOO18VeMNX1zStA8eeGJjca1o9lgWkXyxT2UZjkk2fIDmNG4XAyGzwQc/JXxQu7ub4h+KJZpndxq12gZmJOyMgIuT2UcAdh0r6F8HSSL4T8M3CsRKmp6cocH5grWsmQD1wcDP0r18TTaSnc7ZUrRVS+o+x0PTB8UrPxAkRitPEllP5F8Niutsy7mWVhuJZUHReV5B6Cl8TNrfhLXJYtf0KJHvZVkOUVbiJOFR/PBzIjrzgluxXGcHf0pmufCejm4JlxqjKN3zcPbyhhz2YDn1r0rVIYr/wRaS3yLcPDFCsbSAOUUKoAUnOAB2FeJDFS5eX1+482FW+/oZWo2BGsqt9c2oW8iIjt5DtkcrEEXIYAFMnOA2fvcVQ0bVl0HYdRUxOIfOlVeZFNspeMKMcqSoIU856ehb4ihhn8ZTRzosiRT6bsDAEL97oD0rQsIornwt4+ubhBLMg0pldgGYEz4JBPOSOK21pVFydfzRmlZ2RxOia+by1vLSQyLPCHmjKf6wS8rtcE4KMXAbAB4/CvRvhw15Zavf
apqUb+VJKYZZHG0byTGMJ1zwW7YHJwa8b8Bqsr6neSgPODABIeXw0nzDd157+tfdWvWdoNEtCIEy1yxPyjkkDk14mMrtJpdSpaRZyFtp9zpUM1vFbB4HLGR5HADInDMeeQSORznNVNVh069v0jsk8kXWTvUspDKQcHnBXqa7Gyhhl1fSbWVFeESvhGAKjG4jg8cHkVzFuANUsSBgtAGPufMl5PvXjV4py+78TPl0Xmf//Z", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Result: True\n", + "Testing image: _static/img/not-cat.jpeg\n" + ] + }, + { + "data": { + "image/jpeg": "/9j/4AAQSkZJRgABAQAASABIAAD/4QBYRXhpZgAATU0AKgAAAAgAAgESAAMAAAABAAEAAIdpAAQAAAABAAAAJgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAA1aADAAQAAAABAAABQAAAAAD/wAARCAFAANUDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9sAQwACAgICAgIDAgIDBQMDAwUGBQUFBQYIBgYGBgYICggICAgICAoKCgoKCgoKDAwMDAwMDg4ODg4PDw8PDw8PDw8P/9sAQwECAgIEBAQHBAQHEAsJCxAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQ/90ABAAO/9oADAMBAAIRAxEAPwD87LfS4rdAx5Peuo0sW5HvWIrGWL5eK29PtGfDDgigDauIAUyBmobaTyfl6VflbyodrVy892BJs9DQB2sM7bOuavW9o90w2DrXIafeguqE16DpcqJgg0AbdnpcaR/vBzVO5s4BuHY0691YxqQhwa586lJJndnmgDIv7FA+6P8ACq9lpsxlUrknNdHFGbhuRiu30bT4Y1BkXoKAOPn0+eCEt1NeOeKJZIJd5+9X0hrRhWNth4rwvXrA3kxY8gHigDzeO+uG710OmLJcyrvOKjfSzE2cdKu2/wC4cMvagDsorREjAU81RuLx7U7d3BqtHqZYhRnNSyWMt6pduPSgCH+1+gLVpQ35ccGucudHmgO7tWvp9mzLnuKANcNvwM81aWORVOKphDBINwrdth5gyTxQA2x1SazfBOK6RfEBeIqxBrlJrUO+7tV6GwJjJXoKAILy7+0ZYetYtzdMACe1adxsi4Nc7eqZQdvQ0AZOoauUGA3NcXqWsSOvDc1d1K2kV8CuTubV5Dg8GgCIXJfJJFL559RViDSm2
ZHOam/sp/SgD//Q/PO3V4kGORXX6dKgALVjWto8yjAregsGjGCMUALqlwPKOyuMMUksu7NdRdxHJDdBWSJI4n5oAs2doykMM5rqraeSGPBasm1lVxle9XmIVcmgCyZd5DO2au2xQnk1zzjb8xNLHcsuCOlAHotnsGMdq6B9TWGEqOteZW+pODjNbNu1xeZEal/UjoPxoAm1HUDMSi81Qj01513hdxNaTWtrYf6Rq11DaoP77gf/AFs1Rb4o/DLQ0LXmsRNsJG1CuTj0yRmgDKvdAmdCxTbj2rlpNJdG6Vr6z+0T8LOUt55JB3AXt9V3CvB/EP7QtjlxoGml3PRpj8oP07j8vwoA9wttLUfNjJFdlpkEezynAr4Yk+PPjwzPJE9vGj4wgiyFx6ZOa7TQP2j76KZE8RaYkiZGZLc7SP8AgB4P50AfW2o6chQlBnIrFt7codo4IpPC/j7wt4ztwNHvkklIyYmIWVfYr1q/d2s0TZUYoAsrpxusADJrqNM8L7owZCcVi6VNKgA6Yr0nSr5fLxJQBzs/hvyQTGuRWW9i8UR4xXq8bJOpUdTWRdaKHJz0oA+etaLxFiegrnILl3XBPFe0a1oduA6lQT615ZeaeImZVGMdKAOduVR/v9TWSdOBJfGa3Jrdhgmpooht2+tAHNrEI8rin4Hoa6L+y55CXRODR/ZF1/coA//R+MIEjtRg1ro8UoBzXN3m/wAvIPSk027KkJIaANe+s/MQlBzXFXNlOXC4xzXqMDxOvHORWPqVoinzBxQBztpD5QGT0FaKuGYDOaypHw+AeKvWyM2DnmgDdgs47gYc1KdI8w4hHyr1J4A+pqWyG1DPMdsY492PoKr3epy3JEcP7uNeQB/M+9AE8NjZWZLSYuHHPJwg/Dv/ACryf4gfG+38MK+l6GUur4jGBjy4gfcAjP6/SuT+KnxHbS7WTw5ok2biYDzJVOdoH/1+n5+lfJc0rMSzHkk5Pc0AdD4i8ZeJfEtxJcaxfyy7z9zcQg9sf41yRY4x6VIFllOFHFWY9PlkQtwNvXP9KAKO4Ubq1LfSZrklYmUkAnrjoMmmPo2pKpdYGdV6leetAGbkUZqRAobbKuB3xwRVx9POF8h97OMqvdh/snv9OtAEVne3dhcJdWMz28yH5XjYqw/EV9MfDj9ofUNNvIdP8dKL/T2+Uzhf30f+0QPvV8u7GHXj2zzSg80Afr/pT6F4h0qLXPDd0l5ZXAJSROnHBB7gg9Qa2LCyZCC7bsdq+Cf2a/H93omvyeDbqX/QNWy0ak/cuFHGP94DB98V922t4Y22lsCgDuraHbGHPGKzdT1EQDaDg+lTxXcbw/Ke1cnr8YmjBibLUAc/qmtD5lIBY1wF0XmcsF610AsHdjv+apW0vbhscUAcrBpslxztzWnFoTAhmXFdXp0UcLDite48hU54NAHNQ2aIgUqDUv2ZP7oqrc3oikKg8VX/ALR96AP/0viC4YuuB2rMi3bsr0Fb5t1I+bvWZMsaDCcUAalnfFFAzyOKkubrz12k1zhcxD73JqaJ2PDGgCzHDvkwK6Wyso0T7RccRjoP7xH9PWqmmwLKCzcKvLH29Pxqe/v1fESHoMDHQD0oAZeXxuflUhQvHHTFeO/ETx/F4ftW06wdTeSjBx/CPT8O/wCVZvjz4mWOiK2n6ZJ9ou+QdnRc5HXtXyzqWqXN7M1zdOZZ5O57CgBL+9knmeaVjJNIcsT61TtbVrh/m6Z6+57Clt7ZpCrvwjnqe/0rpYGt7cKsCbmBBXd1Pv7D3oAltNJSUKZ/lB+6voPXHtW2un3UbGCCJYoARmSTG4r7Dt1q5o6WwuRk7zGI/OkPIC5yQPds8V0ASGSdprpdsMI3bSRlwowM9hnA68UAZmg6PI91PanNwTaylscfOmcEYB9K9A0PTrSCNbPVbZIraReN8kaMMckbWbf27CsTTtUv1lRkXyIDnEMf35Af4iTjC+7H8BVy0upbKZMyK23gmIDgehdsl
s+wA96AJ9Y8GeFZ7iOSwmQvOCV3P5YbPAIBOMj0PX681zsnw8S5BsbpBDcR5KshwAO5BHBxjPU4ruYLWa8liwrRod20Lk49SODxj/6xr0nQ/DatEPPjMjKpaFmRsq+3HfPUHvQB8q6/4BOnyq1+ZopHXPmKiskv+0Oep749zjvXmV3aLauU3E+mR94eoIJBFfd97ZwWLfZryFPs0mRsZSgDjk42857/AP6q8U8Y/DOG4DahoIAjkJZ4A24bj/HHjOM9wTz6ZxQB4HpGoS6VqdpqcDFJLSVJVIPOUOa/VaC4+1Wlvf2/MdxGsikdw4yK/KW8066sHCXMbIGyPmBBBHUH0I9K/T/4P6hFrHwt8OS58xo7fymJ67omKc/gKAOxhvmVQgyMdqbPciVCGzntU10IomICgVkTtvOegoAtWaxjIk6VduUjEJ2/Wublukix82aq3OpMyBQ1AEU18bdyATis6bV5Zh1rOuXeRueahiT5sYoAfJc7mySSaZ9oHvV77MhUFl/Sj7NF/d/SgD//0/jB5F24NYF1KAdq10k1uTkLVBdOeVumaAObYM3vV+1tZZDz1roU0gIQWFacVrHbxtNjOwZ49e1AFG5f7Fax2kZ+Yck+pPr9K8W+JXj8eHrT+ztMlVr+6RlbgHy1YYz9cdK7PxZ4hh0DSbrWbxsGM7Yxnl5T90CviXVdUuNTu5tRvG3STMWP49h7CgCjc3LZaRyXkc5JPUn1rPRXmc4G4nk0mWmkyRkmuhsrV43URANjG7uM+nvQBa06yyUkvPuY+UYyT6AfWti2tvNmZIo3IzgtnlmY4CjHcngAcmugtNKaeTkhnk4LHkjPcdPpWimn3nhidNUv0CohP2YEYVX/AL2B1YDke/50AX20+00m3bznTbbAHYmSJJf7pI9+CcnCg4qObFssNxdlmkuQXZW43PngkdkUAbR9feuj002OoWUV7qUYNvZq0mxMB5H3EncfT/AD0FJfabNftP4iCqIX+SOTKxwwheSNzdAo/ixyeFB6kA5iS93RyRYIGQHb+8RztA6nHp+ffO1ZOY41kuEWFFBIyMk/QDkn2H4muTk8Q6fHL9h0KP7ZKODO2QgI/uKe3+03NOigu2kSXULg7n6AZLHHoo5x9AB70AehQ6qlzLCuxh5fA81j/wCgA9D2Ga9L8P8AiGWynRGjt5I8EAFDuQ44PUj868Vjs7qWNTa2jzEdGlJQf98g5/M1saZY6wrq01qiluPlGcficUAfTYjTxZpTSRCIXMJ3EREZIxzhSeo9v0rye7n1K0uHWEojR5XEuSnHUYJPX2Fdf4JWBGSC9tCiufvxttdTjquRjj0PFXfGWlrDcp5u1jjiRV259CSPUf1oA8S8Q6KPE+mXTXVjaidFLI8TSREsPUn5Se3QV6d8AtTn07wdJoTqyTWNzICjjDKJPmGe2OvI61zN3bQ6YWmFz5byjiOTK7z6bwBnPvWNoetReH/F1lfRl4o7vENwowynccDIB6g89D+tAH1JcXUrks/U1mPcnOD0rYuI/wByd/y8VxzXSJKVY5GaAN6GGOVPMPOagu7QFdyjpU1lPCyYzkVfnuYxCVwDmgDjprdg3oKovMIMHrXQXKh1yDiueu0CqT3FAF6HUYwg3DNS/wBoxf3f1ripZpw2FBqPzrn0NAH/1Pk3EZbHrWraW0OzccZrm4nZhnpVr+0GiQr0xQBfuykZYLWU7u8PlJ/y0JPXH3RVSW8MmT1zXjXxd8cN4f0mPRrCTbf3qtkjrHGep9ie1AHk3xc8XLrWprpFk+bTT2OSP4pe5/DpXiE0hY47VZkk4yTkmqI+ZuKANLT4g0oypbPTFeo6XpMf2cFVx7d/zFcrodgzOJMbAO5/pXs+haYdQu0hjyg43dsUAdh4F8JJqDxSIqkBsMD0UD1z/Ou+8UeCIdXsBFdAnao8rKZROSM/U449s9uvpHgvwrFbwOiwkKE3N97r1OBxxg8E+terS+Gra+tv7P2+XHuDr
Gz5XJHXA+8fYUAfBE3wj1aQtai+uEtDsIAxhjn+Idxj/D2q7P8AB7UdWmW0utQnlt4sBISdsa59EHGfU9SeTX3tZaXZ/Yfsl3s8wYc5ALHA4HGcDjAqrbeHFfUYr5nW3tiQAAMvnHIA6k0AfI1n8BtS0u1jFnH5aLyG8tQ7e+48mrll8M5LOTc9uzMTySuST7nqa/RC50i1awjljgWHAyWuCN7DHUIOR+PNcfNZxKx23UMa9AXj4/SgD5WsPCEY2iVDHjplMV19l4SsJMRs6AjpuGM17m8WoLzaS6fMegD7lP45FYlzHrZYmXQLa7A5PlSKx/LINAHL2Pgeyi2O4+Q8ArhhmtfWfhmdXsch9wQblbitW1uLa2jP27S7vSeSd6gtH+PUV1+kapC8Oy3uI5UYEZ5Xr6g96APhrxTo+u+HLoxrbxXkCg5injD5HfafevH9UtbHU4xqWiwiB0YM0ancqsDnjPIH6V+h3jLR7O9RhMwfb02jkelfD1/4EuPDvizUBDKgs7xWljbtGrY3jHrknA6CgD2+613zdJt5vMD+ZEhLL6kDP61wE+r/AL04PGa4h9Qlto5LaJ8242hCDnOOpzx9M96zJNSMSk5zQB65a+II48KGraTVhKo+bIr5sHiM+cYx616R4f1Hz4xuPIoA9IkvjnAPFQGRJThjmudvpJY1Eig4rnB4hWKYKxxQB6G1oJMEcU37B71iWviKMx9c1Z/4SGOgD//V8N1L4V+IdJjLtHvUDPQg15hd2UyTNbzKUZeoNfst4u8E2MtowEY6elfn78UvAgtrh7m1UKyk9utAHytc+XZJLcznbFApdyegCjJr4J8Va7P4i1681eds+e52D0QcKPyr7G+N2pSeHvBc0P3ZdSYQL9CMsfyFfCre9AEEhzVrToGmnUKobvz0rtPAHw5134i6nJaaZttrO0UPd3kuRDBGe7EdWP8ACo5P619W6J8N/hDoFv8AYv7LvNduVA3XctwbcFu5SJAQB6ZJ96APAtBtyF2Mn73AyR/CPpX0R4O0u1jSOUglmOeADnHbnkn36VLP8LNOmSfW/BtzNGsI3TWdxhmRCfvI4+8PXI4rc8NJcwsqRlSF4IKgHPoT1xQB7lpz/YrWF4rZ4SJlaXc+5mkYfMPXcfTtXdWl0YZp7lXDtceYwwclQpACD06EZrytILdri2guXWIwneYkbBcE8ufc9PoPz9ZsLUW7w3IhxDMu2MA5wxY/qaAM+Wx+x30WoqDHHcgblwwVC2eATycdOprrLACC6jiS3JDHcGPUg55PoDXUafo9w8aMy7wR84bnjrgZzWwugJKZLkRCGZBgFewH8+KALt0A9rHAiqvG93A6A9B/+uuI1GHTI9xupSIxnBY4OfriutskaOKT7U5lZ8kZ4HpxWHfnVJMxQJGiIDyzYH1zg0AePaifDl1cNEuohB2V5AMn681zd9p9nZKZI7S9kU8+daTCRfrgf4V6nc6Pr1+rrHpVnqUfU/NyR9SK8n8R6ZbaS5luLO98NPnG9cy2559c8fnQBFpviSa0uAmmeKZbKXP+rvUwCfQ9QR+FdaNT8TJMLu40G01dX586yPDe5EZA/SvIJ5766UKJLbW4s8EEb8f7p5/I1YtLrR7TmXTdRsHHO61dsf8AfLAigD3y01XVJ4yq+EXgyDlpSVX82NfPfxij+zwQajcab9hLM0ZHDKwxn+lehW9zaXUCSM/iG6jHO0rhcfUA15j8co9LtfBdvPYi8jllu1UpeAhgNjHK5xkUAfLOoawZbo/MNvb6VQuNQDIVU5NclJM5mcA96cs5wc8nNAA0xjn3HnmvXfAF7Hd3QikbAU815WbKSaJWUc9TWp4evLrSNSD4+VutAH2Jc2VrNafIo4FeK67pcSzuIxjvXSw+NFFhsXliK4W91xrh2560AYT3dxbMY9x4pn9pz/3zUbWN5qLtJCuQvFH9gal/coA//9b7Bj8bJqNiTI+dw4r5c+J+qW5EjEitDw5qM
11pyJG5OR1rhPHvhzVdZhZLdW4/ioA/K79pbxH/AGn4yh0aBy0GmxKSO3mSfMf0xXzaeTk16j8Y4pIPiVr1vKSXhmEfPX5VAry9+e2PpQB92fDvTptN+EXhu3tdscWsNcXUu0AGSQSsilj1O1QAM9K+jvDHwz/s7wfceMvEFuyQfdgDDG9u59xXj37PdrB448D+DNLX71heyWEw9N8vmJ+at+lfWH7QXj6107XLfwTYqBpegosTIOA8mBvJA9+KAPAfCxvtU8Uw2tlbkR3AlicICRsZGHPp2NWNK0tLXWltriL7zYOPetDwn8X5NL1MWHh7SY4FvpI455mGXERbDKmemRXvOg/DrU21+a/uLdxAsjCNmXG4KeCM9c0AfN/iiyOk65cXd27hIigjY9/bA617x8PrfUdSe2R5BKgxnJBwevasH496PFbCx4EbSON/YcA4OOp716F8ANOvLkQ5j/dQ4IJHtwaAPo3RdABi2nAwM49zV99BjhuY3uI9qNncB3J4GPU12MFubaPaOCOSR79MmvKviR4zh8NxQvcsIxEG5IzkD0x6Z6+tAGZrGmWejzF7sBUOG3SMFUd+T+leMeK/ih8NvDk73Goyf2jcKM+SrbYl9sDrz6189fFL4xeLfEtxPa6YjCyX5VeT5QRnrk47V8wXtlqepXH2i8mklboRApx7/M+B+VAH0D47/as8QTzPF4UsodPtwMfKg2hQOMcD9a+c9V+OXjy+aSW+v5XVvvK3CEHtxx+lPhk0HSpfI1O0nBlIAaY71+nHArpx4a8NXqZjgCBh/DQB5pb/ABCsLiZJnDw3BI5jbbz68cH8RX1doV3I+kQXd40iSzRBuDjIPTKnoa+fbP4LafLrUV7FqG2zDB2i25bI5wDnp+FfS01vBHYPyAVAC8dB6CgCvdeJptLjSJtcuLcjJCqM4X1GTXD/ABw8Qxa78P8Aw9dwai+phLicStKuySJwqgIRk5BHINcp4z0691q7iNqdhQYY54I+lcF43ZrHRksHcjzGQ49SoIJ/WgDyFclmc8FjWjb2qldx5Nc/cTeUQoOM1o2NyernpQB2VuI41AbAqG4CBvMQjIrCe8eTkHFV5LiRRvDUAb76oUTG7BFUba+nlkJ7VzpuzK4BNdJp6I+O3vQB7f4QSF9MyQCc88V1flQ/3R+VcB4WSeW0kW2bhCM11H2bUP71AH//1/WPhj4Ze3sEa+jJYAcV6xe+HbS4iLxJsUDkH1rY0W3tbWER5AxxxWpdyHyykKZzQB/OV+1t4XuvC/x38RwTRlY7147mI9mSRByPxBFfNTjHWv3N/bP/AGcdQ+KnhOLxd4UsjP4l0JT+6X71xa8lkUd2U8qPqK/Dy7t7mzlltLuNopoztZGGGUjggg8g0AfYn7FPi8aN44vNDmb5JPI1GIH/AJ6WUgLfnGx/Kvof4iWlxe+P9XmvDvM87yKeoIclgR+Br86vhN4qHgn4jaF4ilfZbQXCpcH1gl+STP8AwEk1+2fhP4ev4k1C31p7eLV7KEpHKrn78Eg+R0b/AHe/tQB8t+FNHlt9esPIsxcSPKpXfwgycKT6819k6v42m0z4hW2mS3AljgCQED7oCLjP4nmuMv8Aw3FonjGW08LahDplpC5Z0u227fXa7Z7eleM+Mtbh1D4gSSaROtxGsgAkjOQ+OpH1oA6n4+SHXdT09EbKwZ29/mYZ5x7V9L/s+x2sGhmKRQGwoJA656df518j+LXuWuLaCZsTOqkHPIDEjGPoK+p/g8VsNHk2MWBZWHOOo6E44/8Ar0AfUtxF5VlIoXccHBUZOT/OvjP4peHb3WbgmVn2AnETo7D/AHsY4H419mR38EenRzXDBWK9ug+ntn2ry/WYzeXMz25VS+T++YjAI4wgyx+px9aAPhiD4dS3bSXMkEAMI+VmjJAx32Ma8H+KOpa3pFheRWswmnt/ui3iWFMH1wMjHfrX3r4hi+yQPp11c26yshGBkNkk8jLdvWvknxR4Q
uF1GSaK8iLt12sefbvQB8gS67qF5pzXmqWrfZ5GVQryFy3Hz46DjsR/Ouu0p70wxm0U+QQNueuOwNevv4YUOoYieQ9ABwPc8V0Om+D/ADcQBNzZBOB3NAHE+HdMv7y4QMNoY49qb8VfEz+BoLa0kQrJcguXblVUHA9MknpX0v4a8CSqhdotoU5wOAMVwn7S/gGw1vw7pl8VZWRmt5CgB4YblJB9CMfjSur2K5HbmtofIujeINO8Wb5rDVbgXUfJSQHy2A5KhQOvpXGeNNXl1K5/eI0cduNqq/3uOpPoTXpTeGdP8F+GbCfTreQzzXLkXT8K/lKNyBR1HzDJ+n4+Y+Obn7RG96YliZuoX1xTJPJ7m5DXP41oW0qDnnmuYVnacn3rQWRl780AdVHJvWoJn3MRWbbzsYyAac7vnigCnO2yTg4xV631loYu5rm76d9/pioLWG7vj5UPWgD66+Ec/wBt0Ga4bq0n8s16vsWvNPg3o9xZeFzHORuL54+pr1z7G3rQB//Q+preV25TI/CtVL2aI4atLQbaCdQsigmtHU9Ht+q8GgDNg1cDAavlX48/si/Cz43JPrFvGPDniaQZ+326fLI3/TaLIVvqMGvp8aSwbKc082Mq9c0Afz6/FL9jf42fDOWe4GlHX9KiyReacDKNo7vGBvXj2I969I/Z3/ap1fwNZp8NPGkUk1nxDBPys0AB4R1OCVHQHqBX7iR2/kHfM4RB1LEAY/GvmD48+G/2c77R7vUdf0vSbrxAqnyZIFVLkS/wsWiwTg+tAHh/xBudHutJt9Rj3xSyKHVtp+bd3Oetec/CDSE1fxZLAf3giAkHsCdv88V9eaHo9vqPw3t7SVQVaziyGAb+EY61ifCr4ewaXfw6lbKMvbyo2MDLBwwyT9KAPn74l3UUPiBlt2LGDEZA/wBngcj69K+hPgNriXk/2O4bYrcDd0zn5Rj+VeZ/E74f3Dak8tpCfPkcPjOM5yCT7nNb3w0guNIhQ38ZhnKBck4wUbj9KAPsm/MMcoSX7iZ2jHHrwD1wa8w8W6sZISlniJo8lpjztPYAd25/DvXVJqS6zpnnLKPtCqQ+3k8cA4/rXkGuLcOywNkbcnaOfvcAfX3oA4nVdVeBJI7NFRm+8xJd5D0yc9fr+Qrzm48O6lqE7lpyqMegLYBPoM8Zr1k6KblN4Q7lwTjgfT3NaUNhFaI0twMd8BeAP60AeOW/hEWJwBukY4DMTn8K9R8H+FrU3MUMO2S5kIGByea8g8eeMBayNb2L4dPvEdRn3r2f4Y+MLLw7oNl4kh1210m9tAkgiuIvPa6Y43BuD8uOg4xnOc9ObEYqNO1z6PIOGa+Y83sWly+vXbZM+kPEfw51DQPDcUohw1yrHjqOOK+Q/FKzax4Zk02OHzpYJEZlPX5Tg9PTv6V9CfGf9oyPxJolrofh3VorgJkzSQRmLduAO0HrtBz/AJ6/OOh67DaT/wBpyYZ/KYAA5BfP8QJ7ivKqZhF1lY+8wPANdZbNV9Jy1S6q3f1PGvjVp9vF8N/DQkj8m5tb+ZfLOPuSxZJyAMjKD6V8GeN5jcJ5ScKtfoL+1JqdjFc6PpFmQIvJa9AB7TgBfpwDxX57+IcSlsCveTPyGUWnZnlKRsJOuMVJM+CMdqtSxhJWqjPjBwc0CL1nLgEZqxNMRnnFYcE+zvUdzcFzu3daALFxH5h967PwLZRy3ISQZLHFcFBOG+VjmvQvCV7HZXsbNgqTQB91eBvDoj0ZRHtUHHeu0/sF/wC8v51y/gvWbebRo3jYAcd8V139pxf3x+dAH//R+ydBl2zFc967S/VTHuPcZryNNestLnZnPmMP4VrB8UfFZrWwluJXSytoVJZs8gDvmgD1e91rQNCsZL3WLtLZUzwT8x+g618S/Fr9sDSPDRltvC8e914Dvg5I9BXy98W/jvqWvGdNPaRoXysK5O5x/fb0HoK+DPFl/qV3N++lP2idwoGcnLe1AH0j4t/aI+KfxKvZL
TTrm7uA7bVjgJABY8DA4rZ8JeDtbuL2Gz1m/wDtetuQ0kCP5nlA9PMbJAbB6D+db2geHYfhV8IP+EijQ/2rdRZjJ6xhxkt/vN69q7L9lbyNR8GN4m1AYvtQ1MmaQ8sVicBRk9ABnigD7x0vTZtE8PRaTcfLLbwLC6+jIMV0Hw/eIpHCzBWjLEj1GeR+tbHj2zax8T6zajjbMWHfIYbh1+teceGNWXTNUeWXuScfUYoA6T4pabEl6t5GvyOg56ZI9DXzn4g1NrVxt+TaPSvqjx7eWesaRFHbj98ibxjp718P+Nb2VxJDEDkHn8enH/16APQ/DXxEurWYbbjaw4OehHofYV2sniOw1Cb7Szou0BmbIIzn+or41W/mRG2MVzwfU8/41e8JX95P4kitpd728AV3x828/wAII+vOD6UAfdhlsrazWW9kCfLuII5yR+GMZr5j8e/Grw1pjvZWcpklAwVU5Gf8+ldT8QdVi8OfD+51nWJ/+JjqYMNnbgkFVIGWbk5z1J/pxXy14V+Desa5bQ+J9YlEEN5IyI7DPQDkL9TjI78etAFK6+I1zfpKkempI8x6kMT7V53q9z4mulVVeWCFdxEauy9PTuK+4/BH7O1ndqou9UW2cOishjyTu6NnJyMc1b8X/BjwDpyBJdVeKQqRuLouXHHQj24+tTKCe6OrDY2rRbdKTV+x8EeHPHTQ3celXLTbmJUsS0mDzySOfavTtL1yS01+y0fWZntPOP3mJIx6jHH05r0C1+BXhGO9S7s9XDIzbi6lXxk8f/XqH4+eBtM0Sz8JeI9CkN1H9pNtOwIPGzeDkdOhrmngaUndo9uHF2PjT9mqmn4nG/HC9i1rxpeTWr+ZBbRW9vHjlQsUSjA/HNfLGuQ7NwYc17dqV2bnzJ35ZyWP1NeQ+I/mUlRyMmus+bbueP3TYmbis6Vcjj8q2rqLdL8vSqjxFcZFAjm3RhzVSRm79K6WWFCvpXPXCgMQKAK6yFTkVpW+pSwsCh5FZNKOtAH1P8O/HQj0TybmTDxtiu+/4T22/wCev618z+G7N2stykjJ7V0X2KX+8aAP/9KfV/GVrYAtNPsPcZwa+c/H3i298WA2KsVslbhOQHPq/sOw71Dqd+8rMCPm5yT1rhrm7KoWY84wKAON8Q28Njprrar+9kzvlP339s9h7V826DZpqvxAha4GY7SRAAem9j/SvoDxQ7vafMccGvDdOb7F4j89TjMisffGKAPuv4tR/afh4trCM+UqfTAXFM+E8P8AYnwvgW3AQjfNgeuc5q1cGPW/DSQuARJEDz9KzfBlyYdAvtIl6wBgq9OOaAP1C+JiG81601hCPJ1vSrK+jI774grfyFfLEzyf29d2oYosUTtnuD/+uvpmwlbxR+zz8NfHEY3tplkdNuj3CxnyQT9HQfnXzfdhDfahdkYhiid3J468CgDBXx9JPYTWEzkSQHbu/vDsQa8Y8R6pFcs0hz5jHBwP61zGgeIorrxHfWcrqYmyMdsU/X7CaBS8T74wSVye2eh9qAHx28VxEZF4KBs47/8A1+1cxYeIU0zUpoYT94qOmM4yP68VPHrSWi7JW8tR97OO/PTvzXGyWkWtazFJZy+XvcbmHQDrkdh/SgDoPEXi248Y+NdP02bNzFZrtSInIz1wfc969u8P/D74kawzySeIP7I09QCvlANIijOEVjwqkn04xnNfOd74XvNGu2vNIn3OSDuYj+gwfwpNQ8eeP7dDbPdiRnHRBjj1OMUAew+K9Mt/DNvOZ/E+rXd2JBgR3OxMgYLErjtx1r5m8Qa94TRWXUNWmlmQncsl0zsPTIyTmqNxP4l8RTvBcTSFTku2eAO59B+Waz20PQ9MlCfZ0aVl5OM/U8+tAF3QNX0fdvspby6jkOAsdwQvPY161rb33h/w5Dody0iQ3rrdxwSP5nlqyjHJ9z/SvO/CenWlvqc0sIWO3UbnIHp7ep6V9c23gjwXq+kIfFTvdX90FdrhWKPCccRxj
oFX0I5OSaAPkC7uBs64ry/Xrnc5ANfZOufs839/5g8Ja5b3C4JWO4zG+PTIBFfKXj/4XfETwjOf7e0eaKHPEyDzIT9HXj88UAeQyTIHYZ57VRkkBHJOalmVlcq/DDrVIoOvNAFS4nbYQDWNISxIrXmjB61ntFigCgRzShelTlM0BNp5oA9n8Hwh9LDV1n2cV5r4e1GW0svLXkHpW9/bc3oKAP/T+TNRuTufucHoa4q9nKj5uDitrVZSl06D+KuA1m6kjl2qxxigDP11zLHy3XFeTapCba6tpQMGUMfzPH8q9HvpWuIo4lPzuQv5muQ14R3G6WMf8e7DGOm1eP5UAfUngPVRf+HbdWYFkUA/gKv2mYL2fb0lBBryH4Z6uYM2wP7txnBr2eNGM5kBHTigD9Fv2L9atfGvwe8VfCrUW3Pp93I6gn/lldDII+jqTXgXxe1KXwd4U1nT787NRtZWtpsfKSy/xjPZhhh9a4v9lD4h/wDCBfG1NPupfLs9fQ20uem7koT7hq+hv23vhfqOv6DP4u8NIXuYEzdwohYzxr0YY53Lnr6ZHpQB+UXhDUZBrTXhJIds/rXtd3qZmh8pwWYHOPavDdGhhtRGY2znqe4r1TSJFuXVC24Dg469P6UAcfrlxDPEVmBGOmK4W1e8sLnzrKc4Q5weRivZdf8ACkru8/JVh8o+mCa8u1PR7uKVli7dqANmLW5b4eXdOUzxn1A9MdvbiukvZPDttpoS33NcMMs7nqe/0+leSql3ASQzKB2q7baVca0s09xeGzgt0LSStyqgc9OP50APudbezVxCw2dwO/1rzO/1q6uNQy2d0h7Dge1fQWl/s9eLNZ+Fd18XRq1vaaTA6KsNwkgml8yTy02bcrk53c4wK8xvfAuoWWm3uqy3kUi2IUsiIdxViBkE+meaAL2h6gNOtbi7kP3NpJ7EtgV9HeHfGM1/p6Fm3uB1PcV82eCtW02C6l0/UYxcW14nlSq3OUb09CDzn1rptHkuvCOuz6DdSFox80LnpJE33Wzz24PvQB9Paf4lmRwkpC+nau/s/Fb7TBOVkgkGCr4ZT9Qa+bYtRR3Uk8/0rft7+Rhs3ZA96AOo8Y/Bj4X+Oi9w1n/ZWoOCRPa/KMn+8mdpr5G8f/s++JfBaPqFuV1TS1P+vhB3Lzx5idV+vSvqiPxBPbBW3cLxXo+h63FfwMGIdXGGVhkEdwQaAPylk0I56ZrFutIZDwOlfpZ46+Bnh/xLbSal4SVbDUOWMQ/1Uh9Mfwn6V8F+KNNvtB1CfS9Wga1uYWIZXGOnp6j6UAeXtZ7T0xWdcoYutdK7KzVz2oEZ4oAt6fdyJEVB6GtD7dL61zMExRcYqf7SfSgD/9T4PvtVhmMbL95upzmuV1Vt/Oc1w2n64bmCNy53g811Us4dN+MhsUAYF7LPDCZ7bDSR5ZQehx/WsBbqK9t3aPlZUz9D6fhXQz9HA9K8oudXGi62bVuLa75GeiP0P4GgD1jwRK6MkgONoxX1D4dmN9BJuAHlRlmPfAr5D0e/Gm2+9jtBx/kV9JeBNXXUNKu9h274dv1xzQByWpeIJ9G8UWmtWp2PazK4IP8AdOeMV+5/hXxLafEX4e6drBIk+2Wyb+/OBmv59vFFwPtcgY8hjjH1/wAK/UP9iHx8ut+CJfDM8hM+muUCk9F4x+lAHzD+0d8Grn4d61N4n0KNhpF5IWliUcQs3JYY/hJ6jHFeQeC9XWSYGUgbcH6iv2d8ceDLbxLYT2l5Cs0UilWDDIOR05r8cPjJ8NdW+DXix0hRm0e5cGCUdELc7D/Q0Ad3q/iCNAw4KlQAT71xNqp1S7Kgbgxrj7fWBewAM2cDrXd+E/JhYzu4Tryf0oA4nWbUrd/ZlAAXjgdKqTWL6vquk/DXTwTLqbrLd4BJEOdoU4/vH3710mpX1kdQubuaQJDCWZnboqjkn8K9j/Y48HXfjHxlf/E3VrcrayTF4A4+YxphYlGenTP1oA9i/aimT4X/A
AT8GfDbTsIuoXfnTbf7lome3qz/AKV8TaPL/asl1pzklb2CSP2LFcj9a+sv+Cgl4Yde8D2ErHcLa9mYZ6FnjA/livijwzeyQXsFwo3BGXkfWgDzKUtpepBT8rIcH8692eAeLvDEVxbKDqekqZIiOWeL+OP+o96474saGlpq66nAmLe5USAj/a5P61o/DXVhZuu18BecGgB9lrxIRcjB5FddZa3wGPHvXH+NtITSdVF/YKFsr8l1A4CP/Ev9RWfZ3R8n5zQB6hPqQ3Fw3Wuw8Ha7scxsQcV4lDqG+JgDk1u+F78Le+WX2MaAPqaz1hlPmwHAB+ZT6Vy/xG8M+F/HukG01e3RZ5SEju1AEkLH7pzxkZ4IrLtNQELAnlG61ieNbmWz0S7kgJeMKHx6AEHNAH58+J9DvvCuuXugaiMT2TlG9D6EexFcLdSbia+rfj3op1iXRfFenplr6AQzEd3ToT+FfN9x4S1lfnWLcKAOZQjHNSZWrkuk39s2ySMg1H9iu/7hoA//1fxoivG0673D7pJ4r0/Rbt9V053HVOfyryTWkCzyA9jn867X4eXyrO9qwyHB70AdOPJljuWkl2NAE4xnO/P+Feaaz4asNRkWa5unCDcRsUD+ddtcqYdZkTkLPE6c+q/OP5VhXkTEww56Ak0AVoLWO3solSR5AAFG85IAr3T4dX+yN4gcLtxxXhhIaTaTkKOBXovhO9+zuoHHykYFAFfxWQmqTKo43V71+yZ47bwh8To7OV9lvqahWGeNw4/wr598RStJeu7nG705rN8PahNpPiPTNUhJUwTDoex4NAH9N0Mml/2E+vaxcxWemwRM8s0rBEVQOSWPAFfk78f/AIzeFfidqP8AYHhLTBdaMp8p7qQENOF4/doPuL3DHk8HivPv2gfj/wCKPi5o2meH/C12bbw3ovlxXMEZ+ae6Th2cjqqZwF6d+tfNWhandwx7p1xg84HH/wBagDrbr4dT2QMnh+5aYL/y63OElHsj8K/0O0/Wube6ntw9rdu9u0ZIkRvlII9QeleleHtQu765EdpC9yXP3QCQMn16Dr3p/wARvhZa+I76G88S+Jrbw1YwwpHIsI+1Xk53E42Aqq7RwCWOfwoA8V8PaDrfxb8RDQ9GV18PWcgN9Pg4mZTnyxjrk44/E1+5/wADvhxbeHPDmnQxRhfkDsB/z075+g4r8btS+OWk/CePSfCfwWsVWGykUNLeATPdf3zKB8oLdcL09e1fsX+yz+0p8PvjlYjRV26L4ysot8+muwIkjX/lpC3Vh7dR3oA/P3/goHepdfGHS9MBybDSkfHXHnSsf1218Y6LdNb3SjsT07Ee1fRf7a+rrqX7R2vRqR/oVtZW34rCrkfm9fM9pIrOoYYIoA9c8SmLxB4VDIvmSWowQOWxXlHhK4ihui6ZGzrnqK6Wx177A5358tuGHXrXLXSwWOqTXdowMEy7gB6ntQB6F4u1e3m0SHTgBJJIwkz/AHAvGfx6VwEVyqQ/N1Xiq3nvNuaVssw/Qdqz3mEeQaANa2vHSZl3YU9K6HRborfpn7pOM158k5EoINdLYOZDui5I/pQB7tc6gljHl3OFHeue1rxUmpeD9Qc8bEZfrnpXC+JtckGnW8THEsoxj2HesG8uGGjR6Uhw966qcegO4/yoA9Xmgj1DwJbW8vzNbbGXPbPFQ2Xhe1urUBlBJrZ8N6Xf6/Y/2TZjdIEHGPQivdfBnwl1syZvFwCMDigD5M1z4eWb3Csi7eDWL/wrq29K/SBvgBJdgSPuzTf+Gdm/2qAP/9b8YfE6hZRJn7w+lUPDWoNZatC2eGOKteLM/ZYpO6tg/jXI2twVeOTJBVgeaAPf9eCR3dldL08yMnPoxwf51i3cf+kOzdV/lV7xC3m6JHNn5ljzn3AzVG8k8xZZs8MgI/EZoAxkwXZ+mciui0S6KyDrxXOJkA8+h5q1psxE5APQ0AdHq775Ax79K5ueWWOJ5LYlJQCyN6EdDg+lbF0zP
gk9KyZCCCOtAF74P6va2n2mymnkZ5pFkeFiCu8H744zyOvNfQE3h6PUrr7fp9ybVJUAkiMRdSw6OCCCD2I7/WviHVYZ9Pl/tCzdoJo2wHQlT+YrvvDXjrxVd2bWz6xcHb/D5hHHfkc0Ae6eJ9evvB8dvpyavFG8ufkgUhlX+8+ckZ7c81ybxDWl3nVhcnqR5nP5E16H4H+EGj6/pkHiDxXcS3Mt+rMqI+MDJALPyS3eu31H9mfw5qMHmeFtQlsLnGRvbzEJ9wQDj8aAPiq9kj07xVYXEsTSWkM371xzk85wfUDkCu2+F+qeMfDnxh0Xx/pv2jTWhuvOtpMMgeOM5CEcZDDG5T1r3HSPA+jymT4e/EK2jsNYsnBguVIjE69UIb7rHB4z1HvxVHxPcQ2vj7TtDWIQ2fh2ITSueAf4mOfTaMfjQBg/G/xPN4u+Mfi7xFdDbLeXzHGOAFVUA9uAOO1cVYtnnr071yM+sS6zqN3qsrfPeTvKQf8AbYmugsXbbnrzQBvmT5iGGf51RlSzimHnqdsnocBW+lSxspkwSTVDVVIjLKc96ANqSwsosSJ5jIwyCCDx+Vc/5Gm3UrwyTy27g+gYf0q9pN4ZrbyH7cVjavZSLKLqDIYdfegB19Y2+nrG8d0Z2ZsbSu0/zNbGiSlplA9a4q+uXmvbdT1VCT9T/wDqrptGYi4DE8AE+g4GaAItVujf646j/VQNsX0wvX9alsrpbzxGjhsxWS7AO29uv5CuXe8FlBLck5diSB6sx4rc8OW9zbAN5ZeSQ5d+gy3WgD70/ZctLbU/GzpfDfmB9o7ZGK/STTtIsYGwsSj8K/NL9mWY2vjfTSpzksh98jmv06spWab2oA6iKGNUACAfhT/Lj/uj8qFzgc075vWgD//X/GDxBi50qVgc4Ab8q83hnOQpORXZwXbXekbTyWXFefoSrgehoA+ip5PO0GND/FGR+lYvnvLpdoD/ABRKCfU4xVyFy2lpzxtwR+FZmnqXsLJf4VQ/zOKALhQiMHHVe9VrR9s42nk1bkQ+WDjPX8KowAb+nSgDoSd2NxySKouoLe1aJjLRgjj61VlQKCOp/SgDk7/Tvtg8kDJkkUH8TXReDfh9p2oXstqt69ncrkLuAaNvTI4I+oz9Ks6WiyX3lvyV+f8AIY/nV2bWrbwpqcGp3AZYmO1iozjvnFAHs3hz4hXnw3K+DfF1oZ4bXmKeE5IjfJHDY3L6HgjpXr+lfHP4fLtZrm49dghfP09P1r461zxd/wALD1Nr3TrWVo4I1ijAUs+1c8sFzySa5x2msptlzAUx8uHVl6H3oA+kPiV4ws/HeuJq1jbstnbQiFPMxvOGLZbBIHJ4FeMa7O1r4R1u6mmeS8vdsIZmLMIw4JyfTAxiv0H+GVpoE/w90r+ykjaGW1RnUgNucqBJuB6ndkHNfGXxo0ew0yy1qCwjEUMczBFHRRnOB7A9KAPnXTbjCpnmu9sZB5SvXmenMQo5rvbFsRemBQB0MUwLgE1avIxLCcc8VixtnYTz259K24z5qke3agDmbO4NnchCcc108s29BIi7h3FcxfwFZd45Ip1je3KkKoDDP6UAZGoyI+tSFBtAVBj0OM10sH7nS7i6HGyMj8W4H865C4lEutXLfd+cD8gK6PXpJLfwjIIWw1zLHHn0HU/yoA46HGo3SqRvt7c899zf/Wr0rSnuHZII+EY4ANeeaVPDYwrHD88g4OOhr0jRk+yhb2/PLfdQdWz2A/rQB9x/svaW114xgmH3LSJ5PqBhc/iTX6U2LBHPSvgL9k+Wy0u31jXtbcJNcbIY1zjag5wK+59K1fQ76Tctx+tAHcLcKVFL9oX1rmLvXdLtHEQlQj3aqv8Awk+mf89Y/wDvqgD/0PwrsbgwwSw5+VGIFc8o3TgDu39at/aCWlKjG85xUNkokvoVPQuP0oA9thBW1AHZcfpVTRmlawtI8cKrc/8AAjVm1cmIKOnpUOlzEWVvEq4YFsn23GgDT
nU7cN3qG2hIcH3qe6faR747VZsocjOM0AaATKfMenaq8yoRtX0xWisZABAqvcRgDPfFAFTQo86s57+TgD/gQzWr4h0FtUsDbviMSbwGIyAdhwfzrP0YFNXVV/ijcfyNdl4n1BNJ8NPdTpu2pKcDryAo/nQBu/Brxz4C8O6LbaLqWo21jPbriVZcrmTJ3EtjB/PpXvF5rPwx1y3kgutZ068jmAGyR8r83TGR6ehr4B0nwRda3oza/ZusssxeT7Ow5K5PRuefbFe1eDPiD4Cm0WHS/FFqtpeWirCXaMssgXjJIGQcDkGgD0Tw34sf4W+JNT0TSJk1PQHdJBGkm8IHUH5X5IYDAORg9/WuF+Ld+uo+Hr6/AKi7dpQD1AdsgH8KiTw/Y/FPxPbaJ8MIY7G6tt0rXGPJ+0KOGXGMYA6Fu/pUPxhsrrRtJk0i+iaCeD5WjbgqR/P60AfNlj1AHciu7s3Ij+b069a8/s2XcldxZyAQA5496ANZZF3DPeta2bjANc2XBGWGBV/TJCww5ORQBqSr5jkfhzWK1pd21wJYQGXuK2mGcsDzVSO7kSUqTlenPSgDjYlefVJ58YDyHiur8UyQRaFptpcReakszPjpjaBg/rWJbWzrcljwGcn8zT/Ht7JEmkW8Q3N5btge5A/pQA2zksrbEqRhcdM9B+Fd14ZifUNR+0yZdI/4j6noBXmOiaa9y4udTYqg6KOp/wAK998I2/zRyNH5UKf6pcf+PGgBvinxvP4P1KPT1uHiLxrJhWwOar2H7RXiPTSPsupScdic1x3xv8P31/dadrUEZMOwwsw6BgcgGvGrXwpfT4wpyaAPq0/tQ69L811MJW9ckUn/AA05qn95f++jXhelfCPXtQgMyRtj2Fan/ClPEP8Azzb8qAP/0fwK37mZh3q5oyh9QQn+HJqlOixSuiH5QeK09DGJmk79KAPU9Pm+TB6etW9M3tbxADAy3P8AwI1gWs/lEZ4BrpNJuY2sVQcnc4/8eNAFq4jZnCe4+tdFbwfZrUPIMEiptP07J+1Sj5eoz/Oq17dB5TGnKjvQBGJmfhaJS7DAqSCLLBsZyPpUl0gVCT6UAVdKJj1q3I77x6joatfFy8EXhaONTtaXC4HfcwJ/lVGyXfq1qMhdzEfmDTPHOmfb7qHTL5i0YjDLg4wcnmgDW+G748P2YGRlB/M1514xghtPFt+kChVZhLwMYMihj0981N4U8Z2fhuzk0rU45JJbNmVCgyGGTge1XdD0G48cX934i1Nmt4LiQhRHjcccAAkHAAwOnNAHo/7Ovi1fDfxNtJdu9biGWMZPfGf5Cvrj9or4fL8RPAV34n0hc6xpcTT7Rg+fboMuhx/EoyV/LuMfBmq+Cr7QMa3oF45azPmbT8sqBf4lYcHHcYH416nYftEeNLvw5JoU7W6+bGYnlRCJChGD/FgEjjNAHyjaEE8dR3rr7aQ+SFJz7Vma5pw03Vi8QAtrtRPDjptYkEf8BYEfhT4JRwDxQBubuw/L6VdtphG4Pr+FY/mdASDk4wf8avjIXPbqKAOmV9wDg8UoEMofoDzWdYXKTxbTwR+FWIY13thuxoAWO3XClfvVzfi66C6zbRMBuit0/DcSa7CONiAMdhivL/Fckk3iiRU5EaRofqBQB1uhbbu6igdsByM17+k0FttiQj5QAMegr5qto3Qh422Y5zmu3sNenIFtATPKeM9qAPoPUY9K1rwPeafcOqz+YHiB67gOMVyXgzwS17OimPJz0xWL4e0iX7ba3urTGRzIuyLPGc+lfYXw58OJdXhnhj+Tdx+dAHa+DvhzDbaUFeIbjjsK63/hArb/AJ5L+Qr1PTrEW1sseQD6Ve8kf3hQB//S/AJ2JY1u6YAiZHU1gEENg8Gty3YooxQB0jz7U3egrt/h7eWs1vcwzx+ZJDJkHvh//wBVeXTTHyuvHStfwTfTW2stbwdbqMqB6sOR/WgD23UtdOPs0I24OOKxrY75Nzdc/XrUQ02fz
/37Dc3JAraW1SBBt5P+FAE8bEHJ4xTblhggDOR9abuwAThv/r1HK+6PFAFK2O7ULUE4IcZP14qzrk7T615bHPkxop/Hmqdggm1O0tyeGmUfma5vxbrQ8O6/d2TxGSQAMp7ZI4zQBwOusn9uXpQceZ+uBmvo/wCHk0c3hazEYGEDqwH94MTz+ea8k8P+DrbVrAahqUsi3F0TICuMLuJxwRz+dWNN1rVfh1qUmnXUX2m0mw64OMj+8vHXsQaAPpBbaFgQxGxgc56YxzXyKJhbSlIiNoJAz3Ga9B174qz6jYHT9HtntnlBVpGYFgD1Cgdz0ya9G8KeFtO0GwjE0KS3UigyyMATk8kDPQCgDwrWNQjv9F0+PdveyllGRnhJgDjJ9GUkfWsiCUY6civZviP4ZsBo0ut6dElvLCymZVwodWOA2P7wOBx1B9q8KilO360Ab6OTitLcRFkfpxWDDIWOM8CtPzCyBCckUAWYLie3YPHyB2xW3a6lFLGWYYcD+fFcbLNNE25RWjb3QkjUMuCWAoA9RgkjeFSnU4H0rwrVrq4udevWtwSBKy5PQbTivaUEdpYfa522xxJvPtgV5HBJpeo3Ds02wyMTycdTmgB0ELNg3Vxn/ZHSuy0zVbPTV3ghcd/SspfDunAbnvVjB98/lWvY23hSylRpN9/IhyN5wmfoOtAHrXgSG/v7j/hJNUykCAi2Q8bs9X/wr7a+DviSGHTJ4XAMsch/Jua+ILPxhby7I3IHAAA4A9hXu3ww1uC11C4hmk2rNHuH1FAH2wddmlO8Hg9KP7Zm9a+cbvxzHDKUSTge9Vv+E+H/AD1/WgD/2Q==", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Result: False\n" + ] + } + ], + "source": [ + "@Template.define\n", + "def is_cat(img: Image.Image) -> bool:\n", + " \"\"\"Is this a cat? Say True or False and nothing else. 
{img}\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "with handler(provider):\n", + " for img_file in [\"_static/img/cat.jpeg\", \"_static/img/not-cat.jpeg\"]:\n", + " print(f\"Testing image: {img_file}\")\n", + " display(IPYImage(filename=img_file))\n", + " with Image.open(img_file) as img:\n", + " print(f\"Result: {is_cat(img)}\") # type: ignore" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "991ee445", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "effectful", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/effectful/handlers/llm/__init__.py b/effectful/handlers/llm/__init__.py new file mode 100644 index 00000000..ecc725cf --- /dev/null +++ b/effectful/handlers/llm/__init__.py @@ -0,0 +1,25 @@ +import dataclasses +import inspect +from collections.abc import Callable + +from effectful.ops.syntax import defop +from effectful.ops.types import NotHandled + + +@dataclasses.dataclass(frozen=True) +class Template[**P, T]: + __signature__: inspect.Signature + __prompt_template__: str + + @defop + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: + raise NotHandled + + @classmethod + def define(cls, body: Callable[P, T]) -> "Template[P, T]": + if not body.__doc__: + raise ValueError("Expected a docstring on body") + + return cls( + __signature__=inspect.signature(body), __prompt_template__=body.__doc__ + ) diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py new file mode 100644 index 00000000..a1f4a70d --- /dev/null +++ b/effectful/handlers/llm/providers.py @@ -0,0 +1,103 @@ +import base64 +import io +import 
string + +try: + import openai +except ImportError: + raise ImportError("'openai' is required to use effectful.handlers.providers") + +try: + from PIL import Image +except ImportError: + raise ImportError("'pillow' is required to use effectful.handlers.providers") + + +from effectful.handlers.llm import Template +from effectful.handlers.llm.structure import decode +from effectful.ops.syntax import ObjectInterpretation, implements + + +def _pil_image_to_base64_data(pil_image: Image.Image) -> str: + buf = io.BytesIO() + pil_image.save(buf, format="PNG") + return base64.b64encode(buf.getvalue()).decode("utf-8") + + +def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: + return f"data:image/png;base64,{_pil_image_to_base64_data(pil_image)}" + + +class _OpenAIPromptFormatter(string.Formatter): + def format_as_messages( + self, format_str: str, /, *args, **kwargs + ) -> openai.types.responses.ResponseInputMessageContentListParam: + prompt_parts = [] + current_text = "" + + def push_current_text(): + nonlocal current_text + if current_text: + prompt_parts.append({"type": "input_text", "text": current_text}) + current_text = "" + + for literal, field_name, format_spec, conversion in self.parse(format_str): + current_text += literal + + if field_name is not None: + obj, _ = self.get_field(field_name, args, kwargs) + obj = self.convert_field(obj, conversion) + + if isinstance(obj, Image.Image): + assert not format_spec, ( + "image template parameters cannot have format specifiers" + ) + push_current_text() + prompt_parts.append( + { + "type": "input_image", + "image_url": _pil_image_to_base64_data_uri(obj), + } + ) + else: + current_text += self.format_field( + obj, format_spec if format_spec else "" + ) + + push_current_text() + return prompt_parts + + +class OpenAIAPIProvider(ObjectInterpretation): + """Implements templates using the OpenAI API.""" + + def __init__(self, client: openai.OpenAI, model_name: str = "gpt-4o"): + self._client = client + 
self._model_name = model_name + + @implements(Template.__call__) + def _call[**P, T]( + self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs + ) -> T: + bound_args = template.__signature__.bind(*args, **kwargs) + bound_args.apply_defaults() + prompt = _OpenAIPromptFormatter().format_as_messages( + template.__prompt_template__, **bound_args.arguments + ) + + # TODO: Support structured outputs https://platform.openai.com/docs/guides/structured-outputs + + # Note: The OpenAI api only seems to accept images in the 'user' role. + # The effect of different roles on the model's response is currently + # unclear. + response = self._client.responses.create( + model=self._model_name, input=[{"content": prompt, "role": "user"}] + ) + + first_response = response.output[0] + assert first_response.type == "message" + first_response_content = first_response.content[0] + assert first_response_content.type == "output_text" + + ret_type = template.__signature__.return_annotation + return decode(ret_type, first_response_content.text) diff --git a/effectful/handlers/llm/structure.py b/effectful/handlers/llm/structure.py new file mode 100644 index 00000000..bc6c7dd1 --- /dev/null +++ b/effectful/handlers/llm/structure.py @@ -0,0 +1,41 @@ +import typing + +from effectful.ops.syntax import defop + + +class DecodeError(RuntimeError): + """Raised when decoding an LLM response fails.""" + + def __init__(self, t: type, response: str): + super().__init__() + self.type_ = t + self.response = response + + def __repr__(self): + return f"DecodeError({self.type_}, {self.response})" + + +@defop +def decode[T](t: type[T], content: str) -> T: + """Decode `content` as an instance of `t`. Used to consume the output of an + LLM. 
+ + """ + if t is str: + return typing.cast(T, content) + elif t is bool: + match content.strip().lower(): + case "true": + return typing.cast(T, True) + case "false": + return typing.cast(T, False) + case _: + raise DecodeError(t, content) + elif t in (int, float, complex, bool): + try: + result = t(content) # type: ignore + except ValueError: + raise DecodeError(t, content) + return typing.cast(T, result) + + raise DecodeError(t, content) diff --git a/effectful/handlers/llm/synthesis.py b/effectful/handlers/llm/synthesis.py new file mode 100644 index 00000000..4178fe4b --- /dev/null +++ b/effectful/handlers/llm/synthesis.py @@ -0,0 +1,83 @@ +import ast +import collections.abc +import dataclasses +import re +import textwrap +import typing + +from effectful.handlers.llm import Template +from effectful.handlers.llm.structure import decode +from effectful.ops.semantics import fwd +from effectful.ops.syntax import ObjectInterpretation, implements + + +class ProgramSynthesis(ObjectInterpretation): + """Provides a `decode` handler for callables and a `template` handler to + instruct the LLM to generate code of the right form and with the right type. 
+ + """ + + @implements(decode) + def _decode[T](self, t: type[T], content: str) -> T: + origin = typing.get_origin(t) + t = t if origin is None else origin + + if not (issubclass(t, collections.abc.Callable)): # type: ignore[arg-type] + return fwd() + + pattern = r"(.*?)" + code_content = re.search(pattern, content, re.DOTALL) + if code_content is None: + return fwd() + code = code_content.group(1) + + try: + module_ast = ast.parse(code) + except SyntaxError: + return fwd() + + if not isinstance(module_ast, ast.Module): + return fwd() + + last_decl = module_ast.body[-1] + if not isinstance(last_decl, ast.FunctionDef): + return fwd() + + # TODO: assert callable type compatibility + gs: dict = {} + try: + exec(code, gs) + except Exception: + return fwd() + + return gs[last_decl.name] + + @implements(Template.__call__) + def _call(self, template, *args, **kwargs) -> None: + ret_type = template.__signature__.return_annotation + origin = typing.get_origin(ret_type) + ret_type = ret_type if origin is None else origin + + if not (issubclass(ret_type, collections.abc.Callable)): # type: ignore[arg-type] + return fwd() + + prompt_ext = textwrap.dedent(f""" + Generate a Python function satisfying the following specification and type signature. + + {template.__prompt_template__} + {str(ret_type)} + + + 1. Produce one block of Python code. + 2. Do not include usage examples. + 3. Return your response in tags. + 4. Do not return your response in markdown blocks. + 5. Your output function def must be the final statement in the code block. 
+ + """).strip() + + return fwd( + dataclasses.replace(template, __prompt_template__=prompt_ext), + *args, + **kwargs, + ) diff --git a/pyproject.toml b/pyproject.toml index fd887562..195395ce 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ torch = ["torch", "dm-tree"] pyro = ["pyro-ppl>=1.9.1", "dm-tree"] jax = ["jax", "dm-tree"] numpyro = ["numpyro>=0.19", "dm-tree"] +llm = ["openai", "pillow"] docs = [ "effectful[torch,pyro,jax,numpyro]", "sphinx", @@ -63,7 +64,7 @@ test = [ ] [dependency-groups] -dev = ["effectful[torch,pyro,jax,numpyro,docs,test]"] +dev = ["effectful[torch,pyro,jax,numpyro,llm,docs,test]"] [tool.ruff] target-version = "py312" diff --git a/scripts/lint.sh b/scripts/lint.sh index 824c540a..f186be70 100755 --- a/scripts/lint.sh +++ b/scripts/lint.sh @@ -7,7 +7,7 @@ mypy $SRC ruff check $SRC ruff format --diff $SRC -nbqa mypy docs +nbqa 'mypy --no-incremental' docs nbqa 'ruff check' docs nbqa 'ruff format --diff' docs diff --git a/tests/test_handlers_llm.py b/tests/test_handlers_llm.py new file mode 100644 index 00000000..6d5aec79 --- /dev/null +++ b/tests/test_handlers_llm.py @@ -0,0 +1,149 @@ +from collections.abc import Callable + +import pytest + +from effectful.handlers.llm import Template +from effectful.handlers.llm.structure import DecodeError, decode +from effectful.handlers.llm.synthesis import ProgramSynthesis +from effectful.ops.semantics import handler +from effectful.ops.syntax import ObjectInterpretation, implements + + +class MockLLMProvider(ObjectInterpretation): + """Mock provider for testing. + + Initialized with prompts and responses. Raises if an unexpected prompt is given. + """ + + def __init__(self, prompt_responses: dict[str, str]): + """Initialize with a dictionary mapping prompts to expected responses. 
+ + Args: + prompt_responses: Dict mapping prompt strings to their expected responses + """ + self.prompt_responses = prompt_responses + + @implements(Template.__call__) + def _call[**P, T]( + self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs + ) -> T: + bound_args = template.__signature__.bind(*args, **kwargs) + bound_args.apply_defaults() + prompt = template.__prompt_template__.format(**bound_args.arguments) + + if prompt not in self.prompt_responses: + raise ValueError(f"Unexpected prompt: {prompt!r}") + + response = self.prompt_responses[prompt] + + ret_type = template.__signature__.return_annotation + return decode(ret_type, response) + + +class SingleResponseLLMProvider(ObjectInterpretation): + """Simplified mock provider that returns a single response for any prompt.""" + + def __init__(self, response: str): + """Initialize with a single response string. + + Args: + response: The response to return for any template call + """ + self.response = response + + @implements(Template.__call__) + def _call[**P, T]( + self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs + ) -> T: + ret_type = template.__signature__.return_annotation + return decode(ret_type, self.response) + + +# Test templates from the notebook examples +@Template.define +def limerick(theme: str) -> str: + """Write a limerick on the theme of {theme}.""" + raise NotImplementedError + + +@Template.define +def haiku(theme: str) -> str: + """Write a haiku on the theme of {theme}.""" + raise NotImplementedError + + +@Template.define +def primes(first_digit: int) -> int: + """Give exactly one prime number with {first_digit} as the first digit. 
Respond with only the number.""" + raise NotImplementedError + + +@Template.define +def count_char(char: str) -> Callable[[str], int]: + """Write a function which takes a string and counts the occurrances of '{char}'.""" + raise NotImplementedError + + +# Unit tests +def test_limerick(): + """Test the limerick template returns a string.""" + mock_response = "There once was a fish from the sea" + mock_provider = MockLLMProvider( + {"Write a limerick on the theme of fish.": mock_response} + ) + + with handler(mock_provider): + result = limerick("fish") + assert result == mock_response + assert isinstance(result, str) + + +def test_primes_decode_int(): + """Test the primes template correctly decodes integer response.""" + mock_provider = SingleResponseLLMProvider("61") + + with handler(mock_provider): + result = primes(6) + assert result == 61 + assert isinstance(result, int) + + +def test_primes_decode_error(): + """Test that non-numeric responses raise DecodeError.""" + mock_provider = SingleResponseLLMProvider("not a number") + + with handler(mock_provider): + with pytest.raises(DecodeError) as exc_info: + primes(7) + assert exc_info.value.type_ == int + assert exc_info.value.response == "not a number" + + +def test_count_char_with_program_synthesis(): + """Test the count_char template with program synthesis.""" + mock_code = """ +def count_occurrences(s): + return s.count('a') +""" + mock_provider = SingleResponseLLMProvider(mock_code) + + with handler(mock_provider), handler(ProgramSynthesis()): + count_a = count_char("a") + assert callable(count_a) + assert count_a("banana") == 3 + assert count_a("cherry") == 0 + + +def test_decode_primitives(): + """Test decode function with primitive types.""" + assert decode(str, "hello") == "hello" + assert decode(int, "42") == 42 + assert decode(float, "3.14") == 3.14 + assert decode(bool, "true") == True + assert decode(bool, "false") == False + + with pytest.raises(DecodeError): + decode(int, "not a number") + + with 
pytest.raises(DecodeError): + decode(bool, "maybe") From 8170a64c6a3425dfaf1b5df381bff4496b2e60f0 Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Fri, 10 Oct 2025 18:19:59 -0400 Subject: [PATCH 03/39] Implement basic tool calling (#366) * import llm code from robotl * make optional deps optional * replace template with Template.define * bring in tests * typing bullshit * disable incremental type checking to avoid crash * format * remove anthropic and cache modules * fix * remove dupe * rename * fix not handled * reorganize * wip * wip * wip * wip * restrict python version * specify python version * rename * work on a tool call interface * wip * wip * format * wip * revert whitespace changes * fix decorator calls * lint * lint * allow no arguments to decorator * work --- docs/source/llm.ipynb | 83 +++++++++++++--- effectful/handlers/llm/__init__.py | 24 +++-- effectful/handlers/llm/providers.py | 145 ++++++++++++++++++++++++++-- pyproject.toml | 6 +- tests/test_handlers_llm.py | 2 +- 5 files changed, 228 insertions(+), 32 deletions(-) diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index ddea9724..e883530b 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "5aaf649f", "metadata": {}, "outputs": [], @@ -17,10 +17,11 @@ "from PIL import Image\n", "\n", "from effectful.handlers.llm import Template\n", - "from effectful.handlers.llm.providers import OpenAIAPIProvider\n", + "from effectful.handlers.llm.providers import OpenAIAPIProvider, tool_call\n", "from effectful.handlers.llm.structure import DecodeError, decode\n", "from effectful.handlers.llm.synthesis import ProgramSynthesis\n", - "from effectful.ops.semantics import handler\n", + "from effectful.ops.semantics import fwd, handler\n", + "from effectful.ops.syntax import defop\n", "\n", "provider = OpenAIAPIProvider(openai.OpenAI(api_key=os.getenv(\"OPENAI_API_KEY\")))" ] @@ -51,7 +52,7 
@@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "1e832675", "metadata": {}, "outputs": [], @@ -74,7 +75,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "id": "634f6533", "metadata": {}, "outputs": [ @@ -98,9 +99,9 @@ ], "source": [ "with handler(provider):\n", - " print(limerick(\"fish\")) # type: ignore\n", + " print(limerick(\"fish\"))\n", " print(\"-\" * 40)\n", - " print(limerick(\"fish\")) # type: ignore" + " print(limerick(\"fish\"))" ] }, { @@ -159,7 +160,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "id": "2c766859", "metadata": {}, "outputs": [], @@ -180,7 +181,7 @@ "\n", "\n", "with handler(provider):\n", - " assert type(primes(6)) is int # type: ignore" + " assert type(primes(6)) is int" ] }, { @@ -206,7 +207,7 @@ "\n", "try:\n", " with handler(provider), handler(ProgramSynthesis()):\n", - " count_a = count_char(\"a\") # type: ignore\n", + " count_a = count_char(\"a\")\n", " assert callable(count_a)\n", " assert count_a(\"banana\") == 3\n", " assert count_a(\"cherry\") == 0\n", @@ -286,13 +287,71 @@ " print(f\"Testing image: {img_file}\")\n", " display(IPYImage(filename=img_file))\n", " with Image.open(img_file) as img:\n", - " print(f\"Result: {is_cat(img)}\") # type: ignore" + " print(f\"Result: {is_cat(img)}\")" + ] + }, + { + "cell_type": "markdown", + "id": "991ee445", + "metadata": {}, + "source": [ + "## Tool Calling\n", + "\n", + "Passing `Operation`s to `Template.define` makes them available for the LLM to call as tools. The description of these operations is inferred from their type annotations and docstrings.\n", + "\n", + "Tool calls are mediated by a helper operation `tool_call`. Handling this operation allows tool use to be tracked or logged." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "66711301", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Tool call: cities(*(), **{}) -> ['Chicago', 'New York', 'Barcelona']\n", + "Tool call: weather(*(), **{'city': 'Chicago'}) -> cold\n", + "Tool call: weather(*(), **{'city': 'New York'}) -> wet\n", + "Tool call: weather(*(), **{'city': 'Barcelona'}) -> sunny\n", + "Based on the current weather, Barcelona is a city with good weather, as it is sunny there.\n" + ] + } + ], + "source": [ + "@defop\n", + "def cities() -> list[str]:\n", + " return [\"Chicago\", \"New York\", \"Barcelona\"]\n", + "\n", + "\n", + "@defop\n", + "def weather(city: str) -> str:\n", + " status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\n", + " return status.get(city, \"unknown\")\n", + "\n", + "\n", + "@Template.define(tools=[cities, weather])\n", + "def vacation() -> str:\n", + " \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "def log_tool_call(_, tool, *args, **kwargs):\n", + " result = fwd()\n", + " print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\n", + " return result\n", + "\n", + "\n", + "with handler(provider), handler({tool_call: log_tool_call}):\n", + " print(vacation())" ] }, { "cell_type": "code", "execution_count": null, - "id": "991ee445", + "id": "17668ac8", "metadata": {}, "outputs": [], "source": [] diff --git a/effectful/handlers/llm/__init__.py b/effectful/handlers/llm/__init__.py index ecc725cf..829d3721 100644 --- a/effectful/handlers/llm/__init__.py +++ b/effectful/handlers/llm/__init__.py @@ -1,25 +1,33 @@ import dataclasses import inspect -from collections.abc import Callable +from collections.abc import Callable, Iterable from effectful.ops.syntax import defop -from effectful.ops.types import NotHandled +from effectful.ops.types import NotHandled, Operation 
@dataclasses.dataclass(frozen=True) class Template[**P, T]: __signature__: inspect.Signature __prompt_template__: str + tools: tuple[Operation, ...] @defop def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: raise NotHandled @classmethod - def define(cls, body: Callable[P, T]) -> "Template[P, T]": - if not body.__doc__: - raise ValueError("Expected a docstring on body") + def define(cls, _func=None, *, tools: Iterable[Operation] = ()): + def decorator(body: Callable[P, T]): + if not body.__doc__: + raise ValueError("Expected a docstring on body") - return cls( - __signature__=inspect.signature(body), __prompt_template__=body.__doc__ - ) + return cls( + __signature__=inspect.signature(body), + __prompt_template__=body.__doc__, + tools=tuple(tools), + ) + + if _func is None: + return decorator + return decorator(_func) diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index a1f4a70d..9534912f 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -1,6 +1,13 @@ import base64 +import dataclasses +import inspect import io import string +from collections.abc import Iterable, Mapping +from typing import Any, get_type_hints + +import pydantic +from pydantic import create_model try: import openai @@ -12,10 +19,12 @@ except ImportError: raise ImportError("'pillow' is required to use effectful.handlers.providers") +from openai.types.responses import FunctionToolParam from effectful.handlers.llm import Template from effectful.handlers.llm.structure import decode -from effectful.ops.syntax import ObjectInterpretation, implements +from effectful.ops.syntax import ObjectInterpretation, defop, implements +from effectful.ops.types import Operation def _pil_image_to_base64_data(pil_image: Image.Image) -> str: @@ -28,6 +37,65 @@ def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: return f"data:image/png;base64,{_pil_image_to_base64_data(pil_image)}" +@dataclasses.dataclass +class 
Tool[**P, T]: + parameter_model: type[pydantic.BaseModel] + result_model: type[pydantic.BaseModel] + operation: Operation[P, T] + name: str + + @classmethod + def of_operation(cls, op: Operation[P, T], name: str): + sig = inspect.signature(op) + hints = get_type_hints(op) + + # Build field definitions with defaults + fields = {} + for param_name, param in sig.parameters.items(): + field_type = hints.get(param_name, str) + if param.default == inspect.Parameter.empty: + field_desc = field_type + else: + field_desc = (field_type, param.default) + fields[param_name] = field_desc + + parameter_model = create_model("Params", **fields) + result_model = create_model("Result", result=sig.return_annotation) + + return cls( + parameter_model=parameter_model, + result_model=result_model, + operation=op, + name=name, + ) + + @property + def function_definition(self) -> FunctionToolParam: + return { + "type": "function", + "name": self.name, + "description": self.operation.__doc__, + "parameters": self.parameter_model.model_json_schema(), + "strict": False, + } + + +def _tools_of_operations(ops: Iterable[Operation]) -> Mapping[str, Tool]: + tools = {} + for op in ops: + name = op.__name__ + + # Ensure tool names are unique. Operation names may not be. 
+ if name in tools: + suffix = 0 + while f"{name}_{suffix}" in tools: + suffix += 1 + name = f"{name}_{suffix}" + + tools[name] = Tool.of_operation(op, name) + return tools + + class _OpenAIPromptFormatter(string.Formatter): def format_as_messages( self, format_str: str, /, *args, **kwargs @@ -68,6 +136,26 @@ def push_current_text(): return prompt_parts +# Note: attempting to type the tool arguments causes type-checker failures +@defop +def tool_call[T](template: Template, tool: Operation[..., T], *args, **kwargs) -> T: + """Perform a model-initiated tool call.""" + return tool(*args, **kwargs) + + +def _call_tool_with_json_args( + template: Template, tool: Tool, json_str_args: str +) -> dict: + try: + args = tool.parameter_model.model_validate_json(json_str_args) + result = tool_call( + template, tool.operation, **args.model_dump(exclude_defaults=True) + ) + return {"status": "success", "result": str(result)} + except Exception as exn: + return {"status": "failure", "exception": str(exn)} + + class OpenAIAPIProvider(ObjectInterpretation): """Implements templates using the OpenAI API.""" @@ -85,19 +173,56 @@ def _call[**P, T]( template.__prompt_template__, **bound_args.arguments ) - # TODO: Support structured outputs https://platform.openai.com/docs/guides/structured-outputs + tools = _tools_of_operations(template.tools) + tool_definitions = [t.function_definition for t in tools.values()] # Note: The OpenAI api only seems to accept images in the 'user' role. # The effect of different roles on the model's response is currently # unclear. 
- response = self._client.responses.create( - model=self._model_name, input=[{"content": prompt, "role": "user"}] - ) - first_response = response.output[0] - assert first_response.type == "message" - first_response_content = first_response.content[0] - assert first_response_content.type == "output_text" + called_tools = set([]) # tool calls that we have discharged + model_input: list[Any] = [ + {"type": "message", "content": prompt, "role": "user"} + ] + + while True: + response = self._client.responses.create( + model=self._model_name, + input=model_input, + tools=tool_definitions, + tool_choice="auto", + ) + + new_input = [] + for message in response.output: + if message.type != "function_call": + continue + + call_id = message.call_id + if call_id in called_tools: + continue + called_tools.add(call_id) + + tool = tools[message.name] + tool_result = _call_tool_with_json_args( + template, tool, message.arguments + ) + tool_response = { + "type": "function_call_output", + "call_id": call_id, + "output": str(tool_result), + } + new_input.append(tool_response) + + if not new_input: + break + + model_input += response.output + new_input + + last_resp = response.output[-1] + assert last_resp.type == "message" + last_resp_content = last_resp.content[0] + assert last_resp_content.type == "output_text" ret_type = template.__signature__.return_annotation - return decode(ret_type, first_response_content.text) + return decode(ret_type, last_resp_content.text) diff --git a/pyproject.toml b/pyproject.toml index 195395ce..e669f934 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,11 @@ torch = ["torch", "dm-tree"] pyro = ["pyro-ppl>=1.9.1", "dm-tree"] jax = ["jax", "dm-tree"] numpyro = ["numpyro>=0.19", "dm-tree"] -llm = ["openai", "pillow"] +llm = [ + "openai", + "pillow", + "pydantic", +] docs = [ "effectful[torch,pyro,jax,numpyro]", "sphinx", diff --git a/tests/test_handlers_llm.py b/tests/test_handlers_llm.py index 6d5aec79..879f6508 100644 --- 
a/tests/test_handlers_llm.py +++ b/tests/test_handlers_llm.py @@ -72,7 +72,7 @@ def haiku(theme: str) -> str: raise NotImplementedError -@Template.define +@Template.define() def primes(first_digit: int) -> int: """Give exactly one prime number with {first_digit} as the first digit. Respond with only the number.""" raise NotImplementedError From ab9e2fe83df1e8af20505664e5ac929c959ed5d4 Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Mon, 20 Oct 2025 16:37:22 -0400 Subject: [PATCH 04/39] enable strict mode for tool calling (#375) --- effectful/handlers/llm/providers.py | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index 9534912f..1a1452e7 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -7,7 +7,6 @@ from typing import Any, get_type_hints import pydantic -from pydantic import create_model try: import openai @@ -48,20 +47,15 @@ class Tool[**P, T]: def of_operation(cls, op: Operation[P, T], name: str): sig = inspect.signature(op) hints = get_type_hints(op) - - # Build field definitions with defaults - fields = {} - for param_name, param in sig.parameters.items(): - field_type = hints.get(param_name, str) - if param.default == inspect.Parameter.empty: - field_desc = field_type - else: - field_desc = (field_type, param.default) - fields[param_name] = field_desc - - parameter_model = create_model("Params", **fields) - result_model = create_model("Result", result=sig.return_annotation) - + fields = { + param_name: hints.get(param_name, str) for param_name in sig.parameters + } + parameter_model = pydantic.create_model( + "Params", __config__={"extra": "forbid"}, **fields + ) + result_model = pydantic.create_model( + "Result", __config__={"extra": "forbid"}, result=sig.return_annotation + ) return cls( parameter_model=parameter_model, result_model=result_model, @@ -76,7 +70,7 @@ def function_definition(self) 
-> FunctionToolParam: "name": self.name, "description": self.operation.__doc__, "parameters": self.parameter_model.model_json_schema(), - "strict": False, + "strict": True, } From 661cab85bffef3e61255043a5b6892e623b67fdc Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Mon, 20 Oct 2025 18:36:27 -0400 Subject: [PATCH 05/39] add structured generation and remove unused `decode` operation (#376) --- docs/source/llm.ipynb | 211 ++++++++++++---------------- effectful/handlers/llm/providers.py | 42 ++++-- effectful/handlers/llm/structure.py | 41 ------ effectful/handlers/llm/synthesis.py | 46 +++--- tests/test_handlers_llm.py | 50 ++----- 5 files changed, 157 insertions(+), 233 deletions(-) delete mode 100644 effectful/handlers/llm/structure.py diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index e883530b..11d7d3cd 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -7,18 +7,15 @@ "metadata": {}, "outputs": [], "source": [ + "import dataclasses\n", "import functools\n", "import os\n", "from collections.abc import Callable\n", "\n", "import openai\n", - "from IPython.display import Image as IPYImage\n", - "from IPython.display import display\n", - "from PIL import Image\n", "\n", "from effectful.handlers.llm import Template\n", "from effectful.handlers.llm.providers import OpenAIAPIProvider, tool_call\n", - "from effectful.handlers.llm.structure import DecodeError, decode\n", "from effectful.handlers.llm.synthesis import ProgramSynthesis\n", "from effectful.ops.semantics import fwd, handler\n", "from effectful.ops.syntax import defop\n", @@ -52,7 +49,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "id": "1e832675", "metadata": {}, "outputs": [], @@ -75,7 +72,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "id": "634f6533", "metadata": {}, "outputs": [ @@ -84,16 +81,16 @@ "output_type": "stream", "text": [ "A fish with a wish in the sea, \n", - "Dreamed of climbing a tall apple 
tree. \n", - "Though it lacked any feet, \n", - "It found life quite sweet, \n", - "While swimming as wild as can be.\n", + "Dreamed of climbing a tall, grand tree. \n", + "With scales shining bright, \n", + "It leapt out of sight, \n", + "But decided that swimming's the key.\n", "----------------------------------------\n", - "In the sea swam a fish full of flair, \n", - "With scales that would shimmer and glare. \n", - "He'd leap and he'd dive, \n", - "Feeling fully alive, \n", - "Turning flips in the salty sea air.\n" + "In the pond where the sun loves to glisten, \n", + "A curious fish liked to listen. \n", + "With a flip and a swish, \n", + "It'd grant every wish, \n", + "To the tales that the waters would christen.\n" ] } ], @@ -114,7 +111,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "id": "706ce53b", "metadata": {}, "outputs": [ @@ -123,13 +120,13 @@ "output_type": "stream", "text": [ "\n", - "Gliding through sunbeams, \n", - "Silver scales dance in currents— \n", - "Whispers of the deep.\n", + "Silver scales shimmer, \n", + "Deep beneath the water's dance— \n", + "Quiet currents hum.\n", "----------------------------------------\n", - "Gliding through sunbeams, \n", - "Silver scales dance in currents— \n", - "Whispers of the deep.\n" + "Silver scales shimmer, \n", + "Deep beneath the water's dance— \n", + "Quiet currents hum.\n" ] } ], @@ -160,23 +157,14 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, "id": "2c766859", "metadata": {}, "outputs": [], "source": [ - "assert type(decode(str, \"a string\")) is str\n", - "assert type(decode(int, \"123\")) is int\n", - "try:\n", - " decode(int, \"not an int\")\n", - " assert False, \"Should have raised\"\n", - "except DecodeError:\n", - " pass\n", - "\n", - "\n", "@Template.define\n", "def primes(first_digit: int) -> int:\n", - " \"\"\"Give exactly one prime number with {first_digit} as the first digit. 
Respond with only the number.\"\"\"\n", + " \"\"\"Give a prime number with {first_digit} as the first digit.\"\"\"\n", " raise NotImplementedError\n", "\n", "\n", @@ -194,7 +182,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "id": "c83bbdc0", "metadata": {}, "outputs": [], @@ -205,89 +193,11 @@ " raise NotImplementedError\n", "\n", "\n", - "try:\n", - " with handler(provider), handler(ProgramSynthesis()):\n", - " count_a = count_char(\"a\")\n", - " assert callable(count_a)\n", - " assert count_a(\"banana\") == 3\n", - " assert count_a(\"cherry\") == 0\n", - "except DecodeError as e:\n", - " print(\"DecodeError:\", e.response)\n", - " print(\"This can happen if the LLM generates code that cannot be parsed.\")" - ] - }, - { - "cell_type": "markdown", - "id": "c9634e1a", - "metadata": {}, - "source": [ - "## Multimodal Prompts\n", - "\n", - "Prompt templating is largely the same as standard Python templating. However, special case behavior is provided for image template arguments. These are added to the prompt in a provider-API-specific way." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "72614579", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Testing image: _static/img/cat.jpeg\n" - ] - }, - { - "data": { - "image/jpeg": "/9j/4AAQSkZJRgABAQAASABIAAD/4QIoRXhpZgAATU0AKgAAAAgACAEPAAIAAAAGAAAAbgEQAAIAAAAcAAAAdAESAAMAAAABAAEAAAEaAAUAAAABAAAAkAEbAAUAAAABAAAAmAEoAAMAAAABAAIAAAExAAIAAAAlAAAAoIdpAAQAAAABAAAAxgAAAABDYW5vbgBDYW5vbiBFT1MgRElHSVRBTCBSRUJFTCBYVGkAAAAASAAAAAEAAABIAAAAAUFkb2JlIFBob3Rvc2hvcCBFbGVtZW50cyAzLjAgV2luZG93cwAAABeCmgAFAAAAAQAAAeCCnQAFAAAAAQAAAeiIIgADAAAAAQABAACIJwADAAAAAQGQAACQAAAHAAAABDAyMjGRAQAHAAAABAECAwCSAQAKAAAAAQAAAfCSAgAFAAAAAQAAAfiSBAAKAAAAAQAAAgCSBwADAAAAAQAFAACSCQADAAAAAQAQAACSCgAFAAAAAQAAAgigAAAHAAAABDAxMDCgAQADAAAAAQABAACgAgAEAAAAAQAAAUCgAwAEAAAAAQAAAOqiDgAFAAAAAQAAAhCiDwAFAAAAAQAAAhiiEAADAAAAAQACAACkAQADAAAAAQAAAACkAgADAAAAAQABAACkAwADAAAAAQAAAACkBgADAAAAAQAAAAAAAAAAAAAAAQAAAPoAAAAcAAAABQAA+/MAAB+hAAEPKgAANo0AAAAAAAAAAQAAADMAAAABABcimwAAAVYABpeAAAAAYf/AABEIAOoBQAMBIgACEQEDEQH/xAAfAAABBQEBAQEBAQAAAAAAAAAAAQIDBAUGBwgJCgv/xAC1EAACAQMDAgQDBQUEBAAAAX0BAgMABBEFEiExQQYTUWEHInEUMoGRoQgjQrHBFVLR8CQzYnKCCQoWFxgZGiUmJygpKjQ1Njc4OTpDREVGR0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4eLj5OXm5+jp6vHy8/T19vf4+fr/xAAfAQADAQEBAQEBAQEBAAAAAAAAAQIDBAUGBwgJCgv/xAC1EQACAQIEBAMEBwUEBAABAncAAQIDEQQFITEGEkFRB2FxEyIygQgUQpGhscEJIzNS8BVictEKFiQ04SXxFxgZGiYnKCkqNTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqCg4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS09TV1tfY2dri4+Tl5ufo6ery8/T19vf4+fr/2wBDAAICAgICAgMCAgMFAwMDBQYFBQUFBggGBgYGBggKCAgICAgICgoKCgoKCgoMDAwMDAwODg4ODg8PDw8PDw8PDw//2wBDAQICAgQEBAcEBAcQCwkLEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBD/3QAEABT/2gAMAwEAAhEDEQA/ANCBRbyTeH5JfPeOWSZ2k5WNV5C5xk5P3RV7SdJGs6e4m2rdQXJMZcjEiEZKuMdMgHpzXYzW1rq0Es1kyFnlMjSGMlxsGRnPbHfIrGm1CyudusWQjkt4UIlA52lDsIAX1z9ea/mSlmEqC0Wr/r+vM/JE7O4zX9K13
UNYvNXF0WhtIAp38ZkkwxK+3XgGo9V8RaXbeH9JMM/nzwQvDNKE5IR/nBA7jgA+laN34guJbLSyAkFpdFE8xjt3yK3zAg9OBj9DWV9kim8Uz2W+KOMRySeTgBVRTwMdMd/9o8V3RzWcVNyW6XzejuHNZtHNa7cLfSKk8WbbYJgCdg3LjHPrnsO1a+h+HpNR8RtfJFgHPkKpICkKd0px6Ekrn61Yn0yaZ1h04NIIHWOXsEMzfMpzyNo6e3Suoj8O3ulohtUEc9zLMVWTcGaNANoGPugKCT3Nb4bFVIvlWt2Qm0zn9U8N2NhfwrakCePYVvCMSgBvn2k7iCeRkc89RXH6L4M1m8uDr2naO2mJZy3UsbTEtJc+bkBlJyOAxJbqAK9N1jUXkKNEY5obnaIywDGIYAxnsAe3pXY/8JF/ZV1HoU8McjRFiHUlQgYAttBI3fLkn9K7cLiHHme629G/zNqUt7nzvd/CW/8Ah/pDT304Z5pJDBbwHzZpnYAB2PTknhM8AZPJq9Z/DFtVvVstQSRlgthKIN5Ry7nceOwP3ST07V9Q+HtZ88rrEXlSxW6jbCD+7lMwZEYlhuG3dnH59K47S72ys7i+n09IUlKMsjyMwOQuQBkfNhjjr06dKeKxVPRRWr/Qpu54a0upWx+0R20ISCPE8TjzAc4iVEyPvDv0IrJup7aTUIhry7bTSx5U0cZCuLg4LR7O2B1Jr25tJ8OXlvq7XU0ks9/BHG6wOGieZeUdARhWLEg461wt34Jv/EGnaXpzKlrIkKXF4srAuZmUhllcgBn4U89wQa4KMYKnyxl1td+fr/wxMYx7nn3i/wAa7tRjt9RLKJoFNmgUbLeBsDmMcFuOp4HWl07XvMtcW8rtLcxkCNhlGSPksPfHBH0r2fxj4V06/wBFtrOxiS01S2VTBL5I+dYuqEjHysOmOM1iHSLOO30s/wBn/Y5rZVS6YDDJ5vUqpAyXx+RzXbUw8Km8v6SO2XJyJrc53RJL/SbaxljIYTN5pYkkKrc5fPoBnJ/Cu40uJ9XuJ5Li+VFtm8xChI6k5AHfIwQcdqt+I1g0fw9b32mZluFXbJASrA7WPl/N/ESDyuOBxXBX2s2fkxSpcC2iubLjGE8tmA3KAecqc1z6pzl9lNfj/X3nG37zPRGVrkRRlgkoVcAjIJHO4cDIzySfpXVW01hfW9pfXU7ic7oQrOBuVcKGwPXPTp0ryKTxXfa3ZJF5DbdNhRIYE/dSSpg+b8w+8RnemevPtWbqE2oabplrqWlxyX/mKs6SIpMoCEEK69AwJHHsa3+r1pR5os0g5PY+1LbxRPZWiWFtdpbwwqEJ4EjAKxO4k7uMYqpf3Oh6i6aX4j8t3mcSqoY7gr4KZOc9Dn8u9fESar4vkGpX08N2QqsZreZCkjPIS2cZ6HnA4r0HRfEf9q6Bpl3fKYbt1yu/hlKZRA7DgjC8AdK4cV7aNNSrfj/kdyxs4r3j36fQdCu7OYWBaCXT5niOQFBV+Mnbwx+bj6ZrB8N/DMRWRuZ9TjkNtKzW6sfnWJjkLnueDn1zXEw+J20CCSW/lIaeYSLESCxP+0DzjPTNezeHdcN9JBqctnEVlk4J+VUDHJODgE/j1NbUsTOzc/8AhjSnWjUa5t2Yz+B9ImaXWJUEl6HVDjLI3y53bTjgdO/IrkB4Z8Q2aoiS/wBoQQz7pwqk7YmJK/L04JGRXsl9qUMF9/xLlMM0oCbG+ZWzznOeNwyK870jVb0R6pHOBFK0wnQ+YE8yI5bGTxhc/wAhVU8znFqLWi/H+tCXTtJI5uI654diTVJrFQqMH2KDlFbgsp7uCcn069aoanNqeq22lABhc3EhdZ2DYXbnAGMZABr3DxDf+HzBbQRzx28kjyebC7BiQuPlC5zk9SawtFv7eFmgnaKNmba8bsvYHlSeCG9scg+1enVrwnPlkrFRqcjM6JLu6hlhvoRcTiA7JwQofIwSc5+6O3eue06ya61SC7a6+xzSJ90ng
NGvbIzyuMA9K9Is7rSbz/RFiP2p4pC8CepGNzDHOMjnj9aw7qXSrKOJJomlyZNjYG7duxuY+yjGOldkqkZRUYu3Lq/kY1q/M+ZHL3nhyzNwupzOJbe+mC+WVyq+X91s54x2ArO1jXtLE8+ovFIJYWELAcqUVCA2eCcnGSelbjxMtozLdB4rl/3abcujFd4w/TB6AY4xXG3uiRSWXn6gJLYS+bvLR7WVegYkn6getePjsc3Jrl0tc5JVDktU8ZH+zxHPN5c5cMxAweORj0GD1ruLLxFqSWcOjSQb1mzMrOdoJ2kqMjnIboK406Npxvo7WW3+3zFAnmTEKqyMOPungkDO7J6YravdJu7rULKys5Wa1sGiY7xhmeJy23cOrH06dq85OFOKqSaME2tjt/CmpT6xFI99+6mjIa5jjYDzFY7Rtzk8cbueM5p1yBFIJGhkVA3dvlbJ4Jzzx2+leRLLrdlNcarOpkmLNcbPmG0ysMowA5wVPB6Crlv4wupNctoLhXaSFfMK8uuSu0ED27ehrCpOTcPabdynVurHpN3AzRS2ss5uHnDEEDDouMqD7ZPFQ+EY5ryKNL6QpBDI2EUb8bflJ9ACR07n86xLi8ezuU1W2KkSuC4zz/d4HX71M1i+06CzuZJ55re7ljSWNSP3bHeAykcH7pLAjPNdNGlTdVcvwrX9SlUuzvdMsf7I1m4tJrkXcJw01s/yRzCTKkZxs54I5yCKyvFIfT7KSXTbJreFLi3aVCuZE8olSeOoIYA//qrI0/xJp5EdruWaOAiWRZEGWCDLQ4J6Hue1YmpeJo7+WbY/kqrhgwyVQM2GTd0w3SirVoqk0t+y3W46lTTQtXAu7DR47i1UGB3c3KHAdZUYlGC9DjoRwK5DTfE8r6pqXmq0QjjeaLgj+DfuX2OePpTLzxOmpRyTTWuLq4YyiJWJysaiM7kGOWCt+NXNWji1NtPR0S2mvYjbW52HquQGbH8KL26mueniEpxjbS6Xz8znk0z/0PQZ7uzt9BmnjdbZ5jGVMh3B3xkrz2PTFQaDqujRW+l21kDbRTSFJEC+m1lyMclvX/61c/4hsdR13WLCx+1fZ9IvZCUCICVIUbVdTgAncB16mse4W0vFaTSmmWEeYAWHz70UIAOT93HUd6/mmplsowlLmtqfkbV0ehvocWs3NtpsONsW54UeQReSfm3Hn73OB83rUAsZoZ/tjWoTXXj8oOx+/FnLqF6fdPOe9cZozXGrQQ3b3MzyR7xcRRYVzApGHDDpl8cEZxk+lesyaloGka7Nd3FiX+yq0gLsQ284LvjOCcjkUoQ5LKq9+v8An2JfdklvFpiRW1i0DGa8hW6Zc7fOMQz8zHnoABVCDVFvBHqF9G7mIsY+cbHb92RkHkHP5Vh61qPmeIdL8bpcmd7dTKYWOfMwpBIYcbcEcdu1bmqW0cbWGyRbNbl4HkZxk5lQsiheCQd3XpgZNYYjmjGMqXxJ/wBX/ATl1Rlf2NcyX11bwSxW2941hjxjy3ABznONpzzj2qzrFlp+p/Y7yS5k+2adcMGkCYWaQgqwKkEnn3/CptI8H3ulaHdweIbz7XO0cbCNSN+YzvbBPOMEDt0xXI6DZXGg2riRpZoGlWaIswZ1RwDv2ngMN3OD0z9K9Og5SheKW+77f11+4rzO28M6dJo8FzHOQ11eFRCEGRHsTG5EPyg8nqPU9sVgTw395rMFnp5dLuFzvjlwqOiKd5Pbk8DFR+ItfaW7s72S6VWtJVeQ7wGMQ6AkHvz/AJNdVJ/ZGsahBc2MpitZkZ45Bl34Rd6O6424yeQPetZpVUlB7dP1ByuYvhjVo4b+aGJhBbQIV2qu5MhjjJPGVNdfrUVvd3trZaGhvFyBcvt/1WB83mN14Jxu6E5ArndN01LZrw2du8drduSxL7+NwIOecbRnBPXNTaZrUVnrsshzOLlH2mEFcxy/KN+PQgn61hGjCT9nPbTXs9dvwBWuXIr25G1r+MPDp8ghjJJKy
I3IXnuoGSKxpNYXVL+6XWLnybGYC5kuY0Lu24YCRrkAMBkcnAxzS+NtO1IapZXBAltbRIguxvmYn5jvA43ADbu9hWHrMWn6xbSaFpl0baWwUu0bgMrKTllyBnbu56Zxx6168YQjU1V21fXZu39f0y+a7szm/Gmt2uqLZ/ZtPjh0e22pGsbsZ5jxgu5xukAOSAMnkAY5rglk8P8AiKGSPXrSfyoyUtZCpODKpHz7cMoUZ655967GXTtO8QQ2fh+21SFVtpUEhjUbQW+6/OOV/DtXSWHgO/01zaahPFdSRy/vJmXaWtcht+BwcckE13X5o6rbyOlJO1jnGm0uySHT5HQiG3idfJflJIwDt5w2SnBz3Fd/o2pmzisbiFhAk4hjhjlcZUHowZj8zckZOa5K/wBDgtlk1K2hiiu3UyxCUnKqW2kgMfnwox2+bgVyF7o+sXjWOpaOJLkS3EM21gdkUUfI75O7rjoB715qvCdpN2EotOzZ67d6zYWgn1TVD58Fw5fcWJ3gyAeYXX5hznPQdTjtXlPifxXpWn6iTJOYdPVkIkH3MEZ47kk8DHpWz408NXLsyW1z5ljqcnmPByDDtHKADoo5bPf615TD4R1Lxrrdnp5YRC5k8hIXBaIRklEdm67V6nAxnv1roxOHlXSTlddF+ptKneyOq0nx1c6lFqSRQJM0217c7ssSnUN398dK9DtPHd7dWulR6q8saRtJLcBy0aMN2QGOOPmIwOpA968L8HeA/EGl67JMkKXBguT8ijiWOP8AjjdcFSOCwI7kV7zBpdo2qXXmxGWCZtyhnyGZgPkYH1YMOelcGJw0afuzldPy/rsa2jTSZ6cfF6adaXmna9dMuoT7Z7fbFkNEqkblkU7FHOAOoIPSu60O2EWiWl6jLe6bOhMbRjEoaMDfCxGSCG5OeqngV5wnhRdZuC9w6ppduFtrNrYhmctkPww5fflSCemMetdPDHfWthLb2rTzPsHlCQGI5jTA3A/Nux6jOeOQBV1aHLJ06b+HZfibSl7zkem3WmaHrF/FfLbRjYNzCONWLMgzJJkYO7rnJ7VxN7a+GJSuq2TnT7i2utyM43eeh5YbDhi4ZcjIHBxUvhu0e4vrSTRb9o18ly7My/uZJPkfBOFy27HP4V32veEtI1G3uUDrbXkUMM7NjcFfndnuTg4GO5rGhSm5yrSd7/dsbzqKWltTw7WLfxTcXl74n02eeCKSVGtXSB0ljCj5146E44B/lWlN4mk1jTFv7w+dNKpkAZRGx3qVGBjIODkY46+9dXp179utSzzbEDARjzAxaXlS21snIAPArVv/AOxYWuJtPt1uLnyhsyEBVkX5jnGFBY5/Ksljoyju1c4qlJNXvueZraJYpbx2sjxQ2aMyoQCHYDDNn1Gfoazo9XHiBnsBL9qjCqHEh+5Ggz94+rL9AK5SefWNY1oxadILmKPL+XGckwIDvOR/CuMHJ69PfsbM6bonlx2TRRx3X7uWSRQ2dnDIq99rHAPrmvGrzxEHqraaf8P5HnVFrrojK1OPUYtFujAmLi4KrHHwSF3KAxI5xkd+3pzXLajr0+hz3FzqLJNLJLkANypZdyllAAAAGRnrivZYrG6uSbbSGggvZDm4luNu7yl5xgDK/Lk9s5rz3xH4LXUp4NHsUL28l07Xc7sVDRyJ1LdW2rheOhye1dKoxdNQ6L87Gfs3YyPCviKHxPZSXclwbNbtZY4t6jErodu6Jm6Y/Iniqdz4T1XS/F+r61JIHga2X7Pg5abzBlHVR/ACcHPcYrtYdO0LwrYafocdhbQyGSR7IshcLLgsXGSxIPc+vYVL4fvJ7uyinWH7O9kjRRXUrbi8TfM4ZMtgZJwCcgHsa7aeGiotRlp+Q1Dojy7VNUutC09Nbvm+0XMr5UBRJGpjzznodp+o7etcpr2o+IL0XXiSSOSYW8AFyq8yRsyqeUByAe57V9D2uk6fPJY6lAgk0lLeSS3XB2LIuUckkjAPRBzg5JrJt
LXSbq+8uyQvLC8jFUYeVM5yil8ZDYzgD2zWmDUIWu2xWS3Plc+INaN3bxWkUhkuQWdSrAlWUERg9MkcnHetTV/GOoadqM/h/wAQrNbqLdGhtVQojMhBBdPl69WZj05r0rTtC17xB421S9tvKCWxe2i6eYsgGCoZs7dnByMVK/w7vW8NX2ia7Zya1rIUQ3OpywCeZbVSCYnIlRgxX7rA8gAEnpXoYfCRqSlFqzaXqzSMUzN8P2dxq1vHrR1GON5XiaG4jyCxXIaP1whHHryfQ12LazrOp+KbqzucO1nKsZjVfnRzFuz7buefUV0PhfSW0zw7pugpBblNOmAj8oMh3/KC8gf5wfLGeeo47Ve8P2tlqXiCXyZJFE8nm7cHIcHa0T5JbH8Rz9R3FefUwcW7bK/5Ezs37p//0fQode/s67CTCOSK8KyydPldOD5fGQVBFc3oksVg17NqMPnCc+ZDKrAlWWTeGx1BPv1ryqTxRcxwwRz6Ufs0K4iZlPIYqAzDGe/BPYVg/Ef4gweGtAkeznjZ4Y3YDoxkYjJGcnCjt6V+AU4VK0o0rayf+Z+W08O52jHqzpfH3xe8NfCSLz9Lm+2apqZmmWN1URxJIACMc8K3QdevavmbVP2ntZ1KYuJ1QsCrBR94P1yfTFfG3jjxlqesapLcXcxmXJZSxJ+9ziuCjurm5kCqTufGMZ5Pav1rLeGsPSpxVWKk13R93g8mo04pTV33P2N+Hfj9/GGjaTqU0ayeSkkHloOOXyuc8HPA+te43OrQWl1YS3y/aJWQzIjLs8t1OFOehUANjFfG3wi8H694d8BLLrs0VjFYol05lfa7CcYVdoDP8pwfu9a+jdY1iK+8LaJfwXCzT20HkxFiwSUIeAGKhiMnBGM5r84zjKYKpVnT26W9T4PHUYqpPk2udQPEUuteIDpkN2JZ4ApYBAMg5cMXJxhOMD1NZd34xuF8QNY6vL5kjNI0ip9xl2nJ+g28VmeHfD8GlaTJdtfRW2oXtxGJi7hvs8ceAVLn5evy4zycZrF0y3sbzWtQ07Urb7FdwySO55LbEI5BxwJc8r27V49HAqELR20u/PzRzKlZXNW9ttMv9PsLq5uI7SPWPIgMTEmeRVfcSoQEjcvAY4HH1r0jRoT4b0w6VBcyFmmWQI+0EQugbbwST0X+tcLf+HLC1lbxRNqLSyj9x5bKBHHKQCVTPLLEpOMcZyO1bmox22j6xLdXMm+4xGznPG0DeB7HacEdq6ZUlSVkt9L/AIA42Rl2t7qk0FzG906EtKsyFjhmAD7R3UHocenvWrod3PqonntMJZ2vlRHaf3qlRuKnocNnj0wa5ptWK30WsRsqx3khZ4EUZSN+EwOACMnLf1rqfDWiDTfD/iO5tpgsjzid2ZgMpK+1AvOTjv7V5cIu8/K+3kS4lhNclg1ifSNemIWWXdtJ/eiN1G0qR056A5GKuaTLGNfS8XM8su9WC5ybaMfN15579q5vUNAPiG3kutJ05pb1MJaujHfI6Zwpz13ElePoe1UNMaWx0yxnnYxapFb3SvABsOXCsSM9MYIxjiuyrCryU6vZ/wBL9QcWlc9Rj0vwjBdTTabEn2ed2bzSoxsUZUN39/Wtpb+VNO1LTZFz5lqN8pQMTbMd7hT1JAxj6gDpXnQv9NS7s7SOERWc0cauAAzKZABjk8MpO4+3arK+IbHw74lMBeZ4bfIWVceVuYYBJ78jpjpzXrRbcoX6uzsdEW7plG8ZrbXdV0I4migYC3uJmKiGdV3v1HzKwIOM4GM1B4Xe5n8RyaMkUkZMKi0mjbKPI7bPLbqwyAQpHQ4zxUPiqxvvE9xJqlxbu/nzGV5jykTRwlbgRr1KAnPtketbOjaxo+n3cGtaVOu3TJM/KuS9ynKlvbsQDwR7mm8NH2vPL4b7d0a769Dvm06PT9LeS+VJXjk8tfN+8sbuM7h3IbGfaqGtxQ6dqcb2CQxRWY2S9MuXYMPLPUfMeQBgmub/AOEkn1u0i
k8yKa6jLSMvBaU7sqCB09TnnApbq6tYLuxt7rzZhZL+8ljXKSOWyAHxgDAySc49M4rCUU/dj8jXnaV0UtItfGia/faroFrCdNUZu87zxK23amOEI6luvGOlep6folhDb/2tdZl3K0X71gpBySpZkPODz05zzXNXuuXq6dLHdK9vKJNsUcSkRIQTuKoPvNzndnJPJrLMraZYlufKny8JdzuJbgSFTyBu5II5J9K87EU7Num727/1uKTVlboavhq71PRrK/0Sdg6RzecsIGAEVccYJOcMDkYB9K7S08STW9p5s0g8zcZP3nQKGxu+bp6HFecaQk80sMlisktu8TB5ANu0bMYznpn5uenrV3xRJqGoeHf7LkJW+liaNY7dhICvDBhg8AgE59a4ZOo6XPH4l19SPaato3Ndn1nRLaHU7UR2yTuQI2cNlSx3ZGeA2SRkDrx2NauoeKtX/sKI2km29uEaNUYbnYRg8HvkAbuvSsWxWa5a8OpSR3OZI5o42bdtVFwo7HAAwfSvNYbfxDKmr2MCzJMX8uBmJQJK0IBfJ5C46+lcGAxkZyfNG3z/AKsNVbSbLt/rxi0uz0/TbiNdRl3zM/3slGw6gHvg/XiumtNT1W20udrlg1w0CTfvCNyAzfKoIHIKAE+neuG8P/DibUbax1Xw/qEU76S72xFweEdhgumPvB2Bxn+I/SvYdH8Dz2sF7pf257vU4hFFc7jj92Uw0ijnkO3IJxgEV0001fkaaX9aehHLK9jkdespbCDUPF/gt/Pudd0+eWFIvmVLmCA7lRVGAA4zt6HNfDnwm/aD1PQ4dP0j4qWyzLNNvtr91wqMzfvFlUd+4Ydx61+nEHgvQtM0xPDty01vgSRweW7bmZypOxuB90c46Zr8dPjJo114f8WeIvBQha4t7TUpgrgc7Jj5i564OH4r7LhyNPGVatGrHdJ+ato916ep9LkWFo4h1KNbe10fqjpfiXS9S1eOfw6JLvSpIhJc3Eaght5Kbie5IGQDwMDpzW7rA02Gb7BHqJuLeCOUYVdrB1wWRgOjKOcd81+KXgr45/Ez4MXsthpl7JLYSDDwSjehXuRnO044NfpD8AP2hfCXxivbPQptGnsL0M081xDtkRrhicuUI3YYHDHPYY6Usy4HxFO/sLSi9ujXy2McTw5WXuUlf8z1bXYNRv57D7JDLdLcXSC22fu3Uk/Mu09OB+Narx3Vi0WnJeDyjKIoRs2riTPzZPcFMEdAe9fRdx8G/G1wv9p6dbRag5maZGBAfawOCnOQ2D1/CuJ8TeBtS0vVrC11ZDGsLu0jSKVMcUgOQpPU7icEdM9K+Sq5RiaFOUqtNq2z3W+x4lfAVaUXzxa9Ued2ya5ZaZb6dfRAxNty7kCJo1fkDsASSSBgkV36t4cmlttS0C0SC3VTsSFDiERhlAVR1O/JUn1z1Oa5e31aaw0e8XxFdRLpNlMyiV5Fi2xqS3mEt91cjB/XrXzl4n/ai+CuhS3Wh6drt1c2cg8me60u1eRFU53bZflX5uhIzxnFenk/tG3SpQvqr6X6/wDDkYfC1amlOLZ9ZalrVtfXFpqhmCrKm4xxkIFk2AsxA6s5JCk5x25rgdfTUII7RtLuFtJ7mMkOymSEbmyockj5hnkvuycc8YrE8KvoOseH49c0K9+16XJZxXdtNnIfyTsCpjno549ifYeiWt5capbNa21sWjWOMAspfYSpfK5BB7EDPQ5961ljZOrLmev63189DnvJXvucXq/iC70SS1nkiS4eYiKRdyhhgDLSdcDBAzkcCqB8aKb5p9GVY13lZECkOgQj5/MxhwWHOTnselGt/DrVNRtba60CVbq7GPtAkkdtz8uJCVBBznkYzlQPemWPhGTTbh2trc/aZh5kgDEcbCzkK5/i2gheufxFYYiTgm5vWWi7v9ROL5ea2h//0q3hHw3JZ+Cp4rkPfya2heG4YSASBGRiwDHcjkMy7TwcE+lfE/7U99YrLHY6PffaE08mN
kZQrLlcOGIxu+b9K/VXw1f6bqmjkzzsbeSQ3Dg5WJ0UEbQB930IHrnivxP/AGnbNLf4m6wdNJhtpZmZYj/AD2Hqp7e1fl3CyhVxXN22PjckjGdfm7HzFcNI7HcdwNewfALwbD4y+K3hjRLxysFzfQhzgHA3ZPB64xXlVtAHkWGTgSHCntn0/Ovq39nYR+GvH+g61dbTHbXSM3ujfKc/getfp1WMnTly72f5H1WKbVOXLvZn6d3Xw9lv7HWtJhULctM0QyeSu9dy4yP7vAwBg8Vv6Z4Bb/hG00qeQ30unXTeRDHGQyj7yqgwMlWA6dckk166lnpevx/a7S4mjnRFCsQrR7ckAhicsw4zkHOOCKwIfEgvNWOjvdNNGgCyyKShOzKvGSeMA8cdehzxX4QsRampW2ae/mflspW1uea6D4UOmXUUniPKLctOrW9yUG4k7vmCk7XYtgZ47YqrLaW+p+IX8QadF5E0FrLDdLLlCwi/1PB75+UkdSB2rsPEESTB7CBCHUmSRQwxhtoXr6ccf1rsZfC91Oj3AWNIb+NJMs4d3lRhgHCkqoZfu+ue5pLFc0m1H3SoyV/I+dbk6vP4fm1PV5DavFL+5WXgoO6oTxjac8cAnmub1u71K40iTxNBYzsHRiYmiZ1ZYv8AWTkAZ2ouCSOPWva/GfhO/vY5NR/0XVdT09GEdhJLiCPrhtnBkIPUYHpg4rj/AA9F4hvNWh8VPNcx3mlvAWgcZys0YHk7DtCxsPl8vG1lOCKxVByftKjdn26evmacqer2Pnf+3NRtPEzyIJDFLDG5DDcVRscHsNrEfh+Nd14Tg8W3unWd/Av+j3TSRSruByC3yEqPu/j2+ter+NvCuh3mrwanpVqsUsyy2kkSEAbZEIZdpOAAw4OMgD8a3rCC0stSmu7PbDDpohlkPLBgCsUagDGSCozxnA5PNa1cLTvanrf+vyMny9Cra2+q+D9WutAa8lulDKJ442BEM0aceVwCcHg9PTtWMdPsdQsn1kXYZpSkgkK7nidTg85xtbccjnpV3WdQtLO3l1CQujw3MFwzoxJmLH/WEEEhSdxwD1IFc7J4e1vW2B+Htnc3y3M2GjgiYiMTgbwwx05ySfeuqpUk06UVft1tfb7jVU5S92Kuc54isbm0vXuLMO7SvvIzwxdyCQAOAoGP90V0Os2s1/oS2RUnWZJBcRqucPFFIFBI6bn9SOAOa+s/Cv7PGvaha203jmWGwtYHeXaH2zKTjaQBkDuTnOc16hbaN8KfDOpyPFp3+mP+78+YGRQB/dBGACewr1cuyjENptWXW59TlnB+LrJSl7q8/wDI+DrzUdXGlxaYka72MymRuGUyhRIpK8lWCdPXPrWDbwalLdNpGk6aWnVEP74bULkl8tuIAGTn8K/TO11zRvs921gtqbi3xIpMCoTt6rnHJ9Oa7iy8QaFdLZ3Gr2NrLFeIAd8acEjkcg9R2z2r1YZF/NP/AIbse5LgKXL/ABbv0/4J+S0drcXniWyuYLkwwhFd5o1KhpSCxCgD72wFVzgDj3rbn1ww8afqH2jypBMNi4YrJx+8P3cZAzg9Tj1r9gh4E+H2vQqreHdOlCfdHkRqQvTIK49f1rkG/Zq+A1/K5l8LW8UoI+SKSSNcryMKGAGPpVPhqTuqclr6nkV+Eq0dFJM/LM+Lru92R3SeXcBlYKgyqtJ8xwT7AA/U0+/1O/vA08TiKcJtQDkOGIUbQcghcZ9jX6eT/s6/BNp/tL6NJbShBEds8m1lByMjOKw7z9mL4RX5KW63lgIxtUxT5ABJJ27lOOa8OpwrXi21NNv1MHwhi7br8f8AI+HrNJ9Q0/StHWIXE0oMnl84CE7WkcDjIxwDwK8/8V3F9ol3Nq+jPJcvYMsdwzE52OwkHTA43YHt75r9DrT9mDwtpWpw6vomtXMM1oAsSyIJMAcjJGMg4+YY5rzXxP8AsveLAsb6HeWt89xuW7eQmIMmCVGwgg/N156UsbkWJ9j8F
2uz/wCGOWtw5jILSF/Q+VvCl2NatJJ9M1OOO6u0eOKUhShKBmOFY5Y8YIxx3r0q21Kxl0t7S6t0t7hEG5nUmNVUL5nHXYe3t0rZ0H9lz4jeHbaC6j01ZbuGZ3S3hmj2IW4LFyeQRngDvk9K07v4O/Fa01EbtBunhUKHKhZFA6bhgkHA4OP6V8VLIcdTbcKL+SPNeW4mO9N/ccLcaEugpFPZx29rJPNInkQsQkkAAXJXrlcbgxIxziurggtraymub8Fbm7dbdJ1kEbDyxncWbPDHkA5Peu4uPhV410+4i1O/0V57vZNEjhc7UkHykjptUZ96868TfDf4gP4e+wrY3T+a6yrIiOTk9VAz1AyeSM9K6YZZXpJXpSu7306v/ggsLVv8DDV77X7/AF+H7KwuY9KDxxgKZAFmxljjIVlK5zxgivyj/aS1651P4o+ILrS9nkrKlvM8Rz5ksKBS5A+n14r9Zb77T8NdN1zxHrNtcR/Zo1IZkaPznQAc5HST65HNeVWvh/4T+JvCd3q+s+H4bK6vJDdMVwzBn/5aK2M5yeR6+xr7fgbATpSnXrpqW2vy/wAj6nhXK5uc6sla2h+A3iTVbl7zy5o1DoWDbSct6E81+rP/AATLuvDeitq994rjiga4TMc8oGViDAEDPdmwBivkv42ReAvC+uzXmkaUslxvZdr8ocHg/WvNdF+K2vaQx8lxAkZV2WMY6D5VH0/nzX6PW5qsLU0fa4VwoVeabP7ENB8UeE4NEbVIrxFtreMO8shCgJjqc9B9a+EfjP8A8FJP2bPBdrfLYX0PjDULN2hSytIvN3yDPHmuAgUEcsCfxr8dfBH7T3irxJ4c1H4danqkkMOoRlXcuT95CiAD/ZB4Hrz2r5Bl+D3i+x1qXTp7WRw8mFlx8roT95T3z6dayw8JyvTqO1vxLxXs/jgua/fofQPxu/aW+Jf7SmrSan4ge00PQoJGNvpNjGIIFQnP71h80zDuW4z0FVvBHhHVdStXvJ5CbOFCWZAMAAZ5rzKLwbe6BfRw3tnOYQdu4KFYAcc57j3r0Gz8U2lroDadczOk6ytEnlfeaHrl8dTngVpisFDktFWZz4FqnK9j9DP2PdTaHwFf6KsyPBpOo3Vrbb93EFwomcKPVS5AA9TX1JdeMNP0/StN8P2bS2kKptaSID5iIwnzHPJx1GRXx/8Asv6Ovh74X2/iSHMd3q+oGWUyAs0Ue5lGPcooz719JXevaUNOEqQ/bRp8eVjAKygMM4Y9SQO55AGehr8IzfGRWMrUFo7v81f77H5Lnk4vF1eXTU6XSdYh03QWsI33XeokkRCNmcRAqScD5skFsA+/HFcZdeJU+1zWsUOBuQSIUJB2HahBz04yB64rC8MeN4NatH1C1BtruSZ1jX5gqRgbVLH0z0OerECsrWoL211dbzTbRtQTaGIaTyrcMhBYyOMAE5JUFgTjAHFclSaquKirNaedjyuZvRH/09yC9E32i0Aa0tdPwJFRvmbzM8J2YZGDn1z2r8z/ANqDw5eWnis6t5Spb3pLrtb94oB6OnOD7jINfp5d3kElno1lYWj7o53UhV2PJIqhRubjP7xgoyeOSa8W/aR+FWr+KPBx/sKa2kv9CeRZbNguSAoZ3RznndnHIBAz1r8L4RzNUcYnUdovTX5H59k2K5K12z8nLK2hLDAIif14HBzxXsfhO5e0kikt+iYZT349vf8AmK8n+zXVtdvbXf7ho2IIbj8MV2Gjasumyxs/72InqvX9eua/oCi1pY+7lqfsd8G/HEGuaDYamJSJbeZTKT86xSrtCFQeobrjPrXpGuWFr4lmn1GBtt+oEcsUGBGzuAULA4CliSWb0Hc1+Z/wx8fw6FeRNb5MCsryRtyDg5zjrx/+vIr74uvFmi+IvDhu9In864vo3BBIiEe3LYcKuTxwNpwMdecV+R8U8Kzw051aEealJbdn/l2+4+FzfK5UpSnBe6/wO2h0XRYtQOrRyS3U2o5ijUZeN
pYyAzkBgcr/AADHXHU5rrfDFjri3EOnxTefLZXDRIkeXeSJhv8ALYDndnn8cHNc/wCAYb3QvBUMepTQSfZp0killwfn5Y8EbsfMBg4zyeleYW2v2+l6hfazFqUouoHja1NuxBmnUqpIbAwpdhjuecYr4rD1I80YR1S/p/8AAPEjG2x6B4p8MRXesP4j0yykRbvy43LuBGGViQQoOY2Qgnn8OuK4fWvGEFneXFvbQs76fMwt53ZxIMsASWBy5PJXdkDHFdNFZz32rJ9r1KGzmu5cxxhd/lzRgtI2MiMfOTjJyMDIrG/4QhdWu9cvJFnnF02QWYAyfZgHJ45VSRgK3JHJ5JxtGNRttaNu3n93TuXFcz33OGiuoZZpdZgLSR6ltCEud4niJVsgcEEEZ967TSIT4x8QaoLXT/NWCSM4SItGwiQbTtGFJBPAHcknvW1f6HodhYWt1lh5c3mbZmXc6SJyiIMErEygBsAE5xkVnaNrKWAij0mWSzErEOfNKhi/PQYz9ADivpclyOcuZzXuXTPpsh4ZqYqV5u0O57ZoHwb8NCSHXPF9/Je3CiUJaRBYUCyDkOPYAAY6dRX0PpWtWOi6ImneE7FLe0thhUiVido64I+9XhXh/Ttfht1uzJ9tG7GyST5M+rFuR9AMmvc7K/1y6sVlhVLdYAAWaLC++C5zjPfA9hX19HA04fAj9ZwWV4fCx/dQs+/U5fU77W9QuJLsXZgiYco6AflkEGuJutGF5OV2vNGPmG0bdp64Jzgn0Nem3PiKBovLi3ahcqD8kSmIBh3JJx+Veaah4s1GxePECRwsdsoOCFBwMF9zfj8tdWi1R3TkKsNna3wnhI82MAT284yzAjsWVWPHpkehr1DQotKigXTbdFu4GffHCgJ2H0w2CCORxnNYSppV5Cs1nNhCCU3qJIwQOiPjj6V0WjfbSI7tR5DRNyYzwy9tw5/P9aFvoTJu2p6TZ6ykd2IYX+yzRplYyACQvbLc5HpXdWOv3d9AGitsvjgrgn/JryzWYGvJ4btNyblGAGyGOO2OT+ZrS0OW3t4Wtrm7Z5s/KiIYyuemNwBNWvdkc9Smpq9tTq7jXIp4ZbbVEWFwSMN8rfj2/SsWbzblQ1ndCFoRlRkEEHpn2/WrFxZxSWzi+kkMbjJ81QGUduRxXmNzePpV5PaW85nTZtB2/Ng9weB096xqrW5vRSWiO30q71zVbdt6C2vYHAGDuV16E+v9K7Gw1K9tR5V8oM8J5weGQ9COteP2us3yoiGXGOQQpBAzxg5HPrXTHVtRneK3m+RCRiYHHPp3/nWSlbY1qUr6PY9kTU3wH2743PUcEVrW105Z1YllAyG4Oa4G01SGARoshmKjnI5JFdDaXQnPmgBGzwP4a6KdZt2R4mIwiS2OqIaVcFin061zN+1u8ThZnDLn72c5/MVzXibxI2lWrzx3XkurYLHGGPoBmuAuvFGo/wBnSahcAx5YbSUIyp/n+lXOqnpYvC4CXxXPmD9sfxGLXTtO8O3EhmjnzO0bHKllOFxn8a/Jb4lfHrWNAhn0+2t1REG3cQQpGOhA/SvrL9vLVr/UZtOjeeWO1MIdJUJUltxBAIPr2rxj4UfDD4c/FX4Ja+uo6VLrHiGzhLRfZ5D9rOzdgjJwSTwSRjoOO/vZZlkayitrnFmmYuk5eR+ZHjvxhF4naKdciViXfJyNx7D2FeaNcO2FLHPbPtXoHxL+GevfDXx7deCtYH72ARyoSMZimG5c+4zg+4NedarCLK9Nru3FOpFXOg6bcGtjzPa8/vdz0TwPpr3Wo2lyzmOISli2fQ45r9lfgN4p0FILOHWbeK9I2/vJEUyKc7cAkE4XqP8A61fjj4E8PeNNekz4b0u6vLZGBPlws4PPAyoPevqDRfFXiLwv9n0TV7OeweBdxVw0TN3BIcLx/OuXE4SppVSPQweJgk4SZ+rvxP8Agp8L/idoV7b+HIE0vUrgFmuXZjGec5OP7x7jNfiT8TvD958Ht
YvdJ1tPPnndvs8sbjY2w4EgGSRx6j8M1+hvhv49aOuk+S19LPcOm2UbgOnpz1/SvhH44+Gb7xt4qfX9FMs1u4wVm+Yox5HcjB9RWUcQ5StPc1r0LRvA/UPw1Yt4C+EHg61vbpWl8Q6bY39sBhzL5vzgYz8oOeT68DvWV4h8VSJb20scYiuQyrIqx4a4ETbG3+mcYwRjbyea9Q8b+C9Qsv2T/gXqEdlJqOpWejQx427lDInmASN1CLngdMDAr5ul8R6toccfh/xLBDfz6ixmLtHFI6XBUAIkZG9Ii2FcsFzn5cYr8izvh7/bpz3T1+/U/JM2wkvrdRf13K+h66ra9Do1u32WCS/Rpo1J3qZCcHcAAVXBAB4B5619d2+m2Ftp9vFcX8jtAsU7RRxCURhwNrkYOWLc8rwDn6fL/g/wFeDWoVntjperi43ahaTK72c0Dv8AM2/5tjDrgNhT82R0r7QN5op1QX1xLHGmpwW6RyJtaBY1jXjGT5gLJww6YwQa8nGYWnBrmdn/AFucStCSZ//U6Kz1P/ibW19qDeTHBGjRxZDBQ2ERFT+HPJGec81H4onttZsH0OSykbzbjLyuDJ5sJIGDgYIC52gY6Y61z77pPESJbs0dtKVW3bITbsG47uvO3AXrycivTUltrbUpdT1BpYrS8tvO2K+1UkACkKoAHzklh3G4n2r+XYynffb/AD/4Y/LXUSakfjz8YvA9x4E8YXdlcxhI5G3x7G+Xa3OADnGO4NeXWMZUGSMbM+nI/Kv1O+OvwmuvHdnI1sN8sMSSrKyssY3IpMe7kEbmCjODx0r8yfEemat4cvWsNRtzAEYqCpyCRxkY/Sv6A4U4gjiaChUaVRLVfqfdZdjVVil1NzTdRW3kTdMu1SCAOuf0r3Dwj8TrvQrqKSyv2UowLJIpwfpjg/XOa+X7W4nGCQHwPvHrz2zmt631Se3+VQuMjnNfaQnodso3P1G8O/GHwzrWg2uia5B9ltw6O7QyPtchg3K5Vj9N2D3ruta8c+C2jEfhi/tWt1LeUk4cPGf7z8KD1yFT86/J+18Q3CHCTnJGMK5x+Wa1oPE2pL8qSMieqjH155NfLYngzLqjk4wcebfldv8AM8qeSYd6pWP1E0j4s+FdLQS6rILmYx7EijQjywvIAIwAGOC2DyO9eSeO/wBoq/uzdW2h7LJLiQSu5kzI0hGPkXoi+3JHqa+ErnxVeSfIbmRQePlyxwfVj/So7LXD5+XChF4yQC5PY1WA4UwOF1hG9nfV3sbYTJsPTlzJX9T6u0PxlrOoXj3er6hJMXAB3OSxUdlA5x6Z/CvXtJ1fUYru3mbbDPcH/R7cNmUp2bOcIO5zzXzd8Pml1e5WScKYrZScvjBbqM49Pr9a+lvA2m6xr/jjS2spTcTTN5asIt0eFGeE6tj1bj1zXRipJuyPt8ArQR9m+BfD+uwC11C8vWLv0jhKYPGWx5nU4/iJA+te0ar42a3t49FgiaJ2AzNLudmP1UBfqSce1cboejN4XkmeSZbi+nHz3F1MXdB3JxuwM5OB19BWnd393NMbW++y3FmwHzIj7iccHGTj6k/hXntLZHo8zOG8T3upXyCCyti8QOXn3lXI68ck/wDfI/GvKbu4lt4ZjpcrQqjfPFdbmOSR8ylxz7g5r2PUvCUk12uoaWrFkGEVW+X2wOv51wes22qXdy9tf6cJrnnLA42qufmPPb2FYzVma3utDtvDWujykh1K82zsPlMeEQA9iFyD+PFev6V4kl0G1WaV2njkXBlwQrKfTbn8OTXyBP8AENPCzfZtSRZ2tjhGETfKvryOoPcHn612XhbxvD4jjk06KPzguZI9ykAIe4B5z+fvTjJPczlofUknjXT44FgvrZmilBaGQLu2Z7ElQB+NcwfEOtWd2RZpELaXDA7d/wCBJHb2Nc5oeuai+nyWTS21zbtlfIDFJF+mc/j0pJvI0uSKRI18mVgN+fnT23Ac/lVN9bkJ9D1O28Wfa7Rre
eaNZIud6LtOOmAMkZ+prJ1h7jUbMSaW8UNwp+RJj8sh9sgYz6BsVSSy0Z0a7tD9sWZQNrEHLewz269Kn060s7G7NzeKt1gdH+YAdwN2OlZSnfcqnoNgsvE8lkH1GxWHad3mRvtXI7kc8HHYGvSPDeoubbybl1jDjnbtII/DB/T8Kyry4lhs0urWLbDj5VOGU46+lc7b6daanOrzXMtlJH8zLH8sbA84G4jk+oqbJPQ25rrU9wsn0a82RqS8rcg4HOK7COFbeFirYVB0zxn6f0rxjRpDaXK29j++cjnceiDrkNjpXr1oY5bMQxKDuB7dPU5ropa7nBi422eh51e2n9pajNFc/volww52gHt2z+RrkPERne4WCVSVtsdGcE8ceufzr1CGLbPLIeevyj275HevMvFt5fQOyxIEQN1DKmOOvX+tZSWh0wqan5//ALY9iNc+HFnrK2+0WEzJjacgHBwSTzX5ReFvE/iPwjqxl8N6lPZThiqSQuUIBPTIr+grW/BI+IPhHW/DOtrHMLuNmiLuHcSDkYx0NfhX8S/B9h4G8W3Hhz/lpYyN5iSgBkB9SOD7EV9BlFa65GeDnNJ39ojwj4m6hrHjnxZe+KbmWW/ntFitZZ3YyF3iB3MTzxk4FeN2mh2mo+JoYNWuBBDM2ZHOcKPfj8K+gryfSbe2k0nRL5FjnleW5dvkw7n7vrgCvGptLuo/ElxdSSRyQxEr8p3BgR616NaFzyINpts/Sr4P/Gj4D6J4U0PSPGYv9JufDcqyxfYEJju2jVlj80Ag8buh4J5r5l/aQ+PUfxi8YTXOmwGz0q3CRwAqFkKx8KWwccDsPWvmq81zaWhJwwPUc8dq597m+uX3RRSMW4yFODW1bGycFTb0RFOglJzS1Z2Gl61exXaRrIXUEdOCPxr7C+Gd5BeajZw6gBJZzEA5BJBH16CvlDwV4f8AtbqLpCrN0yOpr9Rf2O/gSfHvjfTEujIbezcSSAIvlMiHJG4nP6V85jJpuy3PawN1q9j9TPij4dQfs9+CtEt/PimhNoIzD8sgjCZcEj5gpXAO3B9Oa+Qp/AdvqWmyXp0+a2sGjRjcWDjz5LwlsNPDKA77Uxh94OMg5JzX29+0d4tuNB/s/QNAQyzW0IkaFAp2Rk43FSR91VOMAnt618j6L4h1HVL66N9DbNYgKIjAxjvcZO5ZVI2fewQNqfLn5q/PeLJyhiYy6RSW2/5nyGZuM8RK0dWc54m8MWv/AAljXEt7NZ2ErxvEsDlPtReMeb9oi+Uq+CSASVHHUjNHjDWfD4is4oHC/wBnlFt4NgBeJSwIwpADen8JP6zfEHUdS0jz9AuZVvLi13iESKdzKyhvlkXJzgjjPUVymkTaHrVnb67qEUcEkmLeUldzjaQDjPUZbJ/M18LjJ+2m51F9/Tp/wVufIYqUYv2drd/U/9XldK1O0u9Eea5aQs5AXd8vlTyx7W2g8jaOcZODgZqK91G51TT9Huz5tulu0haKRjuzG2xdxJxgqA31rC0+wTVNgTyo50mwMkhfNfO4xgnBYAAc5yfSudbx3plrbas64le3QiNrkhsKGVSNo+6c8Z4Oc4FfzRVpScG4rd/0j8sqQeuh6lpvjjUby9VIMCJisWxlBBWRcA4GfkY4689xXzv44+FPhfxat/qHiSWS0vnkmFsFkA3yR5Z/lxyowQDxXqvhjx5c6xo+pauunWUN9ZziXCKwkaFUCtMY3JVkQEZIwV4OCCcTW+oxa7420DSzYQONRjnLvGu4u+wkKN3Kjd1I9xXsZZSrYetem3DT/h/+CdGHnOE7xdrH5teJfC0nhCb5rdvsr7tkrL94qcHoeo71y0VwJ1DqhweAB05+tfpp4n0bRNTtLXwzLoMcl3ogknkE4kmW4aU75QgXgHGAuFO4qfauA1P9nzw54ntwLJX0S7eQxQL5bIlwwK5ClgoBXeufr68V+sYDilOC9snfv0PqaGbx5V7Tc+GbZoYwTKhGD0+UEfkDV
h7twwVAVQf32wMfnX0VP+yn49OkXGraY8dxDbhpNiSB5igbYSUXLjntjpya425/Za+MdnFaXUulOiahEk1up+aWRJPu4XnGfQ9B1r3o53hmrqaO+OOov7SPIZb6Ifukky/seB7jj+dQC9MAXYcuDuGfX1P/ANevZvC37K/xV8Qam1g1ulgqhjJLPnA2jOF7E8fT3rstS/Y/8T6VDBcaxrEMaTyeXtjVmc9yTkAdPX6DvXHW4mwUdJVVcKmaYeLs5anS/s8T2l9ZXv8Aal4IoiVGAC8kpzkhEHOPpiv06+H+l2ei2Vtf38kem3d0oWGNh/pMcDcjKrnaX/ug5x96vm/9nP8AZj03wTqMPisaib5rxSkFuevmIcMxJAx17Dr9K+69JtPC1lq9zcRsr3Ucjpv/ANY7SJw20nIUD1GT/KudYunW9+m9D6fKcdTrUVKDN5fDWh3Fz5Mk6JBHh5zM23GemT3JPbHNb8kenWk2LBUuFdQFKDDbe252OBn0ArJGj+HrmE3UULTPuLuXY7Ceh+Zsc1kK8S3wiCvbQEkAK272HsB3NM9ZM6610uWNA32nygSxRBtKLn1LHPH4Vx+uaBo11DLbz3IguJDgMG2kt7cfkBxXXWcVoqKskiSuTgOCcjHuM4rY1PwxaahY5Voi7HarqQC2O/OMDtyealxK52fDfjXS2G/RzO006sfLMyBnDeik9T615hZ6pd6Tdx2LEyTwkEoAQuVz15GAfTnPavpn4g+Cr64iSWEmWeJjvQDYWA+8Mp0yOgx9a+LfF93faHdtr9jDNCtvKVe1zghFGM8559OPxrlaaZo2mj6t8GeKZ767i3sYoJl2sgxHh++ASWIz7/j2r2yKdtMiktjEGUAuFyZCxPPQHI/Svgfw78SINSsFWYSW88JV9zsBI3cLwMDHsDX0N4c8QyTSJqoz9kmAR/LBw2fvMOMnHr/Kr5u5DifSnhizsNaUXNy6I+RhQ2HLenB4+telxWTWsm+7RZFGAgJbccdDkZH1ryHw3aojGOxibaoDeaAPl7/MetdpcaxeWkCw3EjO7jC/N69tvU/55qugky9q2sGJxNetsjbAUOdoHb7uAPy/OoEnmvX3pKI7aHbkg4DL+ZP5GuO1TVkuZy2ouZBCMKgwGY9gcHOKbpRRpLaZW3SsdzRltoHPGev61mWpHvPhtBETJHtVz1IIJXd06gda95tIVisPMkbccYzx1P0ArwPwxfJPcwqgTzfunAJDH1B6V735TrZLAW2gDkD1NdWHWjZwYt3sjhDeOb2ZUZkAxkDq3pz/AIV5j4u0tzuliwHwSCfm4/XBJru9VxaOPMQMjcE5Ixj1NcN4g8VW7QGytXVHQAbwOQx9Cep/CsVJPRnTHTVHGeEriTT9ReW9QKzALtX75HckcnHvX59ftr/sx63468RTfEH4UrFc3t7tS609WCEkDmRWYhckjlePWvsW/wBX+yags6XIinLc8HDAHgZ6k+prpF1ePWbRJEESyRkljGBk5+np3rehiXTd0c2KpqatI/mL8b+F/FPhjX5NG8UaXNa6laNtuIpRsYDGPofwzxWBLfgIIIgAF4CKOlf0b/Fb4Q+Afi9aQL4r0xLm4tgwSb7knv8AOuCa+X7D9jL4V6bfm4g0zzSDkNLI0gP4E16EczXY8Krh7bn5KeE/Bmo65O1yLf8AdwnJJQnOfbHT36V7NafDKVseaph3AAHyI2TPYhlwR9RzX6uWH7PujQPE9laoiKOFAxxXouk/s/6ZJMkjxKhI+bb8ox6HHr3rmqYipJ6IUJR2Z+aHwr+BHjLxRqsNjo1jBfxuyg43jZz1IkY/Xoa/f/8AZy+B2lfB/wALRosKf2pdLmZ0HAz/AAr7VzXw2+HWkeGHQWVskJI5KADJ9TX1QJI7DTWnkbakMbOT6BRkmtsNSu+aW5rKuuXlifnT+0XpY8WeNG1uDXE0oWm63hkkUNMkyAgNCpIHA9PmyffFfL8Xg7SfBIT7dqxm8Q3MZ
c3zSS/aDGGDKJFQld3O4kdvl6Zr3Lxx4on8Tm91WxuoZlimkXcFT5IWYMrqrhR5nXGTzgEc8182eJbe40gxauGUz3DRokmWLKjbt33iRk5BP0PrX5Xn2MhPEyl5/l/wD4zMce6ey3Om8SCG78O31zdMby8uTu+9sZYUO0tuPIbB47YHSvO7UXGk6bNo+jk6xejaxWQ4bYrLINmf41RT06gmub/4STUdR/s4RwtcxyTTJIRzGtsvlgscAd89aka7i8Pzz3whlCHZJJcRnc6JCGRNhJBAZSwJB6tjqBXz9KEn79Rab/f0PlJScpOUj//W+cbOx1W70KC91a5j/tIPuC2sNxJNcjzN+wOF2KScAEHBAOTWB4rgkg8YeJP7T01tM0yV3G0qw82R2DooLZySSTxxkYrLt4rrQZ38W6cJtSvI0Xfa3cihI42+85VS4ZQBwoPTOGxX0HpnjHQtS06w1j+w7K9ntbiBH3KyuisQCV+YhSF+6ccgcHnFfjTilppZvddz881Tuc1oLX73+iRzxNbtpMwtkYIJDPYXG5JVO3+ESArkjjLdgK7vwN4fNv8AECCzvL2a1OnSMkbSAxJIysEB3spUbs8HcOvNaOl+JrHTPFNlZ6HZo1xa3TC6E8f/AB8xo7B4VJABC7lJKnnr3rX8ReD9T074q6t4zgluJfD2pacjmKXj7NcFo/LAZhyMgsQMYYHI5GcKy5FeT2Tv9yucs52un2Ga94H+JeieK7nVdE1R49Ht5QY7S1w0/mbw2xw43rz90kkY6elcP4yvvEmu2Oj3C27WypqE8cgWJmWHzXiBMnVhkjJI6+tfQeneJ1TRLCXfFdanbhybiURsrxZCjg5y0aKPvHoMAE4NeX6BrfiTSb63057pHXJubm4kkdwzOSxEMe5lVlyMNgZORt2803jE1JqWy+/y9SYzvq+h6D4Y8Ka2t1qgiSS0R7aW3mEqERL8oCSo7DDoXIHy5JYkH7tdLqfiN77W76SBULaXbq9rAPuyvHEbdgB2KnlT3z0zVaa+1yLS4XggWAyoNksCqiXDoC/zImAruCeMAFuR3rkLN01TTW1i4kNjb7SG2LsYXEbHlC3A2bNzLjkk4wOa8qWLg4pU3pHd/f8AgcnOz02xufC+laBZaKhdnlikvZpURppi0Iy7ZBCjHQDvgYHU1z2gWGla54i0+fVpZYdPm84mZ/vSxgjkKBhCu4gnsQO9bt+1tqXh2y8Y2pWyu9GWVZzbsESeDYQ0owCA+SpJPGD0PbLsdJ0yOKabTLiaV2tgjJ5wLRrckhoQFAVWAYbvUDI61GAl7PEc8kv67m0G9JMLySHSbqS7sTdCLT5W2lsIVXdI4G0j7hPy8Ek4zxg1vazqWspf6VD4ctftFxqA8yWQHMcaP8zKD0wPzP41yet+F5LfXTHq85gtNWaRYpoi0q+Yi7oFkAwRwCGXqT6ivc/CXhNH0aKysLpZBbxLLBKM8o4yrHOD3wAQD2xX2GQ41xrOjLQ+t4UzB06jpS2ZopN4m8lYfMZY0T5iFAyT1256Z745+grFvEW0gZooZ5i5O9QBuY/px64HNdNPf31nYxWUm17ljtDEEYUdScYHPqe9chq2vx6WsjyCN2Kldmw5Hvlc7s/UV9k5JM/TKexqabKbGJnLNCoX7jMMqc52nAI/rW/c6pBcRgXayiJcEvkeX07DP5ZrwjX9TSW2Wa6lkv5c4WNIxCsXoeDu4/lTbPV9Ys4UJi+1uEDmF+vOOUIJP5j8qOe5pynoHiKA6yI0sPOhjACo5XbGF67ie/6181/E3wa8TJfpIGll+Rdy4ikb0I45Pb1NfQttrlpqUItA0lnKcbSzAtkdQT0UY7/gK57xNYQXFg1owW6UKcbM5yP4sn0/XNYVLbouL7n50XOoXGheJPJuITEoDZwAzEvxnJP5DoK+uPhb4lgu7JLDVHMMU/yqCw3AjoBjOfU5A/rXg/xM0F9Ouftfk7PNBlEh+VhxyuMnGeMelJ8Mv
EEQdILwjdJgKzDJUg9mOMjpQpBY/TLwk8Fg66fPK+x/usfkwoHqB1PsM+9dpqWoW5aSDytoj6YOGHv615Z4Ue4utNRLqUXMoClWVQMqegBPfuTXQ6pC8Stbq3lb8bsjaWJ7cDJrR3sZ31OA1CXS/MluixLsSdyNuz9cgZJ9Pxrc0e9ubkSzcRwoAqrtGDxxgAdup/Wucv8ATbexmVoJzLcpgjn5QxHTHH0rS03WbeCxM00fmsnzPk7QADjn1BOcCsr9yrn0v8PrydrcG4QcEEFV79DkD096+iIZ0SywCDgZJ/Wvkzw5400+fYkpNupH7qNfmyfXjsT3r3L+03bSpCjB28sZ74z+lddGehxVndnA+L9XurlpmRj5KOFC45I/D1rx7X/FEe99K0+Ly5OhdlwqrjkA55arnibxEunQ3dvcSNvVm27WIwMf5714611YXMu1zJNIiAhk5KcDnHI/nXMdl9LI5bX9bvNGla1gZbmNgdzNwqqOv454zV7wV4rS3vBbswWKXhiDwu72rIvNLtb6VLm2cyu+TulPOe2MYGM+3NMs4AqSFLTa7Aq0g2gcdcEc/U+tIclpqfUMcURtYo1VWjmxg5JP/wBalTRlmcRxhUXbkFugPv2rnvAGrRzWsNjcPtByUZjkH257gV67FpkhVYWj3RsfvAHoPvfpTg+p5tel0ZQ0vwr5aK4/eseuD+XFer6L4VIG5v3fsefoayLazEN1DZWwbJCvk9VAr0vTbeSCQvKWKtgD616FConsjz54axp6XoUdn15bAOf5Yrgv2i/Et54Y+E2rNpNzJbajeottbvDsMymQjcyCTKkquScg8djXuECL5YyuDj8a+Xfjaq+KfEunaFaXj2Z0dXmmYoGicMATGQ7BSWXjHJxnFdk5csG2ctS6VlufmIbG9vdOuLzUL1J9TS3+04aWa4CNFIdzc+XbrI0bAlVQHIbAOKyL1bjU/DjSSpI0Xl/uvkba5kO0qGA+8oB459+tfXPxA8J2Gq3NwfDem/ZBDlmiMShJJAcFZNrJhlQjaT8pGCVJrwzxV4eur6KTSdPkZ0togpe3OI0VgQdsigj5DkHHQ9TX5jneDUopx18/n1Plc7w/Nyzir/oeX3ui/Y/D1toehXMVrLBGHmgWMOfKBZ5MM25mfbyffgelY8ty2rG1t9NhUQTJJPEWUjZDEAhRscAYbec+ld/caBDrNxDZRXv2CW4ZDC6oDcSQR4+Xg8BsD0z+VelTWuhaRcWGj6ExsbdIR58SKo3MnyOJMbuCOWwf1rw6cqbXPLaLfzPmU48rZ//X8607S01GO619LjVZ9L0CKS4F5fLiNoERo08i1UGJTNOBGofd8vOORXk3goDQPFNnrVndrMbjy1l066wkTMcHGCSsi88YUFRjHSvsWC5tzpmq6vpULXVsLeG1eIl2V0UBnZQ+R8oTjnnNch4k03Q7y1tZ9Tt4Zbi2ijeCOeLygsRlM0TgJ82/Dgc9hggCvyp4yitbf8G5+cfWFs1Yr6jpsH9uXd8LWD7JbzJeQSXcqLaqLoFjkEqRkZAK5G9eRkVZj8a6mbyKOz1Cb+ycqLhXla5ZIg5UOuSVli5znr/DxxW+2mza/ZmzvIw1vbtHDcbyD5Rl5RgPu/I7ZA/SuKstYvdMUQeHoP7QkuL3y/NlgVEjh8tgcBVRiH3sztkg7jtAxmvFUY4ine7SX4/l/XU5ILmiel3Wl6ZrFxc+HZbg6CZSpeWMRtbSqcSB0YsCImAycknaeh6VJd+Ere1aeC5mittSt7FnLxPkMwZX3qibg+9AQe3OR1rIbXfDC6JZWWnyvFDakRwNLK0nlCYkKC25XRdxwp+bBx15p+k6tGt++n6hGk15NBN5DlzMD5R2yr1IfcOPmz7d65akqTS73/PQyndHY22px6ZJeW0Un2iCNYowwH+t/dcKocEZJ5U44NcxqEBuPBsOpTMLmJbg7ZFAgf8AfKFDAKCN5xjlWA6cVxOm3Vza3Nwuo7xb2
XkwqE4Z3C7nC9srnn3Irsv+El0y2sQtkyCF7c/aI5SpicjBKlXwCygj6YBGMZrw6M/ZLklF8r/BGUYrqd54MttFhgktobgbEhcyxTfKzCQDClGZ8MMjqSCens6xgsrGRLiSKO5tYh9nk+QhpkHKjGAS8Z+6wBORwe1eIarrkMmttc6aAJk8lZijZby3wB8gIDEfyNa/ifX9bsLtJNUT+zRM4S1lf93HcKvH7sng+o25GDzis5Qr1LSUXtt8/wDg/wDDG8E5O8Ud/rjyjU1urK7MUjy+ZJazMGilGcA704Y7s5IwMZJIr0Xw74sj1C9j1DR2jj06xSGDHmZRWQFXJ5zjC5wcnkAetfIMU9zc3i6mim5sLW0aZLlW/dlpGJMDdmdVU4TqQSw4Bq9beN430uLTLCfyvtb27I6nOWJLONvGCGHzE8ADPQCodTEQqRcdNmv+Cb0pTjNOJ9maob/xJfvPp16VjkZVIIBHzc7hg5bjoOPfvS/8IrILWSy0y5Es8OQ8smMhz1y3Tj0UcV8qfBb9oTS9S1OXQ7+Tdd+Y4AD/ACoASADjJ+p7n86+mpfEc11qFroOhRsXc5MxxsZzgjbySQM8np7mv3SnCapx59XoftOAqOUI8z1MifQpLGeOLUsIv/PVYuTgZIIz0PrWZqmlfYrhLiCbcFG5Hxgp/skEgsD9K991Xw5qsUUFxdXgPyAFACpz/ewx4/EV5p4lik0xFjKmeOU5dlI+UfUg1TutD0Fqc7o8VjqMoW5EYxgBuUQsf7wY4J/X2rfj0e5gUm3lLLGwyMAbvcHGcc9651JLeFh5rx4Q7gAMHdjqCADuOOwrYfUZLdVSOITNIuSd2NpP3ufp14NKSGcZ448A2usK7apC7Da4UryqjPJI68nv7elfDMdjP4N1z7Jd24mRJmWNmBxszwTkcen0r9DLjXr2e2lJUMxDg7ckAEcY6YwM4/A18m/FK0nv5ZGhgIC4k3H+8ACFJ69P1pJLoJytoz3/AOHnihp4o40YRRxYC5dVC5Pf0PoBk/oK+gX+yeIlcOpV4wMliF+YDn5upNfnv8Mddm8Pu+jtbLNk+YJOu1sZABPPqM9c19taJrGm3yJJ5rRSYWMRxkYY/wAQ3Y59z+HatIvQxqN7oi1Cz0vTrYLZXIlkJCqv3ifp/UmuHu1F1I7vKEjLL5mRw3sB6Z9Ote16fpmnTl5Hgyr7iwY8+v69gKhufDrTxrcJCEt1CtyB+HPpx3pTpPdGcaq6nPeHdetLCe2tY49iq4RDtzkY68DoP8+/1AlxZ/2VG7S5abORnHTpz7dq+WL3Omavbx2kQkkDptJG0ZJAJ9ec8e1emz39xoukOl4wMseCQedo/p9O1XSqWumE6Skro8N8T3a6l4wvNDLmV0Xed55wxAHGeePWuS1PU5rJpXtsKjHy4F2g5/vMTn8q4TwT4pk8U/FzxNNeMZbOGIIRGcE8n5Q3rjJ46VmfEHxn5M507R4Y0htUwqRkk475YdSKxdrnRDRI9PivN1s5ZUDuMFgc49sDsOuPWsq2uJBfRW0MjxwL8xeTgbz2x+uK8a8O6+0tykkjmZgoOASdvGcD3J9K+jfBcEuqS7p7dyCAoiIzgdzt/LNTLsK/U9M8K2MNqiXl9OrMkfbqxPJPt+Fe0afqt8iCeOYmQgBVbGxUb+tcb4e8KSWQVyF8lipVWyxDHr06fT/61dfrdrcWGlm8tIfIkXP7p+Ucjqvsx7VdKjJ7HnYjERW53Ona1fxNm4RQXCru3DJzx1rsl8R6mNQtLWTaIp9xGOSGTH+NeSeEbrStdtfOtWbaRkA8qvcEfQivV7Lw/HOsN3e3ICWwLvKx2qFI5ye1d9GjJq6PNqV0ek6l4kt/D3h+48Qamw+z28RfjksQPugDJya+EvEV3N4i1bVNa/te5sbkSMQVhMyTOI1YJ5LfKzR8c4xx34ze+IPxxHi7UtR8HeGUkGn2JEUUg2+TcNtYOzyEfJyPlwcnn3I4ix8T6
fpVsmheIruS6kbEgdCRawE5GAwPzSZA7FRjuea+ZzXiGm6iow15evS+1j5jEZsnPlitjrNO8R63c3sCxW8XmTRlXfyUjEoYbMOqsAGXbyR2ySc5Fa0+kvaaT9o1UW1pJYpclY97SSMkjKwIAAwSX4Jz8g6ivH9b+INpZ2ptY8T3lusyibaInWQqPLdSMF+vAJ7noKtT+Mo9Q0DT7bWL3ZcXaM8rxJJJI+6PcgIYrwTGOue3OK+X/tNXnTWt/lq9P+GOSrmMWpQtqeRaj4fuLa9XWtHeR0jma7MijMnyB42QY5XJwAOwyfTNREuNRsv7QuXewkEYDgoSS7A5YFiC2AB/Ks/U/GGk6foupz6DLJeT38iMm9Cs0ZXgkqC2QV64zyc5xV+88Qadq891omi3SC70xDIVkdkdjJgAFMc7do+ma+fWJSdpLXt/X9anzzgpuyP/0KEet2ui6NF4bnuhdWyTNLLJI6qJhG2SFK4/dqeg5HfnNalv4ke8vZrjW9PSIX0P+hpCi708uHdhiRwpXCn1H0qtaeGNK8Waxp/h27dLC6hmlt47mdR5Tvy0SSNhvLEzDAYggHbkc1syXV/PqF54cso/7PvoF8kSSlRcbo5CGtyQMFGC7e2cgdCMfgrw1SCvN6N/psfmMqbWr2Od8BX11dxXtzcSKTdnGxPn2GNyd5bjJAQ4HcgYNdza/ZBJbWL2qYmYySsvKrGx2CNCD3c5K54C+9cvp9vJpsr/ANoWkUUU/kvM0ZYq0Zb93tAAXcxYnjhVA7irunaoJNbsbN5Bm5R2KOn3NvzRlcHuck+2RzzXnYnFWajFnM+yLN58PrVtLjZrYWcslwD5qyDbHkgbSSANpUF1wCc546V57rv9r2mt6ffeTGsVrcXKTt/y0XZlgUKgcHjIH444r0lltbfUXhvJJD9mk2eZltygojuTG2MN95M4PTjrXN6jYvJaR69Zb5NJswUeSYBpCTL5eZgADnBG08r0yRyK1tOVpRV+/wDXqbyXNqc62vyy2mol490MUf2necJ5byI5BGehRx/KuCtL278V2k9haXEVtaPFLKsjINzSxKJFADABtygjf1we/Feu6VZrqdvNoms3SC6vVkihvwmDLgny1l6OrbFMeMDBOcnrVC28OmPU7O3vCX+yWsttBLK+8nKqWVig5wxPU9zgenRWSV5LWyLUeWLOT0bQLWHWrK3b7bqUEECy3MzEbJImGfKKKCDJGCAoGTnvxWlNqF5f+Fl/sy3nOnWJC3sDq0u+AnZJKXIxDJA5UM3AKsM52mvUrLTNZuNKutWuFCRzGa2iWNVRdxZpjJsQhuVXGQPnyR15rd+FHg2y0+31y6vJFEGpT+WLZCzRLHK5DwvnIwyn5mzuxjlSua7MvkqllPdp2N6Kje0j5vbwxb2Whz3nhDW5NY0vT9k6pLwqjMavHKRmIuUlJ3ZHABHHTUuvDtlaaA1zDY/ZZLm3QtbKWe42xqxV95+UKyyHkdcc5GDX00mi23hzSIlkS2tdNvBIJYordFSV8AAP5aKzYzwDxjPeq+taLBq8n9r3Ekc1tHELa8gjbY5ljxtTaeVyTsQk4Cg4xjB8+rOnL3oK/L12+/00NZ8jlzo/HXw1qMHg74gXUd+PKgtb51kTcWaU78gnuVxzjgfSv2h+Gfxd8F67o0d7pEZeWGIfMxEROBgYAJbn64+tfkz+1t4ZbQNX0/xjpEEcEeuKyOiNllktwu1mHUb0YdR1U1D8EPjNZeBItNl1KwN/dXLrlGZiDt4zsXGfxOBX7HgcTHFYWnXh1X/Dn6HlWLU6amup+2vhzxr4i8UXj2ttZrIFJaSSTeRgdhxjI+p9K7XxIBd2K20iBJGXlZBvJHfAwfxOKwvh/q03irwPY61p4itE1BA6pblWcr9VJA+gOBU8o1GwukN4siwysB5r5LMQegA57VlONrJn0cNVdHms+lSRRmKZ38gLnCkl89cEAcdP8K47U9Vu9FsEUxsol+VRkhio/
vDsGr6SvNOSS0a5U5jY7ixGDnuSDn8PSvmb4iaxDpMckVkfOLthmb5nBUE8Zz+HvUvTc1Tuc9c+NrVDHHLPh3BZthJzI/8AD6DC4z7Vyer3Mmq2/wBrZVU5HyEDAIAw2PrxXgPifxcEmMtnEVRZcfMcHJAGT0PFWfBXiHWtT1GF5EaSJcoSeSDggjpyOAw7j8K2UUtbnHUrqOjPX/CXh6NtQRHxGLpmRwRknPzADv1zk+1emaTb6jpLNplw7fbpMtCoBGxME/L/ALTd+OBmszwZp0s+ow313gi3AlBXqcZDZHrg9PrXU+KItQW+s9ZtFd5o1C7yOVRySfx24oUElc5ZYxW0PRtJ8RyNpQmgkMkiB4yTzhsANnsMDpXYWfiOe6tI7beQGRV2nkDrgnA5PHHbpXg2k2eoXhkSBf3E3zlFzgMDnt7Cr2rza7YRm6tsTZKgqmeQg4APT0zUOvTi9ZHnyxyva57jHFBd6/aHeG2AtI2ORkcke4OAK8K+P/ivUl0+XS9DmkidgQ5iPCLg7ie2ccZ9aveFPHWopfSy38XlSbPmbGQueigdz/L8q8C+Jniy01TU59Oid1lGWcE4Cjv9WPpRXimk4nrYCupKzMH4Z3+l+GdIvWhjLXTKZZwzfOc9QxPPP8vavIfGHj1ri5YKBAhIGIxnB756YA6AfjW5dPajSWMtz5S5JcMMNn1JHJ/lXzNrF1ZT372WnoT82d7uduPU+/PNZ048zuehW91H2H8Kp7vWr9GtgUKDCDczHdzlmxwOMCv0c+FHh2S1so73VFkd5AWMsfzbT6EdenWvzF+BXiaPw9Ct01wFgRgoAXcWLjngkcD16dK/UH4XeJV1adpdIuPKDrudAOOAM+3X+dEF79jlrytC7PoTS1sysTR3Csw5wUILgcZz0zVTxLCLiNrUHLXQDbeuJVPyke1ZHivX7jS47JrM7ZLnPybVAC4yT7EEg0zwoy38sWoSzgv/ABuTgAIAePzzXt06fKuU+ZrVXJ3Ot+HngoaTYy2Myqqxbk39AyH5sn0xkg180ftSftES+BdPs/DvhMmKzSZRf3bRhofLXBwzMCpRlzux0H449j8d/FCLS9DOnadGYIZy0DvJwztu2ZGcfLnGDnBBr4vkig1hZrzxPprSQ3IcTWMsqn99GxKqBsYcsOQrgkc84OPAzfPKdH9zT+b7HhYjF88uSO3U8m1T4m6XpmowvPMZF1yNrq3kjVGtnCMGDAbVPoAc/MOcANzdu/FlvPp2+CyuL+3ICeTGA8khZj0ZgnAyCXDE9sd60vHXhPR7/SJBE0OkmB/MVb633tArKMxIq7QYmU4U79ysOexrE0XwbpWieHJta0gMdH0qBJPLLl2KvJsU7CWO12PDZ4wSBwK+CeFw8Z89DVt3te663/zPIlSg9I6vsYh1K6ks7zTYRGbqVHdIiT5zRkkFh95iOMnLdB1PfvvBviKYyE+Kbkw6To7PG0gRWkKPGSltGSeWcZ2jkIOSMV4Z4717RNKuIYtPZvtyGVmmRo1VSWJXYuA67RuB3dSOgrlrS+8ReJ7WaWCdZdObN19ocCNNiHcIizEIWAyT1J2/NwK5aNCXtFOCenQ5p02pWPV/GmnaXZX82reHrpri1MruBc4JROiunlnYyAZAdcBehTHNeR+Hrq9h8Xap/aVkqyvARBdbuCj58tgy8NkcH3z+HS6D4q0+JAmoypvZmhttiNICzZD4ZiOMDt9ehrqvAviG0nuru4tAZ44z5MX7leAORsBySSTnIx3GK5qkdJOpFLm3tc4qsWm3Y//R4LWvGNx4V8TX2jpAgjllhCfNuLK29ccEFX+YEE+nvXR6ncad4g1AHUr2RbzTlivLmW2l8xm8rhSdoyHXaCVzjOMnvW1400Se6Z9Q0rSDfahqU6tI20IIyQUXac5DKNxIBGCBzyK43+x9c+HetG4jia4e1IdZgrFmQkbg0jbc8ErtJJwMk9K/DKUZTXvRaWn4dT4SrTiqClp+p
6xrGqaXqkVlqmiSGWG7AQocNCnnAzLtUcZZ1YMRwAQOK5vwhYSX80enXBEeoWASQqVY58wFmXd3UjC8Hpn1rsLTWNEga3+SG0F03mNGFfY6uPmAIyAY+pIA656iueOqm4vLW30+VVWyMiShUAzvxyhAOdxO7HRe2M14mKowd5RXXS3rrf5fieZUgr3RkarNbaZd3UlnZNNJCxUuF27CTllKD02naecAjPFdhpun3iajPrFhHNqiSWZhmAyvlIxUgyZYBgwwCwzlegB6c54d13RdG0nWtTmjZWy15PclWIjEreUIx94O7EZxnr2AqaTVTPbNH9ojjtbZ3AtwuxmRQuTwowVIYYHGBjkcV3UHKMuVLf7tjenRSZJJHDokzazp9lJNpiSyxyAk/NJ5YJB6jcrcKCBkck84OH4g1O8+y2mtF41gtGEUEkY2yASqc+ao6knAHOOPeuE/4TiPUX1bSLaR7Vrn9+DHDyYjtAic7h5jdguM5xzWC+sJYaVNZXuoK1zDbRxXcc+8wwFVPz5ZUCkcjGWQtjJJNY4LCT15dLmc6Ls7HqHhrxlBai80i9lIlvzutgSItu3cN7Nn5T+GNue1db4K8dQXWhRCyikgnhcuVL4OGx5hkTrtwM89gTwK+KbzVLrV7+1g0aFylztVGRs57yEOOduGA54Ga7rSvEA0q5McN6wkvlISOUAyhSFy0hwUwQfVjg4YZ4r18BhHS5ptdLffoKNI+ofGfj23hX/hHrGWXUrlzG7SeXviZZJVDiPcuVAA2Ef3jkda2ZfEhgzZz7LP+0LIzPJsB34BWSGYAZXa/wAwbA5PPPJ+f/EUviXXdMtdPmnsrYW1ukgWfUIxcXsseDHcMEOAqr92M8Lwcbia52W+n1vWvsc16tlpEMhnubzzUulij3HglCFLk8BMgFuuOxisnrOScF/X/DaHQ6LTsjr/ANoPR/DXiz4WajpNrb/atW0h0ujqMiYMnkjiIKFGAQWG7Jzxx3P5X2VhBc+JLOC+T92WUFRwFUEZznj86/UGP4ja7eXDWnhSaYWOQpgaaESSJg4Cbx5ZlKbvlxg9AO9fnZ8SNGTw74qmsbNJWt2YNC0sbQyPEeRvQ8gjoQOMjjivuOFJVIRnQm9tUfV5FVtBwZ+8nwi+IOjPomh6Bpka2ulQwxwxgY8yYqOfm4AH059hX0ZNLo+pP9siAgtoQSXHzlgOOD/Cv05Nfzp/Dn45zeFdbspry1a/aI+UqvKR5fYgZyFHrgZ+lfrn8L/jLB4t0OJGtjd7EV2WI/u1bA2ocbfy4Ar2MVTlDc++w1aM9j6E8Qa9bWcLizmYxDhG6HJ/h24zXx38RNYiuE81FjgmG5twHfBG5uecDOK7Dx2uoOZL+8uBpUkqkpGru77T94s/bPCjGO+K+R9Y+26tM8V5ejyF3BSpKtI3fPI4PH4Vyxd9zSraKuclc6TdeItZg0xiTGSjMQhJK9S2Oua9H8SaenhLTIZ9LJiutNKRXMSAtKmWBWR06rkEMG5XHHeuw0/wqdK8HXWraMWGpElRsXc6iBBKyZYgjKYLEHJA4HU10On+HtbudF0r4heEnWbVYFaBorhRGrWO4bmeQ7jKiSOUAJwVZRyBXzOOzKpOTjQei/F31/4B+c5tm8qlbkpytFficr4f+I+q2UjQWr/2i9zCJAFjIffuwwQEnci9SR69AK7+X4q3epSRaVpqNcXSbAybOW4ZcFc4U5GMkYHU01/hkNYfR5DNb2OoQzk3qWrkfZlkRDtTbtD8JnbgHLDOOSfm92vbHXr630u++VZ5YhcbiyGRZCrLIyfPECRgBuo5rgo5lWk7Xuvx/wAjxpZjVi7N3R6zf+O9Us7l7Pz7pFUu2W/d7E+7tDDGCN3QAY9aseFvEPiPxD4SntrKZ/trTtAztOfvQkFQRnAVwQMkZ6cnt5DrE+u2sc2teJDFcSOkSyrHJvAVQQo3AjG5VH3uR+NdxomraToOnaXfxXRhtNV5VYogx8xt2
AWDdsqpOTnGcdq8vGYmVScp38vmeVXxU6k3Js9j8M/FQRGz0nxDpq6ra6jC8f2of8fsEquA+SoKyEZAGfmx+dfOX7QVk3hbxzp0llOk0F4ufMVSActkAjsygjPNdlplxbX3irUNPBmtzZq17A8g/duu1WwiHcrDp14OCOtePftEeI9SvrfQdSuovJZJHCqUKAq4GAWIxyAOhx7CvZyHHVZTjCT0aPqeF8xnKsoSeltjP1nX7G3sfPZ3mlkHQJwPz5r5b8W6hHPqO9piIJDuYn5T7gAetel3WuXEunxq6Iu3+JuWwe3SvDvEshuSzxgOc4G3/wCua+6w0LM/QsXU909S8OeJriBre1ikyQyhfmxtXuPTPT+tfqx+z/4hsfDWkSa5eagFkltiArnA9en9a/G34ex3WpazZ2sNsW3yLj8epr6k8b6lrtvdW2i6XdG2W3hMLKT8rk4z/hVtKFRMwalUpNH6CfE79piLxHrVrpnhb95c7AGUHGHAwR7ZGP1re0v4l69oenLpKzLPfWwU3GHA3bz0TOOAp5OeMivhj4VeDZ9PsbvxNqkc11MZEVEt2HnAEkEqxDDKjk5HQH619AahLFp+lT502J7q4FvbQXiODGyNGp8qdMgBlLJgjO7Izk9PmeIc6qqfs6Tto2/w/wAz4zOa7hL6tT36/wCR63f6pc313e6x4ijluGURsnlThzbQblXdhwVBDcYB6HGckY1pILzV4bywl1OJ9PkhjKyWkhFwsikF3mQryXJOChIwcnGDnzfwVqNxp2pXGg6pJJPYXtmbYysyMx2gNE67RhVDqVGVxg4AwK6LSp0gul0PVbeGOC7gmMMlqrhdqNwJn+UfKwY5Uj+7Xx0MRKSu9ebdfr9x8snKL7nrn/EpsvDkGj39rHI2lI93NchASwUn5HkbgsTgEJjAGS3OK8K1vUdBvdOu9V1OeXQzIPOj0+LJM7RSDJc/LtRYlymMBSOhr15/EulxW/h+RLaa6/tiHz55XIEBkQYMYjPzbX3N04xz1wD59rPh3TZ3fWLIiSGJI4ozKBn9425olLHcV+YrwwzjnsBt9bSUYpbL+vzNfrLe/Y+UJfBNx4zN3aaTZ3VsSsk0Y8oq935odmDGYI/mMc7uocfdGeD2tn4bl8KWTLMqtNprSWgWaRdjT3sTW4O2TklE3g4HQ8YAOfpPRfDuq6cE8SXGoR3bXTQtcQvCqorctIUdm3spYIVO35AuQME1c8RaPo2qu2rtbL9s0+aFPs8LZDzhDGspJzxIoAY8Y7YJ3H1aU4JScZf1/wAAzlOzuz4TlbQNO1I+HdQSOS5kkFzaGG5Z1E0y7JcghfK3MoVMnhuRxXsuj/C2/sNKittK02S1MhWR5vtId1JGWXBBbdkjrgZ71p6B8Ebaz8TS6xqV0lxrbOFtIghkFioBZ5J2VmVydx2Keh+YjK4r6V0vTrG/so7pJWd7V/IWJ+JHCkfOcYJXHOcYrjr1ErQVncdWavpqz//Swr63iiF/p0VqLyVgZFuNmVcQ4Vkz2kTphhuPPJrptZ8HW8zaRqMcEWn+bawSgMoDM5/hyOcnbyT3NZ2j3k2gtNfS2aRxakDbxhZCSbgKvn5EnJClgVIOORnHf0CTTtQvryx0xlLR22wbgQ/zgZBGD65yAa/A1RmlKLV/mfmeKlJOzOAvLKW5jt7S2hkmvJhcAfKojmYy4yyN8oxlTlv4STkdotLstZ0qe5i1yAWt3cMwjtoG3xQhVYglwSgUj3bJIIzjNe1eH7YXsFw928kl3ZLLENoVXQyL88ig8tgcZ615frFn4n8S63BpcT+Rol7GJIvIdbcbY8rIZ5PuN0U7Twfu5BGT1YSh7vK9L/12NcJTc3yM4fV9d0zSvDi6feYsU02dDLHGwmluAojmWNeByzZ3H1J544o3Ov2WuWd1ZealhNqkYfaXJWBt37iMjPOSzBsHJBbJyRXA/EnwCmh6st/oJmms7WOL7bDEdqmU/McFmyGdhuIXO
FyMevBW17eXN2ZBPHFqt6qmNZVeVkhBxuRRuVWb+DcwLcnIraWU6tp6P8DSaaemx6V4K8O2UmjXmoa7JLaRWW2FgXV3uH37ykSk/M7YGZD8ijnBwa5rVNKbxLJPonh/UBp0Wpzs0km1wkcRGETP3tiHGCcbiSe4A9n8O+EtDvtCuo7qZzDChZmMhBEsmCo28gDgkAkn0NJYaBoOn6hp8GoW8LW9rJiR1kZyxibcMsBg8Hrk4PpXQqsqSskYOu7tox/DHwD8P+HNLGrahrk89xbxmCeG1jAyrAbvLDZOXTKnjjJ6cY3dW/Z78LzXEWpW3iG8guL6NmUrFE0sbE7mGSDsAB7dSK9H0HUNXUL58BjR7iRYndlcbwMoufRkIwcckdqx/EGoXljqaapHI0spZGRWV8s27bnA+VVOcZJ/DivGx2Y4mneC3MJ1al99TwXWPh74t+HtjHqdgU1a0vI2B8+ES3AdiF3tv3qAScggYI6YxiuR8QXnxF1S0/4Ruxt1lsbcA3EI+S3ZiAW3Mu1c8tjYAcYwMZz9o6TqspSyEKmCcoqS7skrj5iFb+HgsPc1574h0HSDqNzr+lwPNBCjTB3lKqTgoxYNhSzO/C5GVUcnoPcyrNZyThM9HDYlbPc+RPFkGreELPRdPjs7wvqESXnkeVmFN8hCOxwqgttJBPPGe9eCfFaTWNX1m4lvBNG6zTLHNuzujVyoBKn5sYIHfA5r738eeCZfEthAvii5+y+IIbVhahpEKXNiqYhWKAMx8xPVDtIOch854mL4F6N4guo9F1OefS5dQgR5JZABHGUjBIGc7PM5AJ5zxnmvo8LjacasZJW+Z7uGnGFn1Py8Uz6ffq84YKTjnjDdj+dfWvwH+O174Y1uDRL2+NjDdEpuCgEH03nOP5+nNem+LP2YdBZIpNOmub+OHes7lishY5WIBJFP91uAwwo3Hivk74q/CXxH4ENteXyKoY7d8ZLBWHK5OByRz0r6inmGHxD9mpanvYLMU3eJ9vfFf9rowGTT9IihufLTYvJY7hwGcknOPSvHvgx4nu/iN4tutX8QhrhLdcLCj7FJkBAyewB+b8MV8OXM091me5cl/wBSa9f+DfjVPBHiOG9nTz7WRgJ4z0ZOueCOQeR71GLy1+wlGk/etudWYYirWhKNN2dtD9d7Txh4eu21Lw1fNG8OkrBGlxvAiunk3RyuoPCZfKLksdoxxkgew+JNUu7HwzpXgrw/bLC960YYxjZHCoIkdlbLkkbdgXA+8eRgV8Uab4W1S2jttUsmj1HQsfaoLuMhonkI2nI6r5Qc5zzu4HJyPT9O8WeLY9TgR4DJpdqJZTK8myRY0wFYZPygkkDA6cdev5ZWoOnUmoK11a3Ra/mflFSc6cnB7mbb6Lrel61dvKxZmkeUXEisWLEBMqwY4KgcEY3e1UdF0rwT/wAJPd6l4ks/NlggXdJ50sZklkICq6AYdQTypPFejaRr+nazJqes+elrDNL5v2cllaQBjGwBbvkbh2JB7DNR6r4cstftraO6ZLV4ZmuRPF914o33D5xksWBwF53NxXNOpOnJRn+H6maqOMrmH4g+GF14lW+vPBsotZJo49m/mKUwbvOjUnCxsCCBu65BHcVgeCYV0TS7PRb+CKeJLhljJO5FYsRINx9z6+vpWR4Z+IV/4e+2yX14ImWKaS3t5GKsr3O4ocD+LawY+mccd9/wfq2o+OdJuLvUHQxTTB4TEnyuHJj8xix4XcoAOeSKzrxnUh7ulv8AIJybjY9N0CG2sTYx3Kok62IiaVWIkADkkBu4bgYxnnrXzr+0L4fltPg3DFcZnvLW+MquSwbyURV4DdCMgNyeR9a9NhuNW07Vk02/gmeS5+eNdp/eKoU5HHquMfU1r61pmoWHhu80DxPAb4eJbe4heOQgyxJcJujkzztYtyAASBgkDINcOW4irQxMJtaJ6r87HTleJdGtCo9kz8srfXLibTo4pTyqkfMM8e+O9cxfeW53h
1jyeo9fSsQXd3pd1eaTfjyp7SVonQjawZTg9aT7ckxUkbsHqTz+Ar949l1R+r+2UkfTf7P2nWk/iy2lvZhHbw4bLDjCgk5/KtTxR4h/trxZPZwxAxwT8FSCSN+Oo7VzHwsufsVtql5MnzLZTHPYfu2APrxVv4M+EtR1vxDHetHLcQQKLiYgYxGjAE/MQMc4rlrRUVKcuh01MSoU0+h91eDNZsPDfhyPQLWaa21KcB8ojPtTaAWJ3LxubbkHt9a+kLHRzc+H7OKWQXktzbSsJ7eFUeMReWjorHOMgg/OxA29sjHnNr8P/DGpTTi7huYtRljcwRlgINypvSNgoG5JWzsy2OeXwa9b8GeTpXgzT9C1HzLWxa8lZSEEJCqVyJOGHl72OFwASBxwCfzOvRlWm6tTZ3/r9D8txldVJSqN6tnlmn7NFtJptBuS+nxnDK8XnRRLGu9AhO5Y1bBOOTnKiuxtfGduskF3aXMTw3Vu9urgiV4iyMAnk5Llj0GQCDgjNW76CaPUry7svLksLpntrqJokVQPmMZZUUOAzLvA3YJUjHNeNaZo+tWPit7tLiGxOlwm4hjkQmJL6ZS0UazAbSsbbpf7w2cggg1xxoVHNSpv7+lt2crm3r3Or8VWwg07QVmureO602KJCLYgMQSSBKu4nkFhkDkBj0xXa6ra2TaVbTX0cxd7VFIVG8pUbcwIRjtyAfmP4Y4rw/wT4evmvLnW7+5S4urlCI5JQ5adISVyxeNVJY9GY7QBwTwK7M3OvXM+t6nDNcalpbRARpC27ypSSGMIXBACkFl3EkYJIzwpYaVnNrpb7mVyRaudAviOxnutQbSmeS1eFbaKKYsVHyceWGJO0Y56gjHtXaN9sg8ExajelEl2OmM5kK5ARcKM4ViNpGcg47GvKPClrbSaZa293FIt/qG+a3uonLxSvCwYR7doEUhVcgNyclc55LvFHiKKR0ga+eB/IeGOIo6KfMkLxkheF8ssSewAwDmpdVuTlFrZ6LRWZjLqXNNvU8GwQXuj3mb4xyee4zsZ/MJbYXVX+ZCNyEc+2BTR48mstSnux+8S5V45hnaGacD5V6HKsv06Z4IrHu/CmrS2s9+JZLu9GyQkjOd+0BueW474zya+evFck3h+G6uruOVra4n2lyNgiOcuM4O/kYUqBjqc9K8uFOpUny9P1MNZPQ//0/Lk1GW41QXVvOLoRRMogkYsq7jg8McAE43dST+del+CvE6XKrbXN2FktchBGdpCqCGwCTwGJABOcCs6D4ef2noMsllaM08O0RtgM77ywYsAc89AScZyTUuirb6FfLbXelJam3haRQQolkmEZUD3IbB59a/FJxkly2tfqfmDk2rM9Ju/EGmzi2vJb6SyEQjiV0JeVJug9vvZ479TVvxLJa2MzPpwS3eIJN5IJkWYKGySDwDuyAOmTxXD6Bqs8tpA8tmkUnlNIDJFgPkgOwHqjZz1PTiuT1LxBeaVqkskjfaDcblWMN8xVh2TsFOSM+leXjcTNPka1vchxd7I7eLVJ59Zu7bXLc2kM0SzxzNPv82J1y2IOAoBBBU8sPrxw+u2+jQ2Cww6SkQRyd8ZzcvHjhnfHPXOMAKMDtg9ydVtdQ1AyOqBSod5nTc0W1du5VIIABAwRzzTp9e0q3042SpDPLOoQvJH5k8qt9wsUwBnggdBnJGRmvVw2M0fOvkd6rpL3jzSax1rSrU2F7ZyiO32qZQMl8sQjMfdeenHtUP9s27yG2+xf6OG82adixyWVUK5JIbPfH9a9s1LxzY6VoxgvXDPdh4/mkMuwlcMWYKVByTjnjO3OSTXht9YWV5pvlaDdebcxzFvs+7eWQgE4AHbAOTwSMZqHG6fI73/AKscFlfTY9DtLy1hMt7BI09raBd8Z2kpLIn7pEXgjeepJz1I4FSeJNT1Kfzba4a3lur3mZC2XSRF+VV252IinCqBndz3zXOaQP7L0tNPMXlG+aJ53Z0ST7jIrtjDfdO3245Fd
FplvZ22pXMOnwPDa28yRQStFG6oCAI3LFTkvg4J4BxWinFRbXb/AIcKa5U7DpFeTQLS20y6a4aKWSO73jBWVET5P9pSHI65GKwW0W5sfE1nFcPNa2U8kn2SNC0yL8ofeUz91T3zkk9O43NYuZ9BhlkFyWjkYvJ5z52t3AVgdxY5LADGRmr+naimoWiXV/l3CyBYGDKMvlcKU7MMgjPQ9zWNLHUJbHZQVL4m9TX8Mm4Gjub3yLyEgGzQCMtIsn3NsTZw4XLNkLtIBzyRWHq9hb39/wD2PfoLqYR+dK3mFY0EeASdpMeVxhgWYcH8NW0K6trOn3VvZRyf2bbqvkGEqiSEsC4+7wqqDlgSTye9ZmoW+oR2slx9oWCbmI28QjA3OTgpvwArISMnheTgk4r0PrUJys2l8j0lXeij1I7u0n+3vqV5Y21lptghkgu43aOUALjzGL/JypPXJbOSeOeI+Imm+FfH+mzaXqavJBIAqvcGRpE2ZPngYwqjJHBGRjtyO1WXXL5La2sIRFe5D+ZEB5gTyiGd92UwdhUcEtwDjmueuLrS9TnkkWa2soLG2jEwME0jurnaWLHcyrvYNhWG0lcAAV0c0W1KMtfI3+uxg0on5Mr8M9b1nxjeaS6t5AaUm7jizANnCHIwoVnKLx03dDiuQ0jQJ5tHuNSdZI5Td21nbDGFaWYybskj+ERkcd+vSv188VaFp1vpP9kaHewzWE0kbs7r5y5d0G1Y5OAFChl3KQGOT1rwDxv4Vg1jxT4i1XUJZLrQLaTTprJ4yqhbi0WVFiyc5U+YZHYAAjKrzivtKWfRd1Lp/Vzup55Ft32L37MXxE07xH4DuvhrZ6jPZ+ItHaeWDcu6C7hEqsNhYqoZd+CGIyDn2r6R1ZPEGmXP2SJVRODdEKGMispdUUDjk85HTnPWvh3wz4V1LwR8KWGiu0esapZrdC4QgfZ0iugrSIWwBJKC2DnlFAIr3Xwh8RPiZb3/AIkjeIX+mT3dvard3a7lgfy2eUqQVJRWCR5GdrHJ4GB8VneVueIdfDyVnunv5taeeiZ8/mGEhVqupCVlfW/y2+8yvGkV94qKzfZX0+SSVbdWhcsJZJ18uPnPlgKqEseO4HJr2CTRbzS7PRPD1vGsen2OlS3ckcOSxvYIXyTkqV8tsBByNzM2PT1vQn0vWtL06RQl1DqCK0MOPKZAij5WPUOvIB6GszW5YdPsLuSQyaXrkBMNhPBjzYUZAFdo5CVkfk8cFeMZbbXjU6/PJRqKyT1/r0PKqU5Rbg+h4Tf+CNNlv7vWJYnt2aWR3tZtryMs6Y2PkbUePa33WOR8wCkivTfCulSS38cUVq0sKxiFVtiY4o0h+aONQ2drsy7nZs4zxnrXKeCNMkub2Ow1uc3s3nC4aVP3gl80Fy6s+SrEqQxzuJOMYNei6pqsGmRajH4fUx21zCPswcYdVUFSHBwOSufoeOenNXxFny037uhL5b3Keq2iXfj+DxPfK7RWUCC1tVYq88sZ3DzCSSAZMIQDnGcN3rbuNWXxcX1jVrZoLe9QXE8spaEo9o26Vi2cN8gZQBzlcHjFc94EsDNBcp46la+srsmPBjO0JEuZVjYHJON3flgMYNaXhLxjdeJfDz3ur6YLXFxeWHkH95stysY2nPG4qAjHvlsV3RwznByn3/r59DVJy1Pyb/aGtJb/AOLfifxHDbGG1vZ4pMkpndNEHGQvQkZ/L1rxKwWQzqm4jHQivsn4xaFPH4m1HUrywCWFx50rvEp8mQRASQsCepYZX2Oc44FfNWieHdX1K9huEtSovTIYWb5EkZMFkT1YA/d/DqQK/W8uqfuIp9EvyP0TAVYypRs9kj2j4cwS/wBi6yzksEtimcdPMwn9a+yf2cbuTwOuo6bqUUaB91nG80YeF5hmSAguNgBfBUk4IJBwDmvkbwml5pPhmW+LCK31Gf7K5YZGFXLErg7kycN+nNfTnw98b6p4b0C28OWNmNVvUciG0aXbBJC2N
yQyOduG3ZAbO0ds15WdVV7Nxi7PQyznEp0nTv2/M+29K8V6hc6Xa6hqFtHptuiCVo452edZFUsGCsxKqeMMJMYwNuQa53XvHbafb2V/aSCQWn/H3Y3DlyhJDrKrBSWDNgsB0JxjBrkLnXtP1XQdI0/U7OS1gnLwSocs7LeDcAXXLARSqVGQeCAa53Un8O22sanYDz7e40owRg+XkXEIYxqcgEP8gKkEnHGBzmvgcfUlZuMrnwU1J7HfeCfGqahfSul1nS7jLXkbP8k0KngDI4ZcYz6k+pr0vxLqHkT6WsNtDFA8P2q4jt1C4kuf3g2pjGBHwc53HrjpXy9Y+MbqXw6bOws4IIrpgCFHlrGwRgQxDAttG3oerE46Guzg8Q+GtUtbTXL1ruWO5WW0kYy8RSqfM8ss204y5CHA4wCOtcNOfLSUVpr9xnGbW50etXdzr93JpGmAWkTLIW8nIKAFQioeAgAJ+VuAPbrx1ze6ldSpDfyIXt3+ztbLMhhdUVWEk7r8scrDGTtJyoAFdRp+sWEcVvNDKXsGkjeNYyVllIwfKHlvl2Yr8w3HI4xwBXkfj7UrvS4NU1LS9KitRaLIlu8vLyAkDLB8FccbWYkdD6Y0wsFKHNN63N6DT36neeI/FWn6LI0XhecQ3tzIDbedKhZLp0+eE4VSVOARJjaAQcLtIrF1W+1DX9Is9SuLeIa0ygXNsx5XYcMUdeFz97A4znHXFfL3jOVvCWneH5dUlgVLy1+1x+czTupaRmC7ickBduRnb0NdnZ/EPSrK70rwtqeoXMmtukTzmO3Ro/8ASAH+fAVseWQDycYySMVrXyuTTq0477jrYZyXNE+oF1ZftdhpVzdbb2SCVSqkiPYio3GM+4I9s965R7uHxDrUFrdw7NPkuJmZgEkXDknADYHU4744OKztK8W6d4SsY7eVnv7i7e6MP2qLa6SIZFwCrZBZBgnJzgHjNY2teJYFIeyjaW0ilzIyDfH9rkYfKD/ePevIo0I+0Sin3+aOGCknoj//1NfTPHmi2EEeo6XdLcfa13DZJtDeW2WDDnnBzXL3OqjWtQh1bWLM+QJAY/JZjkPkMp6nb0O7tjJriI/A3gaK7EOlanMZVtJJLS2CCGBkyFYqRuG75v4m+gGK9G0SztNWv5vC+l3KpPDAXifBWG5ZX+aDjJ4XjPfIHrX4tBTT5k7pn5fKnyu8S7Yy29vaWl9PIk1xExWJHYtH5Rbc8khzwijhz1YkADmuF8QahNeSta2ypNc+Z5ouUfBJc4LYGAFAAVFAJCnirNpeXNzfvZG02zFjbv8AaC3l26wdIgoPzDG1855GPak0JHtjb3GlSjz53YXKpESHSEFSreYEBwcE8kqACBmtocvLr87msGlHXcsaxDLLBaW1jKLyxa3LSzQ5TzCzsrDjlWBTnPHOOlZKzpYWkn2dd6XodHZPndIygQsXOdm4ZXH58ZrPF1cR6ZBoWsSxoY5HDmMozQIQMIqDAdQfm6ZPueuHc6ZqRtja6IGu43QtuZtm5+M7F+UjgZyRjOKi0JzbiYaSZmWvia21LU4rGO3MGjwxb3JXzVEaDzNgPB+YkKXPOM4FO8Mz6bai6uIJnaWSSSV5n5yBt2mMZwiEEcAZ9TVax07U7SEyx2xs53QbUmKIcoQM7jkEYJznr071g6ut9aT3MVvAls8OHDxbZEixwD5eQU9TgbeOPSo5nyOENO5UZOzjE9ebWrGErZpHLJcTlGJBwieXu3OVxt5HGRgnPNdxp0sXlRRW9xvsNqPKrBisSISRIzZyw9Bjjrk5rwzS9RubzRXMbOz3DIsYOA0rtuDqO/3Tx7n1rs/D+qadpWm6jeMn2nUJIBFJEqgyhIpPMSLDHAAbjnrxUR952fVGbjzadT03WtanurVLmeyXbdxCOON2EZVPvKwHBLykZdj0XA6VnW322z8N/wDHwkmovMiPGwxLCvzMuc/eBIAOM8t6AVf0HwhqN5oS65JOLa6kXzU3N5soV
mIXJ+YZOeAoHHGOtbl/oOnalZHTLm8ujfQG1El20qiSXZIp3bsZCHvyOM9+vZOgk7S06Hd9Vco3R57FFe2ur2J0xJg5fdMd7uypycsCfukghSR14Az1zb7xBd3Gqf2VewebL5gBkVC32hFUujKACAcHlTjOMA113iF7+28QyWisY11CR3ZEO1Xw3BLAMWCr8oVQAOpI61xy+Ibqw1iyi0jTXa8tt4WaSZpkDTwsgLLiPhVY4znBwRxWTw/7tpCp1FGPLND73xJNcWiGYGwuVt47ZVBI2FUVlOOzFAWI5+83OTXmPiW61R7OC90q4ZSpWRjC3E9sfnO8AD7jHdtI4GMcdNy68KXEK3l7JdyO8S+UGcZWfzMpHKyBdwYcgY6k9COnD2ljfjTNT8Osrw3EUSSwYDC3IkdS37xnZgyKeV2gAZHXNcmFu4tx2X9P1GoJxbb1PWfh3rF3r3hXXb+ORLjVYxDZxquxBH9uCxuEEhADbEbnPOeDk15rr2vnwvrenaJ4ytfs1vbzF5DIjCMxSsd2wAdFyc+5x0rH0ez1ex8PaxLo1nOtwWgwzYl+0uxZEPyglVRs/mCetb/iSx1aygddclTUtK1TUxGNOYtL5FpMplEsbgM0eSrYGdoC5xlhXv0sN7VqNrPVBGhdmt4ju9POpHQjbxxWkcUltps0S4hMUOTk4yoZGztAxx9K0bV49D8NQSXUwkb7VKY7fhi6TKJJHkyBtwTlQB3Bz0rg00u1tPF2jaTrF09lp80l3asV3MfJ2jB2tx8qqTnHUHB5zWxLFe6h4stkjjFhpdjDIIlkHmOB5SmFpsZ3O4bcQOOQoHSuatFxh7zvbR/I5qito2ejeDc6FYSRm4meSRo5Blg8sySZkOdowuDkY9CQK3vFCzXtrZ6fNbw2yOZU+YHcoUEwYyWxtIUk14xr+sX2ia9ZGOH/AEG5tFuFZDvRZIvmbLgYwwHynvnFLqsuqzaI8tjO91M7hfL2llMUbN5jADsFTcxPABOeK4q01J6x3G8Q73avc6R9TuPD9t/omnPqGrNMWDRp5SSckqiDncSFOHwPyFdHqlzrd3Hb3GvxGC7kQFQy7owGUOY3bgMVDKCR3PXtXReCtVuNamnkv4jPavEouXaLESjYhyWDBh8j5HsPwrF+IHiDw617eTG2uNTvNInSGOG6YxWscmFZQgUjevIyH6YA6ZFeX9XTd5R0bM5Pne1jzw+KdbstZdradopZEba0p2qgDfeAHBznH04rq9K8VXtrpNrZQucPNJH5kCqmZHHmHbu55HXgdcdaoMuntJJfWUCX1zKsi25kZXWJkYBXbAGWG0ngbeOvavNfEei+KrTwpHaukTxrdR3cNxFIvmKJm2lgRltuQckjIx6V7GEVOEvZydzSk1fU6T4j6SNZvdN8Oz2PmyW9ltl52Sx20qJIQvBDN9oKDn1zxivBPGvgE+F7K1vbhmgsPD8ctsLgktGwiO/zPlOTNJMc7fRweQuD7fr+tavc+KbcavOLO1tBFcPMeC8dgvmSKmMFm+TvxnFYfioalceGPGt1rUOLRJrdtPiydqxxyYRtoIyQkg75O7OcnNe7leYThShF7f8AB/4J3YLGyppJf1qecaJ44j1vwhonipbdEitrqS0uEjjDIizEOsjqBglfmx/tc+men8LwT3k8kXmASQLIJmI+WCVGw7Rv1AwNwyeN1VdH1S2v77TtA0vSFvLTxLBK8cJAD7sjfGkwKlmSSPHzh3KnOTnnntS8cWOh2kuiaLp0tyNUCLqMrzPI4a3OFghZQihMgGQ4y7YPQVrXp+1k4x2e39feaVbzbhFeh9TeG9fstXZHiEd95RCt/pCxPHNZyiZTk4UbiMgsCByTjk1xet63DpWuFrm+F9aIB5bb926Engq6fMP7pC9wSM8GvB/hT4g0/UNS1nw/rOmyR22oQy4VC0rrJHnZIFbnCsCeTg5I54x1GmweJtD8T6BpmsmTTbzUrW8ito7iB0mMx3Krg
4OFVsrvB57CvIq5Y0vZ9jkqYZx909K03XJtRuNLs0WM2avN91RGpdSoUA5HVuMZzivSH8L2nh2xXQnzeTI0epbgGYqbgEY8sHB2DaQzDnrgdT51beCfFYt4tH1OJv7WljuGgvGZFlupRGrqBGSCY8sqxyN8xcHPykCtCyudQ1LxNqz+IPOjuk2wGQYCF08sxM4zuG5V2sD3U+tcOLy6VO+tupyODV1c9q8K6Vca94WhNpArXUN5JbjzNsSIz7DG8TOw2gYG0gHueMV2Nzpdx4ZkudK1O48/xIYY4XdjuaMRE7n3MgWR24UgAAjJG4YryTQ9UuLGzvDqVsxGnzWzuY/n8yyZnDTouBk4wxAPCkdMGvS/FN1J4h1MrcXSbr/Yk0sBHnzfIB+7fGETI3A56nGG4Fehh6EXStbVf8H8SqbWxn6/8P8ATvGM2gQ39lEIGEaTTSBW+zxeaoIh3g7VKDkcZP1zXy9feGb/AMN+MvEfiC701LeTS9QtrhY5Ufe6iRlnkUSYyjK5CY4PB7gV9fX2o2emxR6TbRLst7hrSZ5SJVMkQ2s2Rtyn8JPGOvOM1gXlhp3i2z1Xw/LppSC/dXmhgfJBmKv5sUsx+UEkkqevOB0raWNUYyp7u6+7qddGuoxcfM4rW4NH1rUtMswPKi04tIgG7BWRl27mwc43Y+Y/KD6dItIsorG6bwXqtoITPeJPKH/ghgbIkXGMkBcZz0Nej6V4ZjOnarp/lHfbTCfz9/Cum5CnI+UbG59SBUs/hT7Xf397aPFM08DQFpHwyEYX5gQThmUleoNfNykoRUqcveX4annKdtj/1fGvhiL3Wk1DQ/GEclnq8Npcw2hA3+cjIfllJz1X7pVhnoexrvvBElp4d1A6rZXUKI6KyQuGktYpYcCQu4O+NmwGHPLHJIFed/BaeZvFFrG0jFF+zYGTgbpVzge/evVPFVzc2dhotvaSvDFdXV7FMiMVWSNLmZVVwOGUAAAHgAYr8oxFG3Ny6WX4tn5rV1lY6Z9R0bxjNdaRNpn9kS6eJJJZYMjYWfBdg5O5BkZcE4ByBinvpmp28cd4zC50u1bzvPilVlLFQueOWLfdHGCcDrXNeBmYeJ/DqgnDX1zAR2MRCDyz/s4JG3pyeK9U+F8skWrWNtG5SG4lCSIDhXRoZmKsBwQSASD3ANeHg4Jy5X1bv8jma1seX6lBFrN/GH0swzXbBonRRiBUAxGOCWYA7nORk8cgVtW+imLQFug2bnHJiIjb5gQQm4Y3KoLY6HB9hVS3ZpvFCyTEu/2mRctydvzcZParMpP2HTo8/IWgbHbPnqM49cHFc+LxEuaLjonb8gqVLtJaHMvpE3h+5zcw/aWMaCFZ9rbiVLAqoAXIP3s8Z57V5ldarZC/TS7J5Tc2r72YhTcSMGXcEbnrg4XsAdx4Ir6Y+IKr/auhLjg23Tt/rGr42+IEMSBblUUTL5rBwBuDCXIOeuea9DCtSlyjhA721MninUprTRXh0O3jlw0Thy5Ibc0xDD5wP4sc+gxzWVbaF4mtPEflXc0ExjeQI/nBpJGXBU5X5Q464JPbivDr+6uf7Asb3zn+0HUmQybjv2s0eV3dcHJyK+gnd1YMrEFJIypB6Hyz09K6MZBUqff/AIY2qtRjZI9F0638TJoAuNdure/kkkJQnzFMYU/MDJ1b6kNirU/iBvDdtb38Flc38CszlUkSNQVZTgPsZjktuO084I681ymtzz/brJfMbDQpkZPO4c/n3qa6mllsrCKR2dFdcKSSBwO1ThsRbWSv/wAMKhinF3auM8QT2vxDeVNOR7C8swtyBC8TnzXjyY5g0olVmGNmOAR6mrvhTUJ9LjttF1wedc7TdIh/5bJgKcZ3LmMnDK4BAPfBFckIooW1yaJAkhu0O5QAcjgHIre1HUL9vFfh2wa5lNtc/aDLEXby5CII8blzg9e9erHExlByUbOx9DheSfuteZ0fimS8j1+71D7ClxqUh8oLJIqyJAsR3
qEU7DhWHzD14A7cVptlbpZY0e4CRWwdRbQZCGNFLrG6gBdoJZQeeRjrXs+i29uvgtrxYlFw1pI5kCjeW8sfMW654HNeLeFCf+Fg39hn/Rk0tJFi/wCWYcqfmC9A3v1ryoTcqc5yd+v9feY4qldPXYzLGRdJ07VdA06KW0WSOdXmgUxJKVkXfIzL8yj5j5eD0JAGak8baNcxeBdMgub2bTr2CTzJXtZvKuGjCny1J5yu0MBuwSAMd8++G0tD4t02MwJtmjDONow7bmGWHc44ya+X/j/cTvpugXLyM0s99F5jliWfZEwXcepwCQM9MmroVqnPG0utjy6cnKa1/qxx9q+jzeHNevLSNvtcf2eQMFPmN9qZ4yVDE5bHAJJ6Y6cVva1cyHXrLTIImjm+y25ui8hVhKEjDlzg7TsjQD1xjqa9A+DOnafdx38V1axTIJoRtdFYYEgIGCOxJP1rkfAVtb6h8SPEf2+JLnLIv71Q/wAplHHzZ44HHtXTWUlTbvvr9wqlJOTJbuS813TZ9NvQY7y0Mrrbtwv2WQbcIf7yM3Q4ODXMz+Irmw8OXOnIg8vW1+zTEE58nlpFA92Rc54PIPWu/wDE0EKWNrOkarI91KGYABiN+ME9eleY+IAE0ixVBtButaXA4432/H05P51xfV+SpGcXucdSCjJNHsvwz8cx6ZaX1hcRwtp0whiV0XEj+S0bSO7E42u+ARjhR611v2XwxeeF9YvLZEu/MujczSh1QyOzYYb+u7J4H90YFfPenxRDwuihFA8qwXGB91gHI+hYkn3JPWrXgNj/AGPbrng3k4I+ktx/gPyFbV8HzSab0TNIRu7X6ntF14cguNOFjY3BQSYtlkfA+ZxwB3OONvHrXlfiGCWz0+90e4jFtJZpaWdtcEfLkM8u7IySCSoIxnrjkmvoLxBFFHfaQiIFVrayYgDAJE4XJ98cfTivNPiqAfDugyHlpdR1Lee7eTbSGPPrsz8vp2rzo1HTrOC1M7csrIoeKbXSZPDEOowXdvFqF5pU9nbzXewBZTKS8xX1cr8ow2F4OareK9IKafe2X2lLq4u3RT5e5VkZwOTIwAIYrHu4wApOe1ch4yRPsmlLtGP3YxjsbhgR+XH0r0j4iO8Hhu/eFjGwEAypwceQp7V6FGs+WM0vIuMmeZWvhiXS9D07RPB+ipqN0JZI0uZRIjJHIqMwtsn5jIR8zHaOpOFBz6J4s+FiaTodn4q8VJZabdTGSW4tLd0gupmJTLO8e5Qo+bftDMSTjNerfDuef5f3jcRMRyeD5anP15rgTe3kmrWqSTyMrWcpILEglroqfzHB9RxXTl+ZSlOzXcdPESlK3Y5PV/M8K2NvrXiC6ttF8LiJfsWn2KOL28abIzdTEGWOLjk7wWH3QvOO18VeMNX1zStA8eeGJjca1o9lgWkXyxT2UZjkk2fIDmNG4XAyGzwQc/JXxQu7ub4h+KJZpndxq12gZmJOyMgIuT2UcAdh0r6F8HSSL4T8M3CsRKmp6cocH5grWsmQD1wcDP0r18TTaSnc7ZUrRVS+o+x0PTB8UrPxAkRitPEllP5F8Niutsy7mWVhuJZUHReV5B6Cl8TNrfhLXJYtf0KJHvZVkOUVbiJOFR/PBzIjrzgluxXGcHf0pmufCejm4JlxqjKN3zcPbyhhz2YDn1r0rVIYr/wRaS3yLcPDFCsbSAOUUKoAUnOAB2FeJDFS5eX1+482FW+/oZWo2BGsqt9c2oW8iIjt5DtkcrEEXIYAFMnOA2fvcVQ0bVl0HYdRUxOIfOlVeZFNspeMKMcqSoIU856ehb4ihhn8ZTRzosiRT6bsDAEL97oD0rQsIornwt4+ubhBLMg0pldgGYEz4JBPOSOK21pVFydfzRmlZ2RxOia+by1vLSQyLPCHmjKf6wS8rtcE4KMXAbAB4/CvRvhw15ZavfapqUb+VJKYZZHG0byTGMJ1zwW7YHJwa8b8Bqsr6neSgPODABIeXw0nzDd157+tf
dWvWdoNEtCIEy1yxPyjkkDk14mMrtJpdSpaRZyFtp9zpUM1vFbB4HLGR5HADInDMeeQSORznNVNVh069v0jsk8kXWTvUspDKQcHnBXqa7Gyhhl1fSbWVFeESvhGAKjG4jg8cHkVzFuANUsSBgtAGPufMl5PvXjV4py+78TPl0Xmf//Z", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Result: True\n", - "Testing image: _static/img/not-cat.jpeg\n" - ] - }, - { - "data": { - "image/jpeg": "/9j/4AAQSkZJRgABAQAASABIAAD/4QBYRXhpZgAATU0AKgAAAAgAAgESAAMAAAABAAEAAIdpAAQAAAABAAAAJgAAAAAAA6ABAAMAAAABAAEAAKACAAQAAAABAAAA1aADAAQAAAABAAABQAAAAAD/wAARCAFAANUDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9sAQwACAgICAgIDAgIDBQMDAwUGBQUFBQYIBgYGBgYICggICAgICAoKCgoKCgoKDAwMDAwMDg4ODg4PDw8PDw8PDw8P/9sAQwECAgIEBAQHBAQHEAsJCxAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQ/90ABAAO/9oADAMBAAIRAxEAPwD87LfS4rdAx5Peuo0sW5HvWIrGWL5eK29PtGfDDgigDauIAUyBmobaTyfl6VflbyodrVy892BJs9DQB2sM7bOuavW9o90w2DrXIafeguqE16DpcqJgg0AbdnpcaR/vBzVO5s4BuHY0691YxqQhwa586lJJndnmgDIv7FA+6P8ACq9lpsxlUrknNdHFGbhuRiu30bT4Y1BkXoKAOPn0+eCEt1NeOeKJZIJd5+9X0hrRhWNth4rwvXrA3kxY8gHigDzeO+uG710OmLJcyrvOKjfSzE2cdKu2/wC4cMvagDsorREjAU81RuLx7U7d3BqtHqZYhRnNSyWMt6pduPSgCH+1+gLVpQ35ccGucudHmgO7tWvp9mzLnuKANcNvwM81aWORVOKphDBINwrdth5gyTxQA2x1SazfBOK6RfEBeIqxBrlJrUO+7tV6GwJjJXoKAILy7+0ZYetYtzdMACe1adxsi4Nc7eqZQdvQ0AZOoauUGA3NcXqWsSOvDc1d1K2kV8CuTubV5Dg8GgCIXJfJJFL559RViDSm2ZHOam/sp/SgD//Q/PO3V4kGORXX6dKgALVjWto8yjAregsGjGCMUALqlwPKOyuM
MUksu7NdRdxHJDdBWSJI4n5oAs2doykMM5rqraeSGPBasm1lVxle9XmIVcmgCyZd5DO2au2xQnk1zzjb8xNLHcsuCOlAHotnsGMdq6B9TWGEqOteZW+pODjNbNu1xeZEal/UjoPxoAm1HUDMSi81Qj01513hdxNaTWtrYf6Rq11DaoP77gf/AFs1Rb4o/DLQ0LXmsRNsJG1CuTj0yRmgDKvdAmdCxTbj2rlpNJdG6Vr6z+0T8LOUt55JB3AXt9V3CvB/EP7QtjlxoGml3PRpj8oP07j8vwoA9wttLUfNjJFdlpkEezynAr4Yk+PPjwzPJE9vGj4wgiyFx6ZOa7TQP2j76KZE8RaYkiZGZLc7SP8AgB4P50AfW2o6chQlBnIrFt7codo4IpPC/j7wt4ztwNHvkklIyYmIWVfYr1q/d2s0TZUYoAsrpxusADJrqNM8L7owZCcVi6VNKgA6Yr0nSr5fLxJQBzs/hvyQTGuRWW9i8UR4xXq8bJOpUdTWRdaKHJz0oA+etaLxFiegrnILl3XBPFe0a1oduA6lQT615ZeaeImZVGMdKAOduVR/v9TWSdOBJfGa3Jrdhgmpooht2+tAHNrEI8rin4Hoa6L+y55CXRODR/ZF1/coA//R+MIEjtRg1ro8UoBzXN3m/wAvIPSk027KkJIaANe+s/MQlBzXFXNlOXC4xzXqMDxOvHORWPqVoinzBxQBztpD5QGT0FaKuGYDOaypHw+AeKvWyM2DnmgDdgs47gYc1KdI8w4hHyr1J4A+pqWyG1DPMdsY492PoKr3epy3JEcP7uNeQB/M+9AE8NjZWZLSYuHHPJwg/Dv/ACryf4gfG+38MK+l6GUur4jGBjy4gfcAjP6/SuT+KnxHbS7WTw5ok2biYDzJVOdoH/1+n5+lfJc0rMSzHkk5Pc0AdD4i8ZeJfEtxJcaxfyy7z9zcQg9sf41yRY4x6VIFllOFHFWY9PlkQtwNvXP9KAKO4Ubq1LfSZrklYmUkAnrjoMmmPo2pKpdYGdV6leetAGbkUZqRAobbKuB3xwRVx9POF8h97OMqvdh/snv9OtAEVne3dhcJdWMz28yH5XjYqw/EV9MfDj9ofUNNvIdP8dKL/T2+Uzhf30f+0QPvV8u7GHXj2zzSg80Afr/pT6F4h0qLXPDd0l5ZXAJSROnHBB7gg9Qa2LCyZCC7bsdq+Cf2a/H93omvyeDbqX/QNWy0ak/cuFHGP94DB98V922t4Y22lsCgDuraHbGHPGKzdT1EQDaDg+lTxXcbw/Ke1cnr8YmjBibLUAc/qmtD5lIBY1wF0XmcsF610AsHdjv+apW0vbhscUAcrBpslxztzWnFoTAhmXFdXp0UcLDite48hU54NAHNQ2aIgUqDUv2ZP7oqrc3oikKg8VX/ALR96AP/0viC4YuuB2rMi3bsr0Fb5t1I+bvWZMsaDCcUAalnfFFAzyOKkubrz12k1zhcxD73JqaJ2PDGgCzHDvkwK6Wyso0T7RccRjoP7xH9PWqmmwLKCzcKvLH29Pxqe/v1fESHoMDHQD0oAZeXxuflUhQvHHTFeO/ETx/F4ftW06wdTeSjBx/CPT8O/wCVZvjz4mWOiK2n6ZJ9ou+QdnRc5HXtXyzqWqXN7M1zdOZZ5O57CgBL+9knmeaVjJNIcsT61TtbVrh/m6Z6+57Clt7ZpCrvwjnqe/0rpYGt7cKsCbmBBXd1Pv7D3oAltNJSUKZ/lB+6voPXHtW2un3UbGCCJYoARmSTG4r7Dt1q5o6WwuRk7zGI/OkPIC5yQPds8V0ASGSdprpdsMI3bSRlwowM9hnA68UAZmg6PI91PanNwTaylscfOmcEYB9K9A0PTrSCNbPVbZIraReN8kaMMckbWbf27CsTTtUv1lRkXyIDnEMf35Af4iTjC+7H8BVy0upbKZMyK23gmIDgehdsls+wA96AJ9Y8GeFZ7iOSwmQvOCV3P5YbPAIBOMj0PX681zsnw8S5BsbpBDcR5Ksh
wAO5BHBxjPU4ruYLWa8liwrRod20Lk49SODxj/6xr0nQ/DatEPPjMjKpaFmRsq+3HfPUHvQB8q6/4BOnyq1+ZopHXPmKiskv+0Oep749zjvXmV3aLauU3E+mR94eoIJBFfd97ZwWLfZryFPs0mRsZSgDjk42857/AP6q8U8Y/DOG4DahoIAjkJZ4A24bj/HHjOM9wTz6ZxQB4HpGoS6VqdpqcDFJLSVJVIPOUOa/VaC4+1Wlvf2/MdxGsikdw4yK/KW8066sHCXMbIGyPmBBBHUH0I9K/T/4P6hFrHwt8OS58xo7fymJ67omKc/gKAOxhvmVQgyMdqbPciVCGzntU10IomICgVkTtvOegoAtWaxjIk6VduUjEJ2/Wublukix82aq3OpMyBQ1AEU18bdyATis6bV5Zh1rOuXeRueahiT5sYoAfJc7mySSaZ9oHvV77MhUFl/Sj7NF/d/SgD//0/jB5F24NYF1KAdq10k1uTkLVBdOeVumaAObYM3vV+1tZZDz1roU0gIQWFacVrHbxtNjOwZ49e1AFG5f7Fax2kZ+Yck+pPr9K8W+JXj8eHrT+ztMlVr+6RlbgHy1YYz9cdK7PxZ4hh0DSbrWbxsGM7Yxnl5T90CviXVdUuNTu5tRvG3STMWP49h7CgCjc3LZaRyXkc5JPUn1rPRXmc4G4nk0mWmkyRkmuhsrV43URANjG7uM+nvQBa06yyUkvPuY+UYyT6AfWti2tvNmZIo3IzgtnlmY4CjHcngAcmugtNKaeTkhnk4LHkjPcdPpWimn3nhidNUv0CohP2YEYVX/AL2B1YDke/50AX20+00m3bznTbbAHYmSJJf7pI9+CcnCg4qObFssNxdlmkuQXZW43PngkdkUAbR9feuj002OoWUV7qUYNvZq0mxMB5H3EncfT/AD0FJfabNftP4iCqIX+SOTKxwwheSNzdAo/ixyeFB6kA5iS93RyRYIGQHb+8RztA6nHp+ffO1ZOY41kuEWFFBIyMk/QDkn2H4muTk8Q6fHL9h0KP7ZKODO2QgI/uKe3+03NOigu2kSXULg7n6AZLHHoo5x9AB70AehQ6qlzLCuxh5fA81j/wCgA9D2Ga9L8P8AiGWynRGjt5I8EAFDuQ44PUj868Vjs7qWNTa2jzEdGlJQf98g5/M1saZY6wrq01qiluPlGcficUAfTYjTxZpTSRCIXMJ3EREZIxzhSeo9v0rye7n1K0uHWEojR5XEuSnHUYJPX2Fdf4JWBGSC9tCiufvxttdTjquRjj0PFXfGWlrDcp5u1jjiRV259CSPUf1oA8S8Q6KPE+mXTXVjaidFLI8TSREsPUn5Se3QV6d8AtTn07wdJoTqyTWNzICjjDKJPmGe2OvI61zN3bQ6YWmFz5byjiOTK7z6bwBnPvWNoetReH/F1lfRl4o7vENwowynccDIB6g89D+tAH1JcXUrks/U1mPcnOD0rYuI/wByd/y8VxzXSJKVY5GaAN6GGOVPMPOagu7QFdyjpU1lPCyYzkVfnuYxCVwDmgDjprdg3oKovMIMHrXQXKh1yDiueu0CqT3FAF6HUYwg3DNS/wBoxf3f1ripZpw2FBqPzrn0NAH/1Pk3EZbHrWraW0OzccZrm4nZhnpVr+0GiQr0xQBfuykZYLWU7u8PlJ/y0JPXH3RVSW8MmT1zXjXxd8cN4f0mPRrCTbf3qtkjrHGep9ie1AHk3xc8XLrWprpFk+bTT2OSP4pe5/DpXiE0hY47VZkk4yTkmqI+ZuKANLT4g0oypbPTFeo6XpMf2cFVx7d/zFcrodgzOJMbAO5/pXs+haYdQu0hjyg43dsUAdh4F8JJqDxSIqkBsMD0UD1z/Ou+8UeCIdXsBFdAnao8rKZROSM/U449s9uvpHgvwrFbwOiwkKE3N97r1OBxxg8E+terS+Gra+tv7P2+XHuDrGz5XJHXA+8fYUAfBE3wj1aQtai+uEtDsIAxhjn+Idxj/D2q7P8AB7UdWmW0utQn
lt4sBISdsa59EHGfU9SeTX3tZaXZ/Yfsl3s8wYc5ALHA4HGcDjAqrbeHFfUYr5nW3tiQAAMvnHIA6k0AfI1n8BtS0u1jFnH5aLyG8tQ7e+48mrll8M5LOTc9uzMTySuST7nqa/RC50i1awjljgWHAyWuCN7DHUIOR+PNcfNZxKx23UMa9AXj4/SgD5WsPCEY2iVDHjplMV19l4SsJMRs6AjpuGM17m8WoLzaS6fMegD7lP45FYlzHrZYmXQLa7A5PlSKx/LINAHL2Pgeyi2O4+Q8ArhhmtfWfhmdXsch9wQblbitW1uLa2jP27S7vSeSd6gtH+PUV1+kapC8Oy3uI5UYEZ5Xr6g96APhrxTo+u+HLoxrbxXkCg5injD5HfafevH9UtbHU4xqWiwiB0YM0ancqsDnjPIH6V+h3jLR7O9RhMwfb02jkelfD1/4EuPDvizUBDKgs7xWljbtGrY3jHrknA6CgD2+613zdJt5vMD+ZEhLL6kDP61wE+r/AL04PGa4h9Qlto5LaJ8242hCDnOOpzx9M96zJNSMSk5zQB65a+II48KGraTVhKo+bIr5sHiM+cYx616R4f1Hz4xuPIoA9IkvjnAPFQGRJThjmudvpJY1Eig4rnB4hWKYKxxQB6G1oJMEcU37B71iWviKMx9c1Z/4SGOgD//V8N1L4V+IdJjLtHvUDPQg15hd2UyTNbzKUZeoNfst4u8E2MtowEY6elfn78UvAgtrh7m1UKyk9utAHytc+XZJLcznbFApdyegCjJr4J8Va7P4i1681eds+e52D0QcKPyr7G+N2pSeHvBc0P3ZdSYQL9CMsfyFfCre9AEEhzVrToGmnUKobvz0rtPAHw5134i6nJaaZttrO0UPd3kuRDBGe7EdWP8ACo5P619W6J8N/hDoFv8AYv7LvNduVA3XctwbcFu5SJAQB6ZJ96APAtBtyF2Mn73AyR/CPpX0R4O0u1jSOUglmOeADnHbnkn36VLP8LNOmSfW/BtzNGsI3TWdxhmRCfvI4+8PXI4rc8NJcwsqRlSF4IKgHPoT1xQB7lpz/YrWF4rZ4SJlaXc+5mkYfMPXcfTtXdWl0YZp7lXDtceYwwclQpACD06EZrytILdri2guXWIwneYkbBcE8ufc9PoPz9ZsLUW7w3IhxDMu2MA5wxY/qaAM+Wx+x30WoqDHHcgblwwVC2eATycdOprrLACC6jiS3JDHcGPUg55PoDXUafo9w8aMy7wR84bnjrgZzWwugJKZLkRCGZBgFewH8+KALt0A9rHAiqvG93A6A9B/+uuI1GHTI9xupSIxnBY4OfriutskaOKT7U5lZ8kZ4HpxWHfnVJMxQJGiIDyzYH1zg0AePaifDl1cNEuohB2V5AMn681zd9p9nZKZI7S9kU8+daTCRfrgf4V6nc6Pr1+rrHpVnqUfU/NyR9SK8n8R6ZbaS5luLO98NPnG9cy2559c8fnQBFpviSa0uAmmeKZbKXP+rvUwCfQ9QR+FdaNT8TJMLu40G01dX586yPDe5EZA/SvIJ5766UKJLbW4s8EEb8f7p5/I1YtLrR7TmXTdRsHHO61dsf8AfLAigD3y01XVJ4yq+EXgyDlpSVX82NfPfxij+zwQajcab9hLM0ZHDKwxn+lehW9zaXUCSM/iG6jHO0rhcfUA15j8co9LtfBdvPYi8jllu1UpeAhgNjHK5xkUAfLOoawZbo/MNvb6VQuNQDIVU5NclJM5mcA96cs5wc8nNAA0xjn3HnmvXfAF7Hd3QikbAU815WbKSaJWUc9TWp4evLrSNSD4+VutAH2Jc2VrNafIo4FeK67pcSzuIxjvXSw+NFFhsXliK4W91xrh2560AYT3dxbMY9x4pn9pz/3zUbWN5qLtJCuQvFH9gal/coA//9b7Bj8bJqNiTI+dw4r5c+J+qW5EjEitDw5qM11pyJG5OR1rhPHvhzVdZhZLdW4/ioA/K79pbxH/AGn4yh0aBy0GmxKSO3mSfMf0
xXzaeTk16j8Y4pIPiVr1vKSXhmEfPX5VAry9+e2PpQB92fDvTptN+EXhu3tdscWsNcXUu0AGSQSsilj1O1QAM9K+jvDHwz/s7wfceMvEFuyQfdgDDG9u59xXj37PdrB448D+DNLX71heyWEw9N8vmJ+at+lfWH7QXj6107XLfwTYqBpegosTIOA8mBvJA9+KAPAfCxvtU8Uw2tlbkR3AlicICRsZGHPp2NWNK0tLXWltriL7zYOPetDwn8X5NL1MWHh7SY4FvpI455mGXERbDKmemRXvOg/DrU21+a/uLdxAsjCNmXG4KeCM9c0AfN/iiyOk65cXd27hIigjY9/bA617x8PrfUdSe2R5BKgxnJBwevasH496PFbCx4EbSON/YcA4OOp716F8ANOvLkQ5j/dQ4IJHtwaAPo3RdABi2nAwM49zV99BjhuY3uI9qNncB3J4GPU12MFubaPaOCOSR79MmvKviR4zh8NxQvcsIxEG5IzkD0x6Z6+tAGZrGmWejzF7sBUOG3SMFUd+T+leMeK/ih8NvDk73Goyf2jcKM+SrbYl9sDrz6189fFL4xeLfEtxPa6YjCyX5VeT5QRnrk47V8wXtlqepXH2i8mklboRApx7/M+B+VAH0D47/as8QTzPF4UsodPtwMfKg2hQOMcD9a+c9V+OXjy+aSW+v5XVvvK3CEHtxx+lPhk0HSpfI1O0nBlIAaY71+nHArpx4a8NXqZjgCBh/DQB5pb/ABCsLiZJnDw3BI5jbbz68cH8RX1doV3I+kQXd40iSzRBuDjIPTKnoa+fbP4LafLrUV7FqG2zDB2i25bI5wDnp+FfS01vBHYPyAVAC8dB6CgCvdeJptLjSJtcuLcjJCqM4X1GTXD/ABw8Qxa78P8Aw9dwai+phLicStKuySJwqgIRk5BHINcp4z0691q7iNqdhQYY54I+lcF43ZrHRksHcjzGQ49SoIJ/WgDyFclmc8FjWjb2qldx5Nc/cTeUQoOM1o2NyernpQB2VuI41AbAqG4CBvMQjIrCe8eTkHFV5LiRRvDUAb76oUTG7BFUba+nlkJ7VzpuzK4BNdJp6I+O3vQB7f4QSF9MyQCc88V1flQ/3R+VcB4WSeW0kW2bhCM11H2bUP71AH//1/WPhj4Ze3sEa+jJYAcV6xe+HbS4iLxJsUDkH1rY0W3tbWER5AxxxWpdyHyykKZzQB/OV+1t4XuvC/x38RwTRlY7147mI9mSRByPxBFfNTjHWv3N/bP/AGcdQ+KnhOLxd4UsjP4l0JT+6X71xa8lkUd2U8qPqK/Dy7t7mzlltLuNopoztZGGGUjggg8g0AfYn7FPi8aN44vNDmb5JPI1GIH/AJ6WUgLfnGx/Kvof4iWlxe+P9XmvDvM87yKeoIclgR+Br86vhN4qHgn4jaF4ilfZbQXCpcH1gl+STP8AwEk1+2fhP4ev4k1C31p7eLV7KEpHKrn78Eg+R0b/AHe/tQB8t+FNHlt9esPIsxcSPKpXfwgycKT6819k6v42m0z4hW2mS3AljgCQED7oCLjP4nmuMv8Aw3FonjGW08LahDplpC5Z0u227fXa7Z7eleM+Mtbh1D4gSSaROtxGsgAkjOQ+OpH1oA6n4+SHXdT09EbKwZ29/mYZ5x7V9L/s+x2sGhmKRQGwoJA656df518j+LXuWuLaCZsTOqkHPIDEjGPoK+p/g8VsNHk2MWBZWHOOo6E44/8Ar0AfUtxF5VlIoXccHBUZOT/OvjP4peHb3WbgmVn2AnETo7D/AHsY4H419mR38EenRzXDBWK9ug+ntn2ry/WYzeXMz25VS+T++YjAI4wgyx+px9aAPhiD4dS3bSXMkEAMI+VmjJAx32Ma8H+KOpa3pFheRWswmnt/ui3iWFMH1wMjHfrX3r4hi+yQPp11c26yshGBkNkk8jLdvWvknxR4QuF1GSaK8iLt12sefbvQB8gS67qF5pzXmqWrfZ5GVQryFy3Hz46DjsR/Ouu0p70w
xm0U+QQNueuOwNevv4YUOoYieQ9ABwPc8V0Om+D/ADcQBNzZBOB3NAHE+HdMv7y4QMNoY49qb8VfEz+BoLa0kQrJcguXblVUHA9MknpX0v4a8CSqhdotoU5wOAMVwn7S/gGw1vw7pl8VZWRmt5CgB4YblJB9CMfjSur2K5HbmtofIujeINO8Wb5rDVbgXUfJSQHy2A5KhQOvpXGeNNXl1K5/eI0cduNqq/3uOpPoTXpTeGdP8F+GbCfTreQzzXLkXT8K/lKNyBR1HzDJ+n4+Y+Obn7RG96YliZuoX1xTJPJ7m5DXP41oW0qDnnmuYVnacn3rQWRl780AdVHJvWoJn3MRWbbzsYyAac7vnigCnO2yTg4xV631loYu5rm76d9/pioLWG7vj5UPWgD66+Ec/wBt0Ga4bq0n8s16vsWvNPg3o9xZeFzHORuL54+pr1z7G3rQB//Q+preV25TI/CtVL2aI4atLQbaCdQsigmtHU9Ht+q8GgDNg1cDAavlX48/si/Cz43JPrFvGPDniaQZ+326fLI3/TaLIVvqMGvp8aSwbKc082Mq9c0Afz6/FL9jf42fDOWe4GlHX9KiyReacDKNo7vGBvXj2I969I/Z3/ap1fwNZp8NPGkUk1nxDBPys0AB4R1OCVHQHqBX7iR2/kHfM4RB1LEAY/GvmD48+G/2c77R7vUdf0vSbrxAqnyZIFVLkS/wsWiwTg+tAHh/xBudHutJt9Rj3xSyKHVtp+bd3Oetec/CDSE1fxZLAf3giAkHsCdv88V9eaHo9vqPw3t7SVQVaziyGAb+EY61ifCr4ewaXfw6lbKMvbyo2MDLBwwyT9KAPn74l3UUPiBlt2LGDEZA/wBngcj69K+hPgNriXk/2O4bYrcDd0zn5Rj+VeZ/E74f3Dak8tpCfPkcPjOM5yCT7nNb3w0guNIhQ38ZhnKBck4wUbj9KAPsm/MMcoSX7iZ2jHHrwD1wa8w8W6sZISlniJo8lpjztPYAd25/DvXVJqS6zpnnLKPtCqQ+3k8cA4/rXkGuLcOywNkbcnaOfvcAfX3oA4nVdVeBJI7NFRm+8xJd5D0yc9fr+Qrzm48O6lqE7lpyqMegLYBPoM8Zr1k6KblN4Q7lwTjgfT3NaUNhFaI0twMd8BeAP60AeOW/hEWJwBukY4DMTn8K9R8H+FrU3MUMO2S5kIGByea8g8eeMBayNb2L4dPvEdRn3r2f4Y+MLLw7oNl4kh1210m9tAkgiuIvPa6Y43BuD8uOg4xnOc9ObEYqNO1z6PIOGa+Y83sWly+vXbZM+kPEfw51DQPDcUohw1yrHjqOOK+Q/FKzax4Zk02OHzpYJEZlPX5Tg9PTv6V9CfGf9oyPxJolrofh3VorgJkzSQRmLduAO0HrtBz/AJ6/OOh67DaT/wBpyYZ/KYAA5BfP8QJ7ivKqZhF1lY+8wPANdZbNV9Jy1S6q3f1PGvjVp9vF8N/DQkj8m5tb+ZfLOPuSxZJyAMjKD6V8GeN5jcJ5ScKtfoL+1JqdjFc6PpFmQIvJa9AB7TgBfpwDxX57+IcSlsCveTPyGUWnZnlKRsJOuMVJM+CMdqtSxhJWqjPjBwc0CL1nLgEZqxNMRnnFYcE+zvUdzcFzu3daALFxH5h967PwLZRy3ISQZLHFcFBOG+VjmvQvCV7HZXsbNgqTQB91eBvDoj0ZRHtUHHeu0/sF/wC8v51y/gvWbebRo3jYAcd8V139pxf3x+dAH//R+ydBl2zFc967S/VTHuPcZryNNestLnZnPmMP4VrB8UfFZrWwluJXSytoVJZs8gDvmgD1e91rQNCsZL3WLtLZUzwT8x+g618S/Fr9sDSPDRltvC8e914Dvg5I9BXy98W/jvqWvGdNPaRoXysK5O5x/fb0HoK+DPFl/qV3N++lP2idwoGcnLe1AH0j4t/aI+KfxKvZLTTrm7uA7bVjgJABY8DA4rZ8JeDtbuL2Gz1m/wDtetuQ0kCP5nlA9PMbJAbB6D+d
b2geHYfhV8IP+EijQ/2rdRZjJ6xhxkt/vN69q7L9lbyNR8GN4m1AYvtQ1MmaQ8sVicBRk9ABnigD7x0vTZtE8PRaTcfLLbwLC6+jIMV0Hw/eIpHCzBWjLEj1GeR+tbHj2zax8T6zajjbMWHfIYbh1+teceGNWXTNUeWXuScfUYoA6T4pabEl6t5GvyOg56ZI9DXzn4g1NrVxt+TaPSvqjx7eWesaRFHbj98ibxjp718P+Nb2VxJDEDkHn8enH/16APQ/DXxEurWYbbjaw4OehHofYV2sniOw1Cb7Szou0BmbIIzn+or41W/mRG2MVzwfU8/41e8JX95P4kitpd728AV3x828/wAII+vOD6UAfdhlsrazWW9kCfLuII5yR+GMZr5j8e/Grw1pjvZWcpklAwVU5Gf8+ldT8QdVi8OfD+51nWJ/+JjqYMNnbgkFVIGWbk5z1J/pxXy14V+Desa5bQ+J9YlEEN5IyI7DPQDkL9TjI78etAFK6+I1zfpKkempI8x6kMT7V53q9z4mulVVeWCFdxEauy9PTuK+4/BH7O1ndqou9UW2cOishjyTu6NnJyMc1b8X/BjwDpyBJdVeKQqRuLouXHHQj24+tTKCe6OrDY2rRbdKTV+x8EeHPHTQ3celXLTbmJUsS0mDzySOfavTtL1yS01+y0fWZntPOP3mJIx6jHH05r0C1+BXhGO9S7s9XDIzbi6lXxk8f/XqH4+eBtM0Sz8JeI9CkN1H9pNtOwIPGzeDkdOhrmngaUndo9uHF2PjT9mqmn4nG/HC9i1rxpeTWr+ZBbRW9vHjlQsUSjA/HNfLGuQ7NwYc17dqV2bnzJ35ZyWP1NeQ+I/mUlRyMmus+bbueP3TYmbis6Vcjj8q2rqLdL8vSqjxFcZFAjm3RhzVSRm79K6WWFCvpXPXCgMQKAK6yFTkVpW+pSwsCh5FZNKOtAH1P8O/HQj0TybmTDxtiu+/4T22/wCev618z+G7N2stykjJ7V0X2KX+8aAP/9KfV/GVrYAtNPsPcZwa+c/H3i298WA2KsVslbhOQHPq/sOw71Dqd+8rMCPm5yT1rhrm7KoWY84wKAON8Q28Njprrar+9kzvlP339s9h7V826DZpqvxAha4GY7SRAAem9j/SvoDxQ7vafMccGvDdOb7F4j89TjMisffGKAPuv4tR/afh4trCM+UqfTAXFM+E8P8AYnwvgW3AQjfNgeuc5q1cGPW/DSQuARJEDz9KzfBlyYdAvtIl6wBgq9OOaAP1C+JiG81601hCPJ1vSrK+jI774grfyFfLEzyf29d2oYosUTtnuD/+uvpmwlbxR+zz8NfHEY3tplkdNuj3CxnyQT9HQfnXzfdhDfahdkYhiid3J468CgDBXx9JPYTWEzkSQHbu/vDsQa8Y8R6pFcs0hz5jHBwP61zGgeIorrxHfWcrqYmyMdsU/X7CaBS8T74wSVye2eh9qAHx28VxEZF4KBs47/8A1+1cxYeIU0zUpoYT94qOmM4yP68VPHrSWi7JW8tR97OO/PTvzXGyWkWtazFJZy+XvcbmHQDrkdh/SgDoPEXi248Y+NdP02bNzFZrtSInIz1wfc969u8P/D74kawzySeIP7I09QCvlANIijOEVjwqkn04xnNfOd74XvNGu2vNIn3OSDuYj+gwfwpNQ8eeP7dDbPdiRnHRBjj1OMUAew+K9Mt/DNvOZ/E+rXd2JBgR3OxMgYLErjtx1r5m8Qa94TRWXUNWmlmQncsl0zsPTIyTmqNxP4l8RTvBcTSFTku2eAO59B+Waz20PQ9MlCfZ0aVl5OM/U8+tAF3QNX0fdvspby6jkOAsdwQvPY161rb33h/w5Dody0iQ3rrdxwSP5nlqyjHJ9z/SvO/CenWlvqc0sIWO3UbnIHp7ep6V9c23gjwXq+kIfFTvdX90FdrhWKPCccRxjoFX0I5OSaAPkC7uBs64ry/Xrnc5ANfZOufs839/5g8Ja5b3C4JWO4zG+PTIBFfK
Xj/4XfETwjOf7e0eaKHPEyDzIT9HXj88UAeQyTIHYZ57VRkkBHJOalmVlcq/DDrVIoOvNAFS4nbYQDWNISxIrXmjB61ntFigCgRzShelTlM0BNp5oA9n8Hwh9LDV1n2cV5r4e1GW0svLXkHpW9/bc3oKAP/T+TNRuTufucHoa4q9nKj5uDitrVZSl06D+KuA1m6kjl2qxxigDP11zLHy3XFeTapCba6tpQMGUMfzPH8q9HvpWuIo4lPzuQv5muQ14R3G6WMf8e7DGOm1eP5UAfUngPVRf+HbdWYFkUA/gKv2mYL2fb0lBBryH4Z6uYM2wP7txnBr2eNGM5kBHTigD9Fv2L9atfGvwe8VfCrUW3Pp93I6gn/lldDII+jqTXgXxe1KXwd4U1nT787NRtZWtpsfKSy/xjPZhhh9a4v9lD4h/wDCBfG1NPupfLs9fQ20uem7koT7hq+hv23vhfqOv6DP4u8NIXuYEzdwohYzxr0YY53Lnr6ZHpQB+UXhDUZBrTXhJIds/rXtd3qZmh8pwWYHOPavDdGhhtRGY2znqe4r1TSJFuXVC24Dg469P6UAcfrlxDPEVmBGOmK4W1e8sLnzrKc4Q5weRivZdf8ACkru8/JVh8o+mCa8u1PR7uKVli7dqANmLW5b4eXdOUzxn1A9MdvbiukvZPDttpoS33NcMMs7nqe/0+leSql3ASQzKB2q7baVca0s09xeGzgt0LSStyqgc9OP50APudbezVxCw2dwO/1rzO/1q6uNQy2d0h7Dge1fQWl/s9eLNZ+Fd18XRq1vaaTA6KsNwkgml8yTy02bcrk53c4wK8xvfAuoWWm3uqy3kUi2IUsiIdxViBkE+meaAL2h6gNOtbi7kP3NpJ7EtgV9HeHfGM1/p6Fm3uB1PcV82eCtW02C6l0/UYxcW14nlSq3OUb09CDzn1rptHkuvCOuz6DdSFox80LnpJE33Wzz24PvQB9Paf4lmRwkpC+nau/s/Fb7TBOVkgkGCr4ZT9Qa+bYtRR3Uk8/0rft7+Rhs3ZA96AOo8Y/Bj4X+Oi9w1n/ZWoOCRPa/KMn+8mdpr5G8f/s++JfBaPqFuV1TS1P+vhB3Lzx5idV+vSvqiPxBPbBW3cLxXo+h63FfwMGIdXGGVhkEdwQaAPylk0I56ZrFutIZDwOlfpZ46+Bnh/xLbSal4SVbDUOWMQ/1Uh9Mfwn6V8F+KNNvtB1CfS9Wga1uYWIZXGOnp6j6UAeXtZ7T0xWdcoYutdK7KzVz2oEZ4oAt6fdyJEVB6GtD7dL61zMExRcYqf7SfSgD/9T4PvtVhmMbL95upzmuV1Vt/Oc1w2n64bmCNy53g811Us4dN+MhsUAYF7LPDCZ7bDSR5ZQehx/WsBbqK9t3aPlZUz9D6fhXQz9HA9K8oudXGi62bVuLa75GeiP0P4GgD1jwRK6MkgONoxX1D4dmN9BJuAHlRlmPfAr5D0e/Gm2+9jtBx/kV9JeBNXXUNKu9h274dv1xzQByWpeIJ9G8UWmtWp2PazK4IP8AdOeMV+5/hXxLafEX4e6drBIk+2Wyb+/OBmv59vFFwPtcgY8hjjH1/wAK/UP9iHx8ut+CJfDM8hM+muUCk9F4x+lAHzD+0d8Grn4d61N4n0KNhpF5IWliUcQs3JYY/hJ6jHFeQeC9XWSYGUgbcH6iv2d8ceDLbxLYT2l5Cs0UilWDDIOR05r8cPjJ8NdW+DXix0hRm0e5cGCUdELc7D/Q0Ad3q/iCNAw4KlQAT71xNqp1S7Kgbgxrj7fWBewAM2cDrXd+E/JhYzu4Tryf0oA4nWbUrd/ZlAAXjgdKqTWL6vquk/DXTwTLqbrLd4BJEOdoU4/vH3710mpX1kdQubuaQJDCWZnboqjkn8K9j/Y48HXfjHxlf/E3VrcrayTF4A4+YxphYlGenTP1oA9i/aimT4X/AAT8GfDbTsIuoXfnTbf7lome3qz/AKV8TaPL/asl1pzklb2CSP2LFcj9a+sv+Cgl
4Yde8D2ErHcLa9mYZ6FnjA/livijwzeyQXsFwo3BGXkfWgDzKUtpepBT8rIcH8692eAeLvDEVxbKDqekqZIiOWeL+OP+o96474saGlpq66nAmLe5USAj/a5P61o/DXVhZuu18BecGgB9lrxIRcjB5FddZa3wGPHvXH+NtITSdVF/YKFsr8l1A4CP/Ev9RWfZ3R8n5zQB6hPqQ3Fw3Wuw8Ha7scxsQcV4lDqG+JgDk1u+F78Le+WX2MaAPqaz1hlPmwHAB+ZT6Vy/xG8M+F/HukG01e3RZ5SEju1AEkLH7pzxkZ4IrLtNQELAnlG61ieNbmWz0S7kgJeMKHx6AEHNAH58+J9DvvCuuXugaiMT2TlG9D6EexFcLdSbia+rfj3op1iXRfFenplr6AQzEd3ToT+FfN9x4S1lfnWLcKAOZQjHNSZWrkuk39s2ySMg1H9iu/7hoA//1fxoivG0673D7pJ4r0/Rbt9V053HVOfyryTWkCzyA9jn867X4eXyrO9qwyHB70AdOPJljuWkl2NAE4xnO/P+Feaaz4asNRkWa5unCDcRsUD+ddtcqYdZkTkLPE6c+q/OP5VhXkTEww56Ak0AVoLWO3solSR5AAFG85IAr3T4dX+yN4gcLtxxXhhIaTaTkKOBXovhO9+zuoHHykYFAFfxWQmqTKo43V71+yZ47bwh8To7OV9lvqahWGeNw4/wr598RStJeu7nG705rN8PahNpPiPTNUhJUwTDoex4NAH9N0Mml/2E+vaxcxWemwRM8s0rBEVQOSWPAFfk78f/AIzeFfidqP8AYHhLTBdaMp8p7qQENOF4/doPuL3DHk8HivPv2gfj/wCKPi5o2meH/C12bbw3ovlxXMEZ+ae6Th2cjqqZwF6d+tfNWhandwx7p1xg84HH/wBagDrbr4dT2QMnh+5aYL/y63OElHsj8K/0O0/Wube6ntw9rdu9u0ZIkRvlII9QeleleHtQu765EdpC9yXP3QCQMn16Dr3p/wARvhZa+I76G88S+Jrbw1YwwpHIsI+1Xk53E42Aqq7RwCWOfwoA8V8PaDrfxb8RDQ9GV18PWcgN9Pg4mZTnyxjrk44/E1+5/wADvhxbeHPDmnQxRhfkDsB/z075+g4r8btS+OWk/CePSfCfwWsVWGykUNLeATPdf3zKB8oLdcL09e1fsX+yz+0p8PvjlYjRV26L4ysot8+muwIkjX/lpC3Vh7dR3oA/P3/goHepdfGHS9MBybDSkfHXHnSsf1218Y6LdNb3SjsT07Ee1fRf7a+rrqX7R2vRqR/oVtZW34rCrkfm9fM9pIrOoYYIoA9c8SmLxB4VDIvmSWowQOWxXlHhK4ihui6ZGzrnqK6Wx177A5358tuGHXrXLXSwWOqTXdowMEy7gB6ntQB6F4u1e3m0SHTgBJJIwkz/AHAvGfx6VwEVyqQ/N1Xiq3nvNuaVssw/Qdqz3mEeQaANa2vHSZl3YU9K6HRborfpn7pOM158k5EoINdLYOZDui5I/pQB7tc6gljHl3OFHeue1rxUmpeD9Qc8bEZfrnpXC+JtckGnW8THEsoxj2HesG8uGGjR6Uhw966qcegO4/yoA9Xmgj1DwJbW8vzNbbGXPbPFQ2Xhe1urUBlBJrZ8N6Xf6/Y/2TZjdIEHGPQivdfBnwl1syZvFwCMDigD5M1z4eWb3Csi7eDWL/wrq29K/SBvgBJdgSPuzTf+Gdm/2qAP/9b8YfE6hZRJn7w+lUPDWoNZatC2eGOKteLM/ZYpO6tg/jXI2twVeOTJBVgeaAPf9eCR3dldL08yMnPoxwf51i3cf+kOzdV/lV7xC3m6JHNn5ljzn3AzVG8k8xZZs8MgI/EZoAxkwXZ+mciui0S6KyDrxXOJkA8+h5q1psxE5APQ0AdHq775Ax79K5ueWWOJ5LYlJQCyN6EdDg+lbF0zPgk9KyZCCCOtAF74P6va2n2mymnkZ5pFkeFiCu8H744zyOvNfQE3h6PUrr7fp9yb
VJUAkiMRdSw6OCCCD2I7/WviHVYZ9Pl/tCzdoJo2wHQlT+YrvvDXjrxVd2bWz6xcHb/D5hHHfkc0Ae6eJ9evvB8dvpyavFG8ufkgUhlX+8+ckZ7c81ybxDWl3nVhcnqR5nP5E16H4H+EGj6/pkHiDxXcS3Mt+rMqI+MDJALPyS3eu31H9mfw5qMHmeFtQlsLnGRvbzEJ9wQDj8aAPiq9kj07xVYXEsTSWkM371xzk85wfUDkCu2+F+qeMfDnxh0Xx/pv2jTWhuvOtpMMgeOM5CEcZDDG5T1r3HSPA+jymT4e/EK2jsNYsnBguVIjE69UIb7rHB4z1HvxVHxPcQ2vj7TtDWIQ2fh2ITSueAf4mOfTaMfjQBg/G/xPN4u+Mfi7xFdDbLeXzHGOAFVUA9uAOO1cVYtnnr071yM+sS6zqN3qsrfPeTvKQf8AbYmugsXbbnrzQBvmT5iGGf51RlSzimHnqdsnocBW+lSxspkwSTVDVVIjLKc96ANqSwsosSJ5jIwyCCDx+Vc/5Gm3UrwyTy27g+gYf0q9pN4ZrbyH7cVjavZSLKLqDIYdfegB19Y2+nrG8d0Z2ZsbSu0/zNbGiSlplA9a4q+uXmvbdT1VCT9T/wDqrptGYi4DE8AE+g4GaAItVujf646j/VQNsX0wvX9alsrpbzxGjhsxWS7AO29uv5CuXe8FlBLck5diSB6sx4rc8OW9zbAN5ZeSQ5d+gy3WgD70/ZctLbU/GzpfDfmB9o7ZGK/STTtIsYGwsSj8K/NL9mWY2vjfTSpzksh98jmv06spWab2oA6iKGNUACAfhT/Lj/uj8qFzgc075vWgD//X/GDxBi50qVgc4Ab8q83hnOQpORXZwXbXekbTyWXFefoSrgehoA+ip5PO0GND/FGR+lYvnvLpdoD/ABRKCfU4xVyFy2lpzxtwR+FZmnqXsLJf4VQ/zOKALhQiMHHVe9VrR9s42nk1bkQ+WDjPX8KowAb+nSgDoSd2NxySKouoLe1aJjLRgjj61VlQKCOp/SgDk7/Tvtg8kDJkkUH8TXReDfh9p2oXstqt69ncrkLuAaNvTI4I+oz9Ks6WiyX3lvyV+f8AIY/nV2bWrbwpqcGp3AZYmO1iozjvnFAHs3hz4hXnw3K+DfF1oZ4bXmKeE5IjfJHDY3L6HgjpXr+lfHP4fLtZrm49dghfP09P1r461zxd/wALD1Nr3TrWVo4I1ijAUs+1c8sFzySa5x2msptlzAUx8uHVl6H3oA+kPiV4ws/HeuJq1jbstnbQiFPMxvOGLZbBIHJ4FeMa7O1r4R1u6mmeS8vdsIZmLMIw4JyfTAxiv0H+GVpoE/w90r+ykjaGW1RnUgNucqBJuB6ndkHNfGXxo0ew0yy1qCwjEUMczBFHRRnOB7A9KAPnXTbjCpnmu9sZB5SvXmenMQo5rvbFsRemBQB0MUwLgE1avIxLCcc8VixtnYTz259K24z5qke3agDmbO4NnchCcc108s29BIi7h3FcxfwFZd45Ip1je3KkKoDDP6UAZGoyI+tSFBtAVBj0OM10sH7nS7i6HGyMj8W4H865C4lEutXLfd+cD8gK6PXpJLfwjIIWw1zLHHn0HU/yoA46HGo3SqRvt7c899zf/Wr0rSnuHZII+EY4ANeeaVPDYwrHD88g4OOhr0jRk+yhb2/PLfdQdWz2A/rQB9x/svaW114xgmH3LSJ5PqBhc/iTX6U2LBHPSvgL9k+Wy0u31jXtbcJNcbIY1zjag5wK+59K1fQ76Tctx+tAHcLcKVFL9oX1rmLvXdLtHEQlQj3aqv8Awk+mf89Y/wDvqgD/0PwrsbgwwSw5+VGIFc8o3TgDu39at/aCWlKjG85xUNkokvoVPQuP0oA9thBW1AHZcfpVTRmlawtI8cKrc/8AAjVm1cmIKOnpUOlzEWVvEq4YFsn23GgDTnU7cN3qG2hIcH3qe6faR747VZsocjOM0AaATKfMenaq8yoRtX0xWisZABAqvcRg
DPfFAFTQo86s57+TgD/gQzWr4h0FtUsDbviMSbwGIyAdhwfzrP0YFNXVV/ijcfyNdl4n1BNJ8NPdTpu2pKcDryAo/nQBu/Brxz4C8O6LbaLqWo21jPbriVZcrmTJ3EtjB/PpXvF5rPwx1y3kgutZ068jmAGyR8r83TGR6ehr4B0nwRda3oza/ZusssxeT7Ow5K5PRuefbFe1eDPiD4Cm0WHS/FFqtpeWirCXaMssgXjJIGQcDkGgD0Tw34sf4W+JNT0TSJk1PQHdJBGkm8IHUH5X5IYDAORg9/WuF+Ld+uo+Hr6/AKi7dpQD1AdsgH8KiTw/Y/FPxPbaJ8MIY7G6tt0rXGPJ+0KOGXGMYA6Fu/pUPxhsrrRtJk0i+iaCeD5WjbgqR/P60AfNlj1AHciu7s3Ij+b069a8/s2XcldxZyAQA5496ANZZF3DPeta2bjANc2XBGWGBV/TJCww5ORQBqSr5jkfhzWK1pd21wJYQGXuK2mGcsDzVSO7kSUqTlenPSgDjYlefVJ58YDyHiur8UyQRaFptpcReakszPjpjaBg/rWJbWzrcljwGcn8zT/Ht7JEmkW8Q3N5btge5A/pQA2zksrbEqRhcdM9B+Fd14ZifUNR+0yZdI/4j6noBXmOiaa9y4udTYqg6KOp/wAK998I2/zRyNH5UKf6pcf+PGgBvinxvP4P1KPT1uHiLxrJhWwOar2H7RXiPTSPsupScdic1x3xv8P31/dadrUEZMOwwsw6BgcgGvGrXwpfT4wpyaAPq0/tQ69L811MJW9ckUn/AA05qn95f++jXhelfCPXtQgMyRtj2Fan/ClPEP8Azzb8qAP/0fwK37mZh3q5oyh9QQn+HJqlOixSuiH5QeK09DGJmk79KAPU9Pm+TB6etW9M3tbxADAy3P8AwI1gWs/lEZ4BrpNJuY2sVQcnc4/8eNAFq4jZnCe4+tdFbwfZrUPIMEiptP07J+1Sj5eoz/Oq17dB5TGnKjvQBGJmfhaJS7DAqSCLLBsZyPpUl0gVCT6UAVdKJj1q3I77x6joatfFy8EXhaONTtaXC4HfcwJ/lVGyXfq1qMhdzEfmDTPHOmfb7qHTL5i0YjDLg4wcnmgDW+G748P2YGRlB/M1514xghtPFt+kChVZhLwMYMihj0981N4U8Z2fhuzk0rU45JJbNmVCgyGGTge1XdD0G48cX934i1Nmt4LiQhRHjcccAAkHAAwOnNAHo/7Ovi1fDfxNtJdu9biGWMZPfGf5Cvrj9or4fL8RPAV34n0hc6xpcTT7Rg+fboMuhx/EoyV/LuMfBmq+Cr7QMa3oF45azPmbT8sqBf4lYcHHcYH416nYftEeNLvw5JoU7W6+bGYnlRCJChGD/FgEjjNAHyjaEE8dR3rr7aQ+SFJz7Vma5pw03Vi8QAtrtRPDjptYkEf8BYEfhT4JRwDxQBubuw/L6VdtphG4Pr+FY/mdASDk4wf8avjIXPbqKAOmV9wDg8UoEMofoDzWdYXKTxbTwR+FWIY13thuxoAWO3XClfvVzfi66C6zbRMBuit0/DcSa7CONiAMdhivL/Fckk3iiRU5EaRofqBQB1uhbbu6igdsByM17+k0FttiQj5QAMegr5qto3Qh422Y5zmu3sNenIFtATPKeM9qAPoPUY9K1rwPeafcOqz+YHiB67gOMVyXgzwS17OimPJz0xWL4e0iX7ba3urTGRzIuyLPGc+lfYXw58OJdXhnhj+Tdx+dAHa+DvhzDbaUFeIbjjsK63/hArb/AJ5L+Qr1PTrEW1sseQD6Ve8kf3hQB//S/AJ2JY1u6YAiZHU1gEENg8Gty3YooxQB0jz7U3egrt/h7eWs1vcwzx+ZJDJkHvh//wBVeXTTHyuvHStfwTfTW2stbwdbqMqB6sOR/WgD23UtdOPs0I24OOKxrY75Nzdc/XrUQ02fz/37Dc3JAraW1SBBt5P+FAE8bEHJ4xTblhggDOR9abuwAThv/r1HK+6PFAFK2O7U
LUE4IcZP14qzrk7T615bHPkxop/Hmqdggm1O0tyeGmUfma5vxbrQ8O6/d2TxGSQAMp7ZI4zQBwOusn9uXpQceZ+uBmvo/wCHk0c3hazEYGEDqwH94MTz+ea8k8P+DrbVrAahqUsi3F0TICuMLuJxwRz+dWNN1rVfh1qUmnXUX2m0mw64OMj+8vHXsQaAPpBbaFgQxGxgc56YxzXyKJhbSlIiNoJAz3Ga9B174qz6jYHT9HtntnlBVpGYFgD1Cgdz0ya9G8KeFtO0GwjE0KS3UigyyMATk8kDPQCgDwrWNQjv9F0+PdveyllGRnhJgDjJ9GUkfWsiCUY6civZviP4ZsBo0ut6dElvLCymZVwodWOA2P7wOBx1B9q8KilO360Ab6OTitLcRFkfpxWDDIWOM8CtPzCyBCckUAWYLie3YPHyB2xW3a6lFLGWYYcD+fFcbLNNE25RWjb3QkjUMuCWAoA9RgkjeFSnU4H0rwrVrq4udevWtwSBKy5PQbTivaUEdpYfa522xxJvPtgV5HBJpeo3Ds02wyMTycdTmgB0ELNg3Vxn/ZHSuy0zVbPTV3ghcd/SspfDunAbnvVjB98/lWvY23hSylRpN9/IhyN5wmfoOtAHrXgSG/v7j/hJNUykCAi2Q8bs9X/wr7a+DviSGHTJ4XAMsch/Jua+ILPxhby7I3IHAAA4A9hXu3ww1uC11C4hmk2rNHuH1FAH2wddmlO8Hg9KP7Zm9a+cbvxzHDKUSTge9Vv+E+H/AD1/WgD/2Q==", - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Result: False\n" - ] - } - ], - "source": [ - "@Template.define\n", - "def is_cat(img: Image.Image) -> bool:\n", - " \"\"\"Is this a cat? Say True or False and nothing else. 
{img}\"\"\"\n", - " raise NotImplementedError\n", - "\n", - "\n", - "with handler(provider):\n", - " for img_file in [\"_static/img/cat.jpeg\", \"_static/img/not-cat.jpeg\"]:\n", - " print(f\"Testing image: {img_file}\")\n", - " display(IPYImage(filename=img_file))\n", - " with Image.open(img_file) as img:\n", - " print(f\"Result: {is_cat(img)}\")" + "with handler(provider), handler(ProgramSynthesis()):\n", + " count_a = count_char(\"a\")\n", + " assert callable(count_a)\n", + " assert count_a(\"banana\") == 3\n", + " assert count_a(\"cherry\") == 0\n" ] }, { @@ -304,7 +214,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 7, "id": "66711301", "metadata": {}, "outputs": [ @@ -316,7 +226,7 @@ "Tool call: weather(*(), **{'city': 'Chicago'}) -> cold\n", "Tool call: weather(*(), **{'city': 'New York'}) -> wet\n", "Tool call: weather(*(), **{'city': 'Barcelona'}) -> sunny\n", - "Based on the current weather, Barcelona is a city with good weather, as it is sunny there.\n" + "Based on the current weather conditions, Barcelona has sunny weather and would be a good choice!\n" ] } ], @@ -348,13 +258,70 @@ " print(vacation())" ] }, + { + "cell_type": "markdown", + "id": "3d221feb", + "metadata": {}, + "source": [ + "## Structured Output Generation\n", + "\n", + "Constrained generation is used for any type that is convertible to a Pydantic model." + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "id": "17668ac8", "metadata": {}, - "outputs": [], - "source": [] + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> You are onstage at a comedy club. 
You tell the following joke:\n", + "Knock knock.\n", + "Who's there?\n", + "Iguana.\n", + "Iguana who?\n", + "Iguana come inside, it's too cold out here!\n", + "> The crowd laughs politely.\n" + ] + } + ], + "source": [ + "@dataclasses.dataclass\n", + "class KnockKnockJoke:\n", + " whos_there: str\n", + " punchline: str\n", + "\n", + "\n", + "@Template.define\n", + "def write_joke(theme: str) -> KnockKnockJoke:\n", + " \"\"\"Write a knock-knock joke on the theme of {theme}.\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "@Template.define\n", + "def rate_joke(joke: KnockKnockJoke) -> bool:\n", + " \"\"\"Decide if {joke} is funny or not\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "def do_comedy():\n", + " joke = write_joke(\"lizards\")\n", + " print(\"> You are onstage at a comedy club. You tell the following joke:\")\n", + " print(\n", + " f\"Knock knock.\\nWho's there?\\n{joke.whos_there}.\\n{joke.whos_there} who?\\n{joke.punchline}\"\n", + " )\n", + " if rate_joke(joke):\n", + " print(\"> The crowd laughs politely.\")\n", + " else:\n", + " print(\"> The crowd stares in stony silence.\")\n", + "\n", + "\n", + "with handler(provider):\n", + " do_comedy()" + ] } ], "metadata": { diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index 1a1452e7..150620c8 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -21,7 +21,6 @@ from openai.types.responses import FunctionToolParam from effectful.handlers.llm import Template -from effectful.handlers.llm.structure import decode from effectful.ops.syntax import ObjectInterpretation, defop, implements from effectful.ops.types import Operation @@ -161,6 +160,7 @@ def __init__(self, client: openai.OpenAI, model_name: str = "gpt-4o"): def _call[**P, T]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: + ret_type = template.__signature__.return_annotation bound_args = 
template.__signature__.bind(*args, **kwargs) bound_args.apply_defaults() prompt = _OpenAIPromptFormatter().format_as_messages( @@ -170,21 +170,40 @@ def _call[**P, T]( tools = _tools_of_operations(template.tools) tool_definitions = [t.function_definition for t in tools.values()] + response_kwargs: dict[str, Any] = { + "model": self._model_name, + "tools": tool_definitions, + "tool_choice": "auto", + } + + if ret_type == str: + result_schema = None + else: + Result = pydantic.create_model( + "Response", value=ret_type, __config__={"extra": "forbid"} + ) + result_schema = openai.lib._pydantic.to_strict_json_schema(Result) + response_kwargs["text"] = { + "format": { + "type": "json_schema", + "name": "response", + "schema": result_schema, + "strict": True, + } + } + + called_tools = set([]) # tool calls that we have discharged + # Note: The OpenAI api only seems to accept images in the 'user' role. # The effect of different roles on the model's response is currently # unclear. - - called_tools = set([]) # tool calls that we have discharged model_input: list[Any] = [ {"type": "message", "content": prompt, "role": "user"} ] while True: response = self._client.responses.create( - model=self._model_name, - input=model_input, - tools=tool_definitions, - tool_choice="auto", + input=model_input, **response_kwargs ) new_input = [] @@ -217,6 +236,11 @@ def _call[**P, T]( assert last_resp.type == "message" last_resp_content = last_resp.content[0] assert last_resp_content.type == "output_text" + result_str = last_resp_content.text - ret_type = template.__signature__.return_annotation - return decode(ret_type, last_resp_content.text) + if result_schema is None: + return result_str + + result = Result.model_validate_json(result_str) + assert isinstance(result, Result) + return result.value # type: ignore[attr-defined] diff --git a/effectful/handlers/llm/structure.py b/effectful/handlers/llm/structure.py deleted file mode 100644 index bc6c7dd1..00000000 --- 
a/effectful/handlers/llm/structure.py +++ /dev/null @@ -1,41 +0,0 @@ -import typing - -from effectful.ops.syntax import defop - - -class DecodeError(RuntimeError): - """Raised when decoding an LLM response fails.""" - - def __init__(self, t: type, response: str): - super().__init__() - self.type_ = t - self.response = response - - def __repr__(self): - return f"DecodeError({self.type_}, {self.response})" - - -@defop -def decode[T](t: type[T], content: str) -> T: - """Decode `content` as an instance of `t`. Used to consume the output of an - LLM. - - """ - if t is str: - return typing.cast(T, content) - elif t is bool: - match content.strip().lower(): - case "true": - return typing.cast(T, True) - case "false": - return typing.cast(T, False) - case _: - raise DecodeError(t, content) - elif t in (int, float, complex, bool): - try: - result = t(content) # type: ignore - except ValueError: - raise DecodeError(t, content) - return typing.cast(T, result) - - raise DecodeError(t, content) diff --git a/effectful/handlers/llm/synthesis.py b/effectful/handlers/llm/synthesis.py index 4178fe4b..c26bddaa 100644 --- a/effectful/handlers/llm/synthesis.py +++ b/effectful/handlers/llm/synthesis.py @@ -6,49 +6,49 @@ import typing from effectful.handlers.llm import Template -from effectful.handlers.llm.structure import decode from effectful.ops.semantics import fwd from effectful.ops.syntax import ObjectInterpretation, implements -class ProgramSynthesis(ObjectInterpretation): - """Provides a `decode` handler for callables and a `template` handler to - instruct the LLM to generate code of the right form and with the right type. 
+class SynthesisError(Exception): + """Raised when program synthesis fails.""" - """ + def __init__(self, message, code=None): + super().__init__(message) + self.code = code - @implements(decode) - def _decode[T](self, t: type[T], content: str) -> T: - origin = typing.get_origin(t) - t = t if origin is None else origin - if not (issubclass(t, collections.abc.Callable)): # type: ignore[arg-type] - return fwd() +class ProgramSynthesis(ObjectInterpretation): + """Provides a `template` handler to instruct the LLM to generate code of the + right form and with the right type. + + """ + def _parse_and_eval[T](self, t: type[T], content: str) -> T: pattern = r"(.*?)" code_content = re.search(pattern, content, re.DOTALL) if code_content is None: - return fwd() + raise SynthesisError(" tags not found", content) code = code_content.group(1) try: module_ast = ast.parse(code) - except SyntaxError: - return fwd() + except SyntaxError as exc: + raise SynthesisError("failed to parse", content) from exc if not isinstance(module_ast, ast.Module): - return fwd() + raise SynthesisError("not a module", content) last_decl = module_ast.body[-1] if not isinstance(last_decl, ast.FunctionDef): - return fwd() + raise SynthesisError("last definition not a function", content) # TODO: assert callable type compatibility gs: dict = {} try: exec(code, gs) - except Exception: - return fwd() + except Exception as exc: + raise SynthesisError("evaluation failed", content) from exc return gs[last_decl.name] @@ -76,8 +76,14 @@ def _call(self, template, *args, **kwargs) -> None: """).strip() - return fwd( - dataclasses.replace(template, __prompt_template__=prompt_ext), + response = fwd( + dataclasses.replace( + template, + __prompt_template__=prompt_ext, + __signature__=template.__signature__.replace(return_annotation=str), + ), *args, **kwargs, ) + + return self._parse_and_eval(ret_type, response) diff --git a/tests/test_handlers_llm.py b/tests/test_handlers_llm.py index 879f6508..2531ef0d 100644 --- 
a/tests/test_handlers_llm.py +++ b/tests/test_handlers_llm.py @@ -1,21 +1,18 @@ from collections.abc import Callable -import pytest - from effectful.handlers.llm import Template -from effectful.handlers.llm.structure import DecodeError, decode from effectful.handlers.llm.synthesis import ProgramSynthesis from effectful.ops.semantics import handler from effectful.ops.syntax import ObjectInterpretation, implements -class MockLLMProvider(ObjectInterpretation): +class MockLLMProvider[T](ObjectInterpretation): """Mock provider for testing. Initialized with prompts and responses. Raises if an unexpected prompt is given. """ - def __init__(self, prompt_responses: dict[str, str]): + def __init__(self, prompt_responses: dict[str, T]): """Initialize with a dictionary mapping prompts to expected responses. Args: @@ -24,7 +21,7 @@ def __init__(self, prompt_responses: dict[str, str]): self.prompt_responses = prompt_responses @implements(Template.__call__) - def _call[**P, T]( + def _call[**P]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: bound_args = template.__signature__.bind(*args, **kwargs) @@ -35,15 +32,13 @@ def _call[**P, T]( raise ValueError(f"Unexpected prompt: {prompt!r}") response = self.prompt_responses[prompt] - - ret_type = template.__signature__.return_annotation - return decode(ret_type, response) + return response -class SingleResponseLLMProvider(ObjectInterpretation): +class SingleResponseLLMProvider[T](ObjectInterpretation): """Simplified mock provider that returns a single response for any prompt.""" - def __init__(self, response: str): + def __init__(self, response: T): """Initialize with a single response string. 
Args: @@ -52,11 +47,10 @@ def __init__(self, response: str): self.response = response @implements(Template.__call__) - def _call[**P, T]( + def _call[**P]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: - ret_type = template.__signature__.return_annotation - return decode(ret_type, self.response) + return self.response # Test templates from the notebook examples @@ -100,7 +94,7 @@ def test_limerick(): def test_primes_decode_int(): """Test the primes template correctly decodes integer response.""" - mock_provider = SingleResponseLLMProvider("61") + mock_provider = SingleResponseLLMProvider(61) with handler(mock_provider): result = primes(6) @@ -108,17 +102,6 @@ def test_primes_decode_int(): assert isinstance(result, int) -def test_primes_decode_error(): - """Test that non-numeric responses raise DecodeError.""" - mock_provider = SingleResponseLLMProvider("not a number") - - with handler(mock_provider): - with pytest.raises(DecodeError) as exc_info: - primes(7) - assert exc_info.value.type_ == int - assert exc_info.value.response == "not a number" - - def test_count_char_with_program_synthesis(): """Test the count_char template with program synthesis.""" mock_code = """ @@ -132,18 +115,3 @@ def count_occurrences(s): assert callable(count_a) assert count_a("banana") == 3 assert count_a("cherry") == 0 - - -def test_decode_primitives(): - """Test decode function with primitive types.""" - assert decode(str, "hello") == "hello" - assert decode(int, "42") == 42 - assert decode(float, "3.14") == 3.14 - assert decode(bool, "true") == True - assert decode(bool, "false") == False - - with pytest.raises(DecodeError): - decode(int, "not a number") - - with pytest.raises(DecodeError): - decode(bool, "maybe") From 02c4378f229b9075085d40f2e84c19393c59ce9a Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan Date: Fri, 24 Oct 2025 16:57:14 -0400 Subject: [PATCH 06/39] implemented support for class methods in `Template.define` (#377) --- 
effectful/handlers/llm/__init__.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/effectful/handlers/llm/__init__.py b/effectful/handlers/llm/__init__.py index 829d3721..17da000a 100644 --- a/effectful/handlers/llm/__init__.py +++ b/effectful/handlers/llm/__init__.py @@ -1,11 +1,11 @@ import dataclasses import inspect from collections.abc import Callable, Iterable +import functools from effectful.ops.syntax import defop from effectful.ops.types import NotHandled, Operation - @dataclasses.dataclass(frozen=True) class Template[**P, T]: __signature__: inspect.Signature @@ -16,6 +16,12 @@ class Template[**P, T]: def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: raise NotHandled + def __get__(self, instance, _owner): + if instance is not None: + return functools.partial(self, instance) + else: + return self + @classmethod def define(cls, _func=None, *, tools: Iterable[Operation] = ()): def decorator(body: Callable[P, T]): From d9d1782dd2c7159ad894e2a3f7fbdf990afded4e Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan Date: Fri, 24 Oct 2025 17:04:13 -0400 Subject: [PATCH 07/39] Revert "implemented support for class methods in `Template.define` (#377)" This reverts commit 02c4378f229b9075085d40f2e84c19393c59ce9a. 
--- effectful/handlers/llm/__init__.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/effectful/handlers/llm/__init__.py b/effectful/handlers/llm/__init__.py index 17da000a..829d3721 100644 --- a/effectful/handlers/llm/__init__.py +++ b/effectful/handlers/llm/__init__.py @@ -1,11 +1,11 @@ import dataclasses import inspect from collections.abc import Callable, Iterable -import functools from effectful.ops.syntax import defop from effectful.ops.types import NotHandled, Operation + @dataclasses.dataclass(frozen=True) class Template[**P, T]: __signature__: inspect.Signature @@ -16,12 +16,6 @@ class Template[**P, T]: def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: raise NotHandled - def __get__(self, instance, _owner): - if instance is not None: - return functools.partial(self, instance) - else: - return self - @classmethod def define(cls, _func=None, *, tools: Iterable[Operation] = ()): def decorator(body: Callable[P, T]): From 1053fdde281eea0fa3013ac7c9be6413ede8c0c7 Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Sun, 26 Oct 2025 13:14:24 -0400 Subject: [PATCH 08/39] Add support for methods in `Template.define` (#377) (#378) --- effectful/handlers/llm/__init__.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/effectful/handlers/llm/__init__.py b/effectful/handlers/llm/__init__.py index 829d3721..0cdbdd0e 100644 --- a/effectful/handlers/llm/__init__.py +++ b/effectful/handlers/llm/__init__.py @@ -1,4 +1,5 @@ import dataclasses +import functools import inspect from collections.abc import Callable, Iterable @@ -16,6 +17,12 @@ class Template[**P, T]: def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: raise NotHandled + def __get__(self, instance, _owner): + if instance is not None: + return functools.partial(self, instance) + else: + return self + @classmethod def define(cls, _func=None, *, tools: Iterable[Operation] = ()): def decorator(body: Callable[P, T]): 
From 54efb777f10b0a76ba36299771e59e806a31f810 Mon Sep 17 00:00:00 2001 From: "Dat Nguyen (Marc)" <15943389+datvo06@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:46:35 -0400 Subject: [PATCH 09/39] Adding a lower-level event and a logger example (#382) * Adding a lower-level event and a logger example * Bring back the note * Linting * Add a logging handler that expose python logging * Lint * Miscs * Update LoggingHandler interface * Linting and fixing signatures --- docs/source/llm.ipynb | 143 +++++++++++++++++++++++----- effectful/handlers/llm/providers.py | 69 +++++++++++++- 2 files changed, 187 insertions(+), 25 deletions(-) diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index 11d7d3cd..b4e50ab6 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -9,13 +9,20 @@ "source": [ "import dataclasses\n", "import functools\n", + "import logging\n", "import os\n", + "import sys\n", "from collections.abc import Callable\n", "\n", "import openai\n", "\n", "from effectful.handlers.llm import Template\n", - "from effectful.handlers.llm.providers import OpenAIAPIProvider, tool_call\n", + "from effectful.handlers.llm.providers import (\n", + " LLMLoggingHandler,\n", + " OpenAIAPIProvider,\n", + " llm_request,\n", + " tool_call,\n", + ")\n", "from effectful.handlers.llm.synthesis import ProgramSynthesis\n", "from effectful.ops.semantics import fwd, handler\n", "from effectful.ops.syntax import defop\n", @@ -80,17 +87,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "A fish with a wish in the sea, \n", - "Dreamed of climbing a tall, grand tree. \n", - "With scales shining bright, \n", - "It leapt out of sight, \n", - "But decided that swimming's the key.\n", + "In the sea swam a fish with a grin, \n", + "Who loved to dance with a fin-spin. \n", + "He'd flip and he'd dart, \n", + "Winning hearts from the start, \n", + "With a splash, he'd twirl and then win! 
\n", "----------------------------------------\n", - "In the pond where the sun loves to glisten, \n", - "A curious fish liked to listen. \n", - "With a flip and a swish, \n", - "It'd grant every wish, \n", - "To the tales that the waters would christen.\n" + "In the sea where the fish love to play, \n", + "A school of bright colors display. \n", + "With a splash and a flip, \n", + "They go on a quick trip, \n", + "Chasing bubbles that dance on the bay.\n" ] } ], @@ -120,13 +127,13 @@ "output_type": "stream", "text": [ "\n", - "Silver scales shimmer, \n", - "Deep beneath the water's dance— \n", - "Quiet currents hum.\n", + "Gliding through currents, \n", + "Scales shimmer like silver moons— \n", + "Silent whispers dance.\n", "----------------------------------------\n", - "Silver scales shimmer, \n", - "Deep beneath the water's dance— \n", - "Quiet currents hum.\n" + "Gliding through currents, \n", + "Scales shimmer like silver moons— \n", + "Silent whispers dance.\n" ] } ], @@ -197,7 +204,7 @@ " count_a = count_char(\"a\")\n", " assert callable(count_a)\n", " assert count_a(\"banana\") == 3\n", - " assert count_a(\"cherry\") == 0\n" + " assert count_a(\"cherry\") == 0" ] }, { @@ -226,7 +233,7 @@ "Tool call: weather(*(), **{'city': 'Chicago'}) -> cold\n", "Tool call: weather(*(), **{'city': 'New York'}) -> wet\n", "Tool call: weather(*(), **{'city': 'Barcelona'}) -> sunny\n", - "Based on the current weather conditions, Barcelona has sunny weather and would be a good choice!\n" + "Among the cities checked, **Barcelona** currently has good weather, as it is sunny.\n" ] } ], @@ -283,7 +290,7 @@ "Who's there?\n", "Iguana.\n", "Iguana who?\n", - "Iguana come inside, it's too cold out here!\n", + "Iguana tell you a secret, but I'm too shy!\n", "> The crowd laughs politely.\n" ] } @@ -322,11 +329,103 @@ "with handler(provider):\n", " do_comedy()" ] + }, + { + "cell_type": "markdown", + "id": "0cab62b5", + "metadata": {}, + "source": [ + "### Logging LLM requests\n", + "To 
intercept messages being called on the lower-level, we can write a handler for `llm_request`:" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "cbf495a2", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Request fired: () {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a haiku on the theme of fish2.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'} Response(id='resp_09fcadbd12423e30006900db6313ec81959d965abbe131e37e', created_at=1761663843.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_09fcadbd12423e30006900db63be008195ae1f74af1ec1c0ec', content=[ResponseOutputText(annotations=[], text='Beneath gentle waves, \\nWhispers of scales glide in peace— \\nSilent dance of fins.', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=22, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=40), user=None, billing={'payer': 'developer'}, store=True)\n", + "Request fired: () {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a limerick on the theme of fish.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'} 
Response(id='resp_08d35081ff1cf93a006900db65c80881909c2bf0bd1f8fb826', created_at=1761663845.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_08d35081ff1cf93a006900db667bd88190b5e6ba81bb4ceaf5', content=[ResponseOutputText(annotations=[], text='In the sea where the bright corals swish, \\nLived a cod with a dream to be \"pish.\" \\nHe\\'d leap and he\\'d twirl, \\nIn a watery swirl, \\nEarning fame as the most stylish fish.', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=51, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=69), user=None, billing={'payer': 'developer'}, store=True)\n" + ] + } + ], + "source": [ + "def log_llm(client, *args, **kwargs):\n", + " result = fwd()\n", + " print(\"Request fired: \", args, kwargs, result)\n", + " return result\n", + "\n", + "\n", + "# Avoid cache\n", + "try:\n", + " haiku.cache_clear()\n", + "except Exception:\n", + " pass\n", + "\n", + "# Put llm_request handler innermost so it has highest precedence during the call\n", + "with handler(provider), handler({llm_request: log_llm}):\n", + " _ = haiku(\"fish2\")\n", + " _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache" + ] + }, + { + "cell_type": "markdown", + "id": "8e8e531d", + "metadata": {}, + 
"source": [ + "### Python logging for LLM requests and tool calls\n", + "We can also uses Python logger through `LLMLoggingHandler` to log both low-level LLM requests (`llm_request`) and model-initiated tool use (`tool_call`):\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "81a15f00", + "metadata": {}, + "outputs": [ + { + "ename": "TypeError", + "evalue": "StreamHandler.__init__() got an unexpected keyword argument 'formatter'", + "output_type": "error", + "traceback": [ + "\u001b[31m---------------------------------------------------------------------------\u001b[39m", + "\u001b[31mTypeError\u001b[39m Traceback (most recent call last)", + "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[14]\u001b[39m\u001b[32m, line 4\u001b[39m\n\u001b[32m 2\u001b[39m logger = logging.getLogger(\u001b[33m\"\u001b[39m\u001b[33meffectful.llm\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m 3\u001b[39m logger.setLevel(logging.INFO)\n\u001b[32m----> \u001b[39m\u001b[32m4\u001b[39m logger.addHandler(\u001b[43mlogging\u001b[49m\u001b[43m.\u001b[49m\u001b[43mStreamHandler\u001b[49m\u001b[43m(\u001b[49m\u001b[43msys\u001b[49m\u001b[43m.\u001b[49m\u001b[43mstdout\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mformatter\u001b[49m\u001b[43m=\u001b[49m\u001b[43mlogging\u001b[49m\u001b[43m.\u001b[49m\u001b[43mFormatter\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[38;5;132;43;01m%(levelname)s\u001b[39;49;00m\u001b[33;43m \u001b[39;49m\u001b[38;5;132;43;01m%(payload)s\u001b[39;49;00m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[32m 5\u001b[39m \u001b[38;5;66;03m# 2. Pass it to the handler\u001b[39;00m\n\u001b[32m 6\u001b[39m llm_logger = LLMLoggingHandler(logger=logger) \u001b[38;5;66;03m# can also be LLMLoggingHandler()\u001b[39;00m\n", + "\u001b[31mTypeError\u001b[39m: StreamHandler.__init__() got an unexpected keyword argument 'formatter'" + ] + } + ], + "source": [ + "# 1. 
Create a logger\n", + "logger = logging.getLogger(\"effectful.llm\")\n", + "logger.setLevel(logging.INFO)\n", + "log_handler = logging.StreamHandler(sys.stdout)\n", + "log_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\n", + "logger.addHandler(log_handler)\n", + "# 2. Pass it to the handler\n", + "llm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\n", + "\n", + "# Avoid cache for demonstration\n", + "try:\n", + " haiku.cache_clear()\n", + " limerick.cache_clear()\n", + "except Exception:\n", + " pass\n", + "\n", + "with handler(provider), handler(llm_logger):\n", + " _ = haiku(\"fish3\")\n", + " _ = limerick(\"fish4\")" + ] } ], "metadata": { "kernelspec": { - "display_name": "effectful", + "display_name": ".venv", "language": "python", "name": "python3" }, @@ -340,7 +439,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.9" + "version": "3.13.7" } }, "nbformat": 4, diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index 150620c8..59ed61b6 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -2,6 +2,7 @@ import dataclasses import inspect import io +import logging import string from collections.abc import Iterable, Mapping from typing import Any, get_type_hints @@ -21,6 +22,7 @@ from openai.types.responses import FunctionToolParam from effectful.handlers.llm import Template +from effectful.ops.semantics import fwd from effectful.ops.syntax import ObjectInterpretation, defop, implements from effectful.ops.types import Operation @@ -129,6 +131,13 @@ def push_current_text(): return prompt_parts +# Emitted for model request/response rounds so handlers can observe/log requests. +@defop +def llm_request(client: openai.OpenAI, *args, **kwargs) -> Any: + """Low-level LLM request. 
Handlers may log/modify requests and delegate via fwd().""" + return client.responses.create(*args, **kwargs) + + # Note: attempting to type the tool arguments causes type-checker failures @defop def tool_call[T](template: Template, tool: Operation[..., T], *args, **kwargs) -> T: @@ -136,6 +145,62 @@ def tool_call[T](template: Template, tool: Operation[..., T], *args, **kwargs) - return tool(*args, **kwargs) +class LLMLoggingHandler(ObjectInterpretation): + """Logs llm_request rounds and tool_call invocations using Python logging. + + Configure with a logger or logger name. By default logs at INFO level. + """ + + def __init__( + self, + *, + logger: logging.Logger | None = None, + ): + """Initialize the logging handler. + + Args: + logger: The logger to use. If None, the logger name will be the name of the class. Note that the logger should have a handler that print out also the extra payload, e.g. `%(payload)s`. + """ + self.logger = logger or logging.getLogger(__name__) + + @implements(llm_request) + def _log_llm_request(self, client: openai.OpenAI, *args, **kwargs) -> Any: + """Log the LLM request and response.""" + + response = fwd() + self.logger.info( + "llm.request", + extra={ + "payload": { + "args": args, + "kwargs": kwargs, + "response": response, + } + }, + ) + return response + + @implements(tool_call) + def _log_tool_call( + self, template: Template, tool: Operation, *args, **kwargs + ) -> Any: + """Log the tool call and result.""" + + tool_name = tool.__name__ + result = fwd() + self.logger.info( + "llm.tool_call", + extra={ + "payload": { + "tool": tool_name, + "args": args, + "kwargs": kwargs, + } + }, + ) + return result + + def _call_tool_with_json_args( template: Template, tool: Tool, json_str_args: str ) -> dict: @@ -202,9 +267,7 @@ def _call[**P, T]( ] while True: - response = self._client.responses.create( - input=model_input, **response_kwargs - ) + response = llm_request(self._client, input=model_input, **response_kwargs) new_input = [] for 
message in response.output: From 657924ee206a928384fa1f85146eeb24a1f8425e Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Wed, 29 Oct 2025 13:58:07 -0400 Subject: [PATCH 10/39] Add support for tools returning images (#385) * added support for images * updated conversion to be delegated through a dynamic pydantic model instead * Revert "updated conversion to be delegated through a dynamic pydantic model instead" This reverts commit d41e8107f3a20fc0b1c1027b3a1f5e7bc8629bbd. * updated implementation to use OpenAI types and unify conversion logic under Tool. --- effectful/handlers/llm/providers.py | 53 +++++++++++++++++++++++------ 1 file changed, 43 insertions(+), 10 deletions(-) diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index 59ed61b6..3447a072 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -4,7 +4,8 @@ import io import logging import string -from collections.abc import Iterable, Mapping +import typing +from collections.abc import Iterable, Mapping, Sequence from typing import Any, get_type_hints import pydantic @@ -37,13 +38,47 @@ def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: return f"data:image/png;base64,{_pil_image_to_base64_data(pil_image)}" +def _pil_image_to_openai_image_param( + pil_image: Image.Image, +) -> openai.types.responses.ResponseInputImageParam: + return openai.types.responses.ResponseInputImageParam( + type="input_image", + detail="auto", + image_url=_pil_image_to_base64_data_uri(pil_image), + ) + + +OpenAIFunctionOutputParamType = ( + str | list[openai.types.responses.ResponseInputImageParam] +) + + @dataclasses.dataclass class Tool[**P, T]: parameter_model: type[pydantic.BaseModel] - result_model: type[pydantic.BaseModel] operation: Operation[P, T] name: str + def serialise_return_value(self, value) -> OpenAIFunctionOutputParamType: + """Serializes a value returned by the function 
into a json format suitable for the OpenAI API.""" + sig = inspect.signature(self.operation) + ret_ty = sig.return_annotation + ret_ty_origin = typing.get_origin(ret_ty) or ret_ty + ret_ty_args = typing.get_args(ret_ty) + + # special casing for images + if ret_ty == Image.Image: + return [_pil_image_to_openai_image_param(value)] + + # special casing for sequences of images (tuple[Image.Image, Image.Image], etc.) + if issubclass(ret_ty_origin, Sequence) and all( + arg == Image.Image for arg in ret_ty_args + ): + return [_pil_image_to_openai_image_param(image) for image in value] + + # otherwise stringify + return str({"status": "success", "result": str(value)}) + @classmethod def of_operation(cls, op: Operation[P, T], name: str): sig = inspect.signature(op) @@ -51,15 +86,13 @@ def of_operation(cls, op: Operation[P, T], name: str): fields = { param_name: hints.get(param_name, str) for param_name in sig.parameters } + parameter_model = pydantic.create_model( "Params", __config__={"extra": "forbid"}, **fields ) - result_model = pydantic.create_model( - "Result", __config__={"extra": "forbid"}, result=sig.return_annotation - ) + return cls( parameter_model=parameter_model, - result_model=result_model, operation=op, name=name, ) @@ -203,15 +236,15 @@ def _log_tool_call( def _call_tool_with_json_args( template: Template, tool: Tool, json_str_args: str -) -> dict: +) -> OpenAIFunctionOutputParamType: try: args = tool.parameter_model.model_validate_json(json_str_args) result = tool_call( template, tool.operation, **args.model_dump(exclude_defaults=True) ) - return {"status": "success", "result": str(result)} + return tool.serialise_return_value(result) except Exception as exn: - return {"status": "failure", "exception": str(exn)} + return str({"status": "failure", "exception": str(exn)}) class OpenAIAPIProvider(ObjectInterpretation): @@ -286,7 +319,7 @@ def _call[**P, T]( tool_response = { "type": "function_call_output", "call_id": call_id, - "output": str(tool_result), + 
"output": tool_result, } new_input.append(tool_response) From 68af295cf51aeb111f2e7900617cfd5418ef4c45 Mon Sep 17 00:00:00 2001 From: "Dat Nguyen (Marc)" <15943389+datvo06@users.noreply.github.com> Date: Wed, 12 Nov 2025 17:10:20 -0500 Subject: [PATCH 11/39] Implement Caching Handler for LLM (#392) * Adding caching handler * Adding caching handler * Linting and cleaner implementation of hashing keys * Minor linting * Slightly more precise type for cacher * Slightly more precise type for cacher * Addressing comments * Attempt to fix issue with pypandoc * Fixing CI * Linting --- .github/workflows/lint.yml | 4 + .github/workflows/test.yml | 4 + .github/workflows/test_notebooks.yml | 4 + docs/source/llm.ipynb | 120 ++++++++++++++++++--------- effectful/handlers/llm/providers.py | 30 ++++++- pyproject.toml | 2 +- 6 files changed, 121 insertions(+), 43 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 191d8f4a..86b3bfab 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -19,6 +19,10 @@ jobs: with: enable-cache: true + - name: Install pandoc + run: | + sudo apt install -y pandoc + - name: Install dependencies run: | uv sync --all-extras --dev diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e0c65890..7c932237 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -22,6 +22,10 @@ jobs: with: enable-cache: true + - name: Install pandoc + run: | + sudo apt install -y pandoc + - name: Install Python dependencies run: | uv sync --all-extras --dev --python ${{ matrix.python-version }} diff --git a/.github/workflows/test_notebooks.yml b/.github/workflows/test_notebooks.yml index bf8a8b1a..aa4d9520 100644 --- a/.github/workflows/test_notebooks.yml +++ b/.github/workflows/test_notebooks.yml @@ -26,6 +26,10 @@ jobs: with: enable-cache: true + - name: Install pandoc + run: | + sudo apt install -y pandoc + - name: Install Python packages run: | uv sync --all-extras --dev diff 
--git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index b4e50ab6..9b9b88b2 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": 11, "id": "5aaf649f", "metadata": {}, "outputs": [], @@ -18,6 +18,7 @@ "\n", "from effectful.handlers.llm import Template\n", "from effectful.handlers.llm.providers import (\n", + " CacheLLMRequestHandler,\n", " LLMLoggingHandler,\n", " OpenAIAPIProvider,\n", " llm_request,\n", @@ -56,7 +57,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 12, "id": "1e832675", "metadata": {}, "outputs": [], @@ -79,7 +80,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 13, "id": "634f6533", "metadata": {}, "outputs": [ @@ -87,17 +88,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "In the sea swam a fish with a grin, \n", - "Who loved to dance with a fin-spin. \n", - "He'd flip and he'd dart, \n", - "Winning hearts from the start, \n", - "With a splash, he'd twirl and then win! \n", + "There once was a fish in the sea, \n", + "Who dreamed of a life wild and free. \n", + "He tried to make friends, \n", + "Around coral bends, \n", + "And surfed on the waves with such glee.\n", "----------------------------------------\n", - "In the sea where the fish love to play, \n", - "A school of bright colors display. \n", - "With a splash and a flip, \n", - "They go on a quick trip, \n", - "Chasing bubbles that dance on the bay.\n" + "There once was a fish who could skate, \n", + "Gliding smooth on a pond, silver plate. \n", + "With a flip and a flop, \n", + "He'd never quite stop, \n", + "Making waves with his slick figure eight.\n" ] } ], @@ -113,12 +114,12 @@ "id": "2e59acbc", "metadata": {}, "source": [ - "If we want deterministic behavior, we can cache the template call." + "If we want deterministic behavior, we can cache the template call. 
We can either cache it with the default `@functools.cache` or using `CacheLLMRequestHandler`:" ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 14, "id": "706ce53b", "metadata": {}, "outputs": [ @@ -127,13 +128,29 @@ "output_type": "stream", "text": [ "\n", - "Gliding through currents, \n", - "Scales shimmer like silver moons— \n", - "Silent whispers dance.\n", + "Ripples in moonlight, \n", + "Silver scales dance in silence— \n", + "A river's secret.\n", "----------------------------------------\n", - "Gliding through currents, \n", - "Scales shimmer like silver moons— \n", - "Silent whispers dance.\n" + "Ripples in moonlight, \n", + "Silver scales dance in silence— \n", + "A river's secret.\n", + "\n", + "Silent waters gleam, \n", + "Fish drift in the moon’s soft glow— \n", + "Nature's quiet dance.\n", + "----------------------------------------\n", + "Silent waters gleam, \n", + "Fish drift in the moon’s soft glow— \n", + "Nature's quiet dance.\n", + "\n", + "Silver scales shimmer, \n", + "Silently weaving through waves— \n", + "Whispers of the deep.\n", + "----------------------------------------\n", + "Silver scales shimmer, \n", + "Silently weaving through waves— \n", + "Whispers of the deep.\n" ] } ], @@ -144,12 +161,32 @@ " \"\"\"Write a haiku on the theme of {theme}.\"\"\"\n", " raise NotImplementedError\n", "\n", + "@Template.define\n", + "def haiku_no_cache(theme: str) -> str:\n", + " \"\"\"Write a haiku on the theme of {theme}.\"\"\"\n", + " raise NotImplementedError\n", + "\n", "\n", "print()\n", "with handler(provider):\n", " print(haiku(\"fish\"))\n", " print(\"-\" * 40)\n", - " print(haiku(\"fish\"))" + " print(haiku(\"fish\"))\n", + "\n", + "print()\n", + "cache_handler1 = CacheLLMRequestHandler()\n", + "with handler(provider), handler(cache_handler1):\n", + " print(haiku_no_cache(\"fish2\"))\n", + " print(\"-\" * 40)\n", + " print(haiku_no_cache(\"fish2\"))\n", + "\n", + "print()\n", + "cache_handler2 = 
CacheLLMRequestHandler()\n", + "with handler(provider), handler(cache_handler2):\n", + " print(haiku_no_cache(\"fish3\"))\n", + " print(\"-\" * 40)\n", + " print(haiku_no_cache(\"fish3\"))\n", + "\n" ] }, { @@ -164,7 +201,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 15, "id": "2c766859", "metadata": {}, "outputs": [], @@ -189,7 +226,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 16, "id": "c83bbdc0", "metadata": {}, "outputs": [], @@ -221,7 +258,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 17, "id": "66711301", "metadata": {}, "outputs": [ @@ -233,7 +270,9 @@ "Tool call: weather(*(), **{'city': 'Chicago'}) -> cold\n", "Tool call: weather(*(), **{'city': 'New York'}) -> wet\n", "Tool call: weather(*(), **{'city': 'Barcelona'}) -> sunny\n", - "Among the cities checked, **Barcelona** currently has good weather, as it is sunny.\n" + "It seems there was a problem retrieving the weather information for these cities. \n", + "\n", + "Would you like me to try fetching the data again or assist you in another way?\n" ] } ], @@ -277,7 +316,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 18, "id": "17668ac8", "metadata": {}, "outputs": [ @@ -290,7 +329,7 @@ "Who's there?\n", "Iguana.\n", "Iguana who?\n", - "Iguana tell you a secret, but I'm too shy!\n", + "Iguana tell you a secret... 
you're awesome!\n", "> The crowd laughs politely.\n" ] } @@ -341,7 +380,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 19, "id": "cbf495a2", "metadata": {}, "outputs": [ @@ -349,8 +388,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Request fired: () {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a haiku on the theme of fish2.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'} Response(id='resp_09fcadbd12423e30006900db6313ec81959d965abbe131e37e', created_at=1761663843.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_09fcadbd12423e30006900db63be008195ae1f74af1ec1c0ec', content=[ResponseOutputText(annotations=[], text='Beneath gentle waves, \\nWhispers of scales glide in peace— \\nSilent dance of fins.', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=22, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=40), user=None, billing={'payer': 'developer'}, store=True)\n", - "Request fired: () {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a limerick on the theme of fish.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'} 
Response(id='resp_08d35081ff1cf93a006900db65c80881909c2bf0bd1f8fb826', created_at=1761663845.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_08d35081ff1cf93a006900db667bd88190b5e6ba81bb4ceaf5', content=[ResponseOutputText(annotations=[], text='In the sea where the bright corals swish, \\nLived a cod with a dream to be \"pish.\" \\nHe\\'d leap and he\\'d twirl, \\nIn a watery swirl, \\nEarning fame as the most stylish fish.', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=51, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=69), user=None, billing={'payer': 'developer'}, store=True)\n" + "Request fired: () {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a haiku on the theme of fish2.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'} Response(id='resp_06ea51b6ad2eb0bb006914f62252708193868c36a85d4e2862', created_at=1762981410.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_06ea51b6ad2eb0bb006914f622ca248193a3bfd331defa6813', content=[ResponseOutputText(annotations=[], text=\"Swift shadows darting, \\nIn the deep blue silence, peace— 
\\nFish2's gentle glide.\", type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=22, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=40), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)\n", + "Request fired: () {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a limerick on the theme of fish.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'} Response(id='resp_0cf58e47bda48859006914f623a5e08196a1271afbe68a1605', created_at=1762981411.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_0cf58e47bda48859006914f6240e548196bdcf02129ce5eecd', content=[ResponseOutputText(annotations=[], text='There once was a fish full of cheer, \\nWho swam where the water was clear. 
\\nWith a flip and a glide, \\nHe danced with the tide, \\nSpreading joy to all who came near.', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=45, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=63), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)\n" ] } ], @@ -384,19 +423,18 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "id": "81a15f00", "metadata": {}, "outputs": [ { - "ename": "TypeError", - "evalue": "StreamHandler.__init__() got an unexpected keyword argument 'formatter'", - "output_type": "error", - "traceback": [ - "\u001b[31m---------------------------------------------------------------------------\u001b[39m", - "\u001b[31mTypeError\u001b[39m Traceback (most recent call last)", - "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[14]\u001b[39m\u001b[32m, line 4\u001b[39m\n\u001b[32m 2\u001b[39m logger = logging.getLogger(\u001b[33m\"\u001b[39m\u001b[33meffectful.llm\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m 3\u001b[39m logger.setLevel(logging.INFO)\n\u001b[32m----> \u001b[39m\u001b[32m4\u001b[39m logger.addHandler(\u001b[43mlogging\u001b[49m\u001b[43m.\u001b[49m\u001b[43mStreamHandler\u001b[49m\u001b[43m(\u001b[49m\u001b[43msys\u001b[49m\u001b[43m.\u001b[49m\u001b[43mstdout\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mformatter\u001b[49m\u001b[43m=\u001b[49m\u001b[43mlogging\u001b[49m\u001b[43m.\u001b[49m\u001b[43mFormatter\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[38;5;132;43;01m%(levelname)s\u001b[39;49;00m\u001b[33;43m \u001b[39;49m\u001b[38;5;132;43;01m%(payload)s\u001b[39;49;00m\u001b[33;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[32m 5\u001b[39m \u001b[38;5;66;03m# 2. Pass it to the handler\u001b[39;00m\n\u001b[32m 6\u001b[39m llm_logger = LLMLoggingHandler(logger=logger) \u001b[38;5;66;03m# can also be LLMLoggingHandler()\u001b[39;00m\n", - "\u001b[31mTypeError\u001b[39m: StreamHandler.__init__() got an unexpected keyword argument 'formatter'" + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO {'args': (), 'kwargs': {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a haiku on the theme of fish3.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'}, 'response': Response(id='resp_09b7251955854c33006914f625fc748190a8375a208f0d7859', created_at=1762981414.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_09b7251955854c33006914f6270d248190bdd1094b60fced21', content=[ResponseOutputText(annotations=[], text=\"Silent ripples dance, \\nGolden fins glide through the depths— \\nNature's quiet grace.\", type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), 
top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=20, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=38), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)}\n", + "INFO {'args': (), 'kwargs': {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a haiku on the theme of fish3.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'}, 'response': Response(id='resp_09b7251955854c33006914f625fc748190a8375a208f0d7859', created_at=1762981414.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_09b7251955854c33006914f6270d248190bdd1094b60fced21', content=[ResponseOutputText(annotations=[], text=\"Silent ripples dance, \\nGolden fins glide through the depths— \\nNature's quiet grace.\", type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=20, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=38), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)}\n", + "INFO {'args': (), 'kwargs': {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a limerick on the 
theme of fish4.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'}, 'response': Response(id='resp_03b9010dc0322c97006914f629c6608193ad5517ed6dcabe4b', created_at=1762981417.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_03b9010dc0322c97006914f62a532c8193bf00b55cb75c721f', content=[ResponseOutputText(annotations=[], text='In a pond where the lily pads swish, \\nLived a catfish who dreamed of a dish. \\nHe dove in with glee, \\nIn search of a pea, \\nBut ended up hooked like a wish! \\n', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=19, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=48, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=67), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)}\n", + "INFO {'args': (), 'kwargs': {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a limerick on the theme of fish4.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'}, 'response': Response(id='resp_03b9010dc0322c97006914f629c6608193ad5517ed6dcabe4b', created_at=1762981417.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', 
output=[ResponseOutputMessage(id='msg_03b9010dc0322c97006914f62a532c8193bf00b55cb75c721f', content=[ResponseOutputText(annotations=[], text='In a pond where the lily pads swish, \\nLived a catfish who dreamed of a dish. \\nHe dove in with glee, \\nIn search of a pea, \\nBut ended up hooked like a wish! \\n', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=19, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=48, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=67), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)}\n" ] } ], @@ -439,7 +477,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.13.7" + "version": "3.12.9" } }, "nbformat": 4, diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index 3447a072..b0b38ed5 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -5,7 +5,7 @@ import logging import string import typing -from collections.abc import Iterable, Mapping, Sequence +from collections.abc import Hashable, Iterable, Mapping, Sequence from typing import Any, get_type_hints import pydantic @@ -178,6 +178,34 @@ def tool_call[T](template: Template, tool: Operation[..., T], *args, **kwargs) - return tool(*args, **kwargs) +class CacheLLMRequestHandler(ObjectInterpretation): + """Caches LLM requests.""" 
+ + def __init__(self): + self.cache: dict[Hashable, Any] = {} + + def _make_hashable(self, obj: Any) -> Hashable: + """Recursively convert objects to hashable representations.""" + if isinstance(obj, dict): + return tuple(sorted((k, self._make_hashable(v)) for k, v in obj.items())) + elif isinstance(obj, list | tuple): + return tuple(self._make_hashable(item) for item in obj) + elif isinstance(obj, set): + return frozenset(self._make_hashable(item) for item in obj) + else: + # Primitives (int, float, str, bytes, etc.) are already hashable + return obj + + @implements(llm_request) + def _cache_llm_request(self, client: openai.OpenAI, *args, **kwargs) -> Any: + key = self._make_hashable((args, kwargs)) + if key in self.cache: + return self.cache[key] + response = fwd() + self.cache[key] = response + return response + + class LLMLoggingHandler(ObjectInterpretation): """Logs llm_request rounds and tool_call invocations using Python logging. diff --git a/pyproject.toml b/pyproject.toml index 09fad038..ccba5f76 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,7 +53,7 @@ docs = [ "myst-parser", "nbsphinx", "sphinx_autodoc_typehints", - "pypandoc_binary", + "pypandoc", ] test = [ "effectful[torch,pyro,jax,numpyro,docs]", From b9207b4b2b3ba677a98eccd79cc57178e2262812 Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Mon, 24 Nov 2025 17:22:15 -0500 Subject: [PATCH 12/39] implement first to k-ahead sampler (#412) --- effectful/handlers/llm/sampling.py | 47 ++++++++++++++++++++++++++++++ effectful/internals/runtime.py | 3 +- 2 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 effectful/handlers/llm/sampling.py diff --git a/effectful/handlers/llm/sampling.py b/effectful/handlers/llm/sampling.py new file mode 100644 index 00000000..effc197d --- /dev/null +++ b/effectful/handlers/llm/sampling.py @@ -0,0 +1,47 @@ +from collections import Counter +from concurrent import futures +from 
concurrent.futures.thread import ThreadPoolExecutor + +from effectful.handlers.llm import Template +from effectful.internals.runtime import get_interpretation, interpreter +from effectful.ops.semantics import fwd +from effectful.ops.syntax import ObjectInterpretation, implements + + +class KAheadSampler[**P, T](ObjectInterpretation): + no_voters: int + k: int + """Number of votes ahead before an answer is accepted""" + votes: Counter[T] = Counter() + + def __init__(self, no_voters: int = 6, k: int = 3): + self.no_voters = no_voters + self.k = k + + @implements(Template.__call__) + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: + executor = ThreadPoolExecutor() + intp = get_interpretation() + tasks = [ + executor.submit(interpreter(intp)(fwd), *args, **kwargs) + for _ in range(self.no_voters) + ] + + def n_votes_ahead(): + match self.votes.most_common(2): + case [[_, v1], [_, v2]]: + return v1 >= v2 + self.k + case [[_, v1]]: + return v1 >= self.k + case _: + return False + + while not n_votes_ahead(): + done, remain = futures.wait(tasks, return_when=futures.FIRST_COMPLETED) + tasks = list(remain) + for fut in done: + res = fut.result() + self.votes[res] += 1 + tasks.append(executor.submit(interpreter(intp)(fwd), *args, **kwargs)) + executor.shutdown() + return self.votes.most_common(1)[0][0] diff --git a/effectful/internals/runtime.py b/effectful/internals/runtime.py index f99472fe..12488e13 100644 --- a/effectful/internals/runtime.py +++ b/effectful/internals/runtime.py @@ -2,13 +2,14 @@ import dataclasses import functools from collections.abc import Callable, Mapping +from threading import local from effectful.ops.syntax import defop from effectful.ops.types import Interpretation, Operation @dataclasses.dataclass -class Runtime[S, T]: +class Runtime[S, T](local): interpretation: "Interpretation[S, T]" From 41b52b4a0e88e51c62133ec3e50e7addb3bfc2c4 Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Wed, 26 Nov 2025 18:53:46 -0500 Subject: [PATCH 13/39] 
Add inheritable class for stateful templates (#416) * refactor OpenAIAPIProvider Template.__call__ now desugars into more primitive operations * add an inheritable class that provides stateful behavior to templates * remove unnecessary unset --- effectful/handlers/llm/agent.py | 64 ++++++++++ effectful/handlers/llm/providers.py | 192 ++++++++++++++++------------ 2 files changed, 173 insertions(+), 83 deletions(-) create mode 100644 effectful/handlers/llm/agent.py diff --git a/effectful/handlers/llm/agent.py b/effectful/handlers/llm/agent.py new file mode 100644 index 00000000..f80cf159 --- /dev/null +++ b/effectful/handlers/llm/agent.py @@ -0,0 +1,64 @@ +import functools +from typing import Optional + +from effectful.handlers.llm import Template +from effectful.handlers.llm.providers import compute_response, format_model_input +from effectful.ops.semantics import fwd, handler +from effectful.ops.syntax import defop + + +class Agent: + '''When inheriting from Agent, Template-valued methods will have the + previous history of the conversation injected prior to their prompts. + + Example: + + >>> class ConversationAgent(Agent): + ... @Template.define + ... def respond(self, message: str) -> str: + ... """Continue the conversation in response to the message '{message}'""" + ... raise NotImplementedError + + Any calls to `agent.format` will have the previous conversation history in their context. 
+ + ''' + + def __init__(self): + self.state = [] + + @defop + @staticmethod + def current_agent() -> Optional["Agent"]: + return None + + def __init_subclass__(cls): + for method_name in dir(cls): + template = getattr(cls, method_name) + if not isinstance(template, Template): + continue + + @functools.wraps(template) + def wrapper(self, *args, **kwargs): + with handler( + { + Agent.current_agent: lambda: self, + format_model_input: self._format_model_input, + compute_response: self._compute_response, + } + ): + return template(self, *args, **kwargs) + + setattr(cls, method_name, wrapper) + + def _format_model_input(self, template, other, *args, **kwargs): + prompt = fwd() + if Agent.current_agent() is self: + assert self is other + prompt = self.state + prompt + return prompt + + def _compute_response(self, *args, **kwargs): + response = fwd() + if Agent.current_agent() is self: + self.state += response.output + return response diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index b0b38ed5..cb6e4800 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -5,7 +5,7 @@ import logging import string import typing -from collections.abc import Hashable, Iterable, Mapping, Sequence +from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence from typing import Any, get_type_hints import pydantic @@ -20,7 +20,7 @@ except ImportError: raise ImportError("'pillow' is required to use effectful.handlers.providers") -from openai.types.responses import FunctionToolParam +from openai.types.responses import FunctionToolParam, Response from effectful.handlers.llm import Template from effectful.ops.semantics import fwd @@ -275,96 +275,122 @@ def _call_tool_with_json_args( return str({"status": "failure", "exception": str(exn)}) -class OpenAIAPIProvider(ObjectInterpretation): - """Implements templates using the OpenAI API.""" - - def __init__(self, client: openai.OpenAI, model_name: str = 
"gpt-4o"): - self._client = client - self._model_name = model_name +def _pydantic_model_from_type(typ: type): + return pydantic.create_model("Response", value=typ, __config__={"extra": "forbid"}) - @implements(Template.__call__) - def _call[**P, T]( - self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs - ) -> T: - ret_type = template.__signature__.return_annotation - bound_args = template.__signature__.bind(*args, **kwargs) - bound_args.apply_defaults() - prompt = _OpenAIPromptFormatter().format_as_messages( - template.__prompt_template__, **bound_args.arguments - ) - tools = _tools_of_operations(template.tools) - tool_definitions = [t.function_definition for t in tools.values()] +@defop +def compute_response( + template: Template, client: openai.OpenAI, model_name: str, model_input: list[Any] +) -> Response: + """Produce a complete model response for an input message sequence. This may + involve multiple API requests if tools are invoked by the model. - response_kwargs: dict[str, Any] = { - "model": self._model_name, - "tools": tool_definitions, - "tool_choice": "auto", + """ + ret_type = template.__signature__.return_annotation + + tools = _tools_of_operations(template.tools) + tool_definitions = [t.function_definition for t in tools.values()] + + response_kwargs: dict[str, Any] = { + "model": model_name, + "tools": tool_definitions, + "tool_choice": "auto", + } + + if ret_type != str: + Result = _pydantic_model_from_type(ret_type) + result_schema = openai.lib._pydantic.to_strict_json_schema(Result) + response_kwargs["text"] = { + "format": { + "type": "json_schema", + "name": "response", + "schema": result_schema, + "strict": True, + } } - if ret_type == str: - result_schema = None - else: - Result = pydantic.create_model( - "Response", value=ret_type, __config__={"extra": "forbid"} - ) - result_schema = openai.lib._pydantic.to_strict_json_schema(Result) - response_kwargs["text"] = { - "format": { - "type": "json_schema", - "name": "response", - 
"schema": result_schema, - "strict": True, - } + while True: + response = llm_request(client, input=model_input, **response_kwargs) + + new_input = [] + for message in response.output: + if message.type != "function_call": + continue + + call_id = message.call_id + tool = tools[message.name] + tool_result = _call_tool_with_json_args(template, tool, message.arguments) + tool_response = { + "type": "function_call_output", + "call_id": call_id, + "output": tool_result, } + new_input.append(tool_response) - called_tools = set([]) # tool calls that we have discharged - - # Note: The OpenAI api only seems to accept images in the 'user' role. - # The effect of different roles on the model's response is currently - # unclear. - model_input: list[Any] = [ - {"type": "message", "content": prompt, "role": "user"} - ] - - while True: - response = llm_request(self._client, input=model_input, **response_kwargs) - - new_input = [] - for message in response.output: - if message.type != "function_call": - continue - - call_id = message.call_id - if call_id in called_tools: - continue - called_tools.add(call_id) - - tool = tools[message.name] - tool_result = _call_tool_with_json_args( - template, tool, message.arguments - ) - tool_response = { - "type": "function_call_output", - "call_id": call_id, - "output": tool_result, - } - new_input.append(tool_response) + if not new_input: + return response + + model_input += response.output + new_input - if not new_input: - break - model_input += response.output + new_input +# Note: typing template as Template[P, T] causes term conversion to fail due to +# unification limitations. +@defop +def decode_response[**P, T](template: Callable[P, T], response: Response) -> T: + """Decode an LLM response into an instance of the template return type. This + operation should raise if the output cannot be decoded. 
+ + """ + assert isinstance(template, Template) - last_resp = response.output[-1] - assert last_resp.type == "message" - last_resp_content = last_resp.content[0] - assert last_resp_content.type == "output_text" - result_str = last_resp_content.text + last_resp = response.output[-1] + assert last_resp.type == "message" + last_resp_content = last_resp.content[0] + assert last_resp_content.type == "output_text" + result_str = last_resp_content.text - if result_schema is None: - return result_str + ret_type = template.__signature__.return_annotation + if ret_type == str: + return result_str # type: ignore[return-value] - result = Result.model_validate_json(result_str) - assert isinstance(result, Result) - return result.value # type: ignore[attr-defined] + Result = _pydantic_model_from_type(ret_type) + result = Result.model_validate_json(result_str) + assert isinstance(result, Result) + return result.value + + +@defop +def format_model_input[**P, T]( + template: Template[P, T], *args: P.args, **kwargs: P.kwargs +) -> list[Any]: + """Format a template applied to arguments into a sequence of input + messages. + + """ + bound_args = template.__signature__.bind(*args, **kwargs) + bound_args.apply_defaults() + prompt = _OpenAIPromptFormatter().format_as_messages( + template.__prompt_template__, **bound_args.arguments + ) + + # Note: The OpenAI api only seems to accept images in the 'user' role. The + # effect of different roles on the model's response is currently unclear. 
+ messages = [{"type": "message", "content": prompt, "role": "user"}] + return messages + + +class OpenAIAPIProvider(ObjectInterpretation): + """Implements templates using the OpenAI API.""" + + def __init__(self, client: openai.OpenAI, model_name: str = "gpt-4o"): + self._client = client + self._model_name = model_name + + @implements(Template.__call__) + def _call[**P, T]( + self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs + ) -> T: + model_input = format_model_input(template, *args, **kwargs) # type: ignore + resp = compute_response(template, self._client, self._model_name, model_input) + return decode_response(template, resp) From 248ff6e99b5a542daab9ba905282559834ad233a Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Mon, 1 Dec 2025 13:34:06 -0500 Subject: [PATCH 14/39] Support multiple providers (via `litellm`) (#418) * add litellm to the dependencies for effectful llm * switched internals to use litellm & fixed bug with dicts * updated schemas and imports * fixed encoding-decoding issue with shim * switched to completions and litellm * removed old compatibility shim * replaced openai type with litellm type * s/LLMProvider/LiteLLMProvider/ * modified LiteLLMProvider to inject params into llm_request * fixed ipynb imports * updated s/llm_request/completion and functools.wraps * s/llm_request/completion * dropped try-catch on litellm import * updated providers and dropped openai dep * updated completion to be more compositional * minor fix for single positional argument for completion * updated init to default to gpt-4o to preserve previous behaviour --- docs/source/llm.ipynb | 23 ++-- effectful/handlers/llm/providers.py | 190 +++++++++++++++------------- pyproject.toml | 2 +- 3 files changed, 110 insertions(+), 105 deletions(-) diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index 9b9b88b2..eb6b1186 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -10,25 +10,22 
@@ "import dataclasses\n", "import functools\n", "import logging\n", - "import os\n", "import sys\n", "from collections.abc import Callable\n", "\n", - "import openai\n", - "\n", "from effectful.handlers.llm import Template\n", "from effectful.handlers.llm.providers import (\n", " CacheLLMRequestHandler,\n", + " LiteLLMProvider,\n", " LLMLoggingHandler,\n", - " OpenAIAPIProvider,\n", - " llm_request,\n", + " completion,\n", " tool_call,\n", ")\n", "from effectful.handlers.llm.synthesis import ProgramSynthesis\n", "from effectful.ops.semantics import fwd, handler\n", "from effectful.ops.syntax import defop\n", "\n", - "provider = OpenAIAPIProvider(openai.OpenAI(api_key=os.getenv(\"OPENAI_API_KEY\")))" + "provider = LiteLLMProvider()" ] }, { @@ -161,6 +158,7 @@ " \"\"\"Write a haiku on the theme of {theme}.\"\"\"\n", " raise NotImplementedError\n", "\n", + "\n", "@Template.define\n", "def haiku_no_cache(theme: str) -> str:\n", " \"\"\"Write a haiku on the theme of {theme}.\"\"\"\n", @@ -185,8 +183,7 @@ "with handler(provider), handler(cache_handler2):\n", " print(haiku_no_cache(\"fish3\"))\n", " print(\"-\" * 40)\n", - " print(haiku_no_cache(\"fish3\"))\n", - "\n" + " print(haiku_no_cache(\"fish3\"))" ] }, { @@ -375,7 +372,7 @@ "metadata": {}, "source": [ "### Logging LLM requests\n", - "To intercept messages being called on the lower-level, we can write a handler for `llm_request`:" + "To intercept messages being called on the lower-level, we can write a handler for `completion`:" ] }, { @@ -394,7 +391,7 @@ } ], "source": [ - "def log_llm(client, *args, **kwargs):\n", + "def log_llm(*args, **kwargs):\n", " result = fwd()\n", " print(\"Request fired: \", args, kwargs, result)\n", " return result\n", @@ -406,8 +403,8 @@ "except Exception:\n", " pass\n", "\n", - "# Put llm_request handler innermost so it has highest precedence during the call\n", - "with handler(provider), handler({llm_request: log_llm}):\n", + "# Put completion handler innermost so it has highest 
precedence during the call\n", + "with handler(provider), handler({completion: log_llm}):\n", " _ = haiku(\"fish2\")\n", " _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache" ] @@ -418,7 +415,7 @@ "metadata": {}, "source": [ "### Python logging for LLM requests and tool calls\n", - "We can also uses Python logger through `LLMLoggingHandler` to log both low-level LLM requests (`llm_request`) and model-initiated tool use (`tool_call`):\n" + "We can also uses Python logger through `LLMLoggingHandler` to log both low-level LLM requests (`completion`) and model-initiated tool use (`tool_call`):\n" ] }, { diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index cb6e4800..b40c5580 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -1,5 +1,6 @@ import base64 import dataclasses +import functools import inspect import io import logging @@ -8,19 +9,23 @@ from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence from typing import Any, get_type_hints +import litellm import pydantic -try: - import openai -except ImportError: - raise ImportError("'openai' is required to use effectful.handlers.providers") - try: from PIL import Image except ImportError: raise ImportError("'pillow' is required to use effectful.handlers.providers") -from openai.types.responses import FunctionToolParam, Response +from litellm import ( + ChatCompletionImageObject, + Choices, + Message, + OpenAIChatCompletionToolParam, + OpenAIMessageContent, + OpenAIMessageContentListBlock, +) +from litellm.types.utils import ModelResponse from effectful.handlers.llm import Template from effectful.ops.semantics import fwd @@ -40,17 +45,14 @@ def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: def _pil_image_to_openai_image_param( pil_image: Image.Image, -) -> openai.types.responses.ResponseInputImageParam: - return openai.types.responses.ResponseInputImageParam( - type="input_image", - 
detail="auto", - image_url=_pil_image_to_base64_data_uri(pil_image), - ) - - -OpenAIFunctionOutputParamType = ( - str | list[openai.types.responses.ResponseInputImageParam] -) +) -> ChatCompletionImageObject: + return { + "type": "image_url", + "image_url": { + "detail": "auto", + "url": _pil_image_to_base64_data_uri(pil_image), + }, + } @dataclasses.dataclass @@ -59,7 +61,7 @@ class Tool[**P, T]: operation: Operation[P, T] name: str - def serialise_return_value(self, value) -> OpenAIFunctionOutputParamType: + def serialise_return_value(self, value) -> OpenAIMessageContent: """Serializes a value returned by the function into a json format suitable for the OpenAI API.""" sig = inspect.signature(self.operation) ret_ty = sig.return_annotation @@ -98,13 +100,21 @@ def of_operation(cls, op: Operation[P, T], name: str): ) @property - def function_definition(self) -> FunctionToolParam: + def function_definition(self) -> OpenAIChatCompletionToolParam: + response_format = litellm.utils.type_to_response_format_param( + self.parameter_model + ) + assert response_format is not None return { "type": "function", - "name": self.name, - "description": self.operation.__doc__, - "parameters": self.parameter_model.model_json_schema(), - "strict": True, + "function": { + "name": self.name, + "description": self.operation.__doc__ or "", + "parameters": response_format["json_schema"][ + "schema" + ], # extract the schema + "strict": True, + }, } @@ -127,14 +137,14 @@ def _tools_of_operations(ops: Iterable[Operation]) -> Mapping[str, Tool]: class _OpenAIPromptFormatter(string.Formatter): def format_as_messages( self, format_str: str, /, *args, **kwargs - ) -> openai.types.responses.ResponseInputMessageContentListParam: - prompt_parts = [] + ) -> OpenAIMessageContent: + prompt_parts: list[OpenAIMessageContentListBlock] = [] current_text = "" def push_current_text(): nonlocal current_text if current_text: - prompt_parts.append({"type": "input_text", "text": current_text}) + 
prompt_parts.append({"type": "text", "text": current_text}) current_text = "" for literal, field_name, format_spec, conversion in self.parse(format_str): @@ -151,7 +161,7 @@ def push_current_text(): push_current_text() prompt_parts.append( { - "type": "input_image", + "type": "image_url", "image_url": _pil_image_to_base64_data_uri(obj), } ) @@ -166,9 +176,10 @@ def push_current_text(): # Emitted for model request/response rounds so handlers can observe/log requests. @defop -def llm_request(client: openai.OpenAI, *args, **kwargs) -> Any: +@functools.wraps(litellm.completion) +def completion(*args, **kwargs) -> Any: """Low-level LLM request. Handlers may log/modify requests and delegate via fwd().""" - return client.responses.create(*args, **kwargs) + return litellm.completion(*args, **kwargs) # Note: attempting to type the tool arguments causes type-checker failures @@ -196,8 +207,8 @@ def _make_hashable(self, obj: Any) -> Hashable: # Primitives (int, float, str, bytes, etc.) are already hashable return obj - @implements(llm_request) - def _cache_llm_request(self, client: openai.OpenAI, *args, **kwargs) -> Any: + @implements(completion) + def _cache_completion(self, *args, **kwargs) -> Any: key = self._make_hashable((args, kwargs)) if key in self.cache: return self.cache[key] @@ -207,7 +218,7 @@ def _cache_llm_request(self, client: openai.OpenAI, *args, **kwargs) -> Any: class LLMLoggingHandler(ObjectInterpretation): - """Logs llm_request rounds and tool_call invocations using Python logging. + """Logs completion rounds and tool_call invocations using Python logging. Configure with a logger or logger name. By default logs at INFO level. 
""" @@ -224,8 +235,8 @@ def __init__( """ self.logger = logger or logging.getLogger(__name__) - @implements(llm_request) - def _log_llm_request(self, client: openai.OpenAI, *args, **kwargs) -> Any: + @implements(completion) + def _log_completion(self, *args, **kwargs) -> Any: """Log the LLM request and response.""" response = fwd() @@ -264,11 +275,16 @@ def _log_tool_call( def _call_tool_with_json_args( template: Template, tool: Tool, json_str_args: str -) -> OpenAIFunctionOutputParamType: +) -> OpenAIMessageContent: try: args = tool.parameter_model.model_validate_json(json_str_args) result = tool_call( - template, tool.operation, **args.model_dump(exclude_defaults=True) + template, + tool.operation, + **{ + field: getattr(args, field) + for field in tool.parameter_model.model_fields + }, ) return tool.serialise_return_value(result) except Exception as exn: @@ -280,9 +296,7 @@ def _pydantic_model_from_type(typ: type): @defop -def compute_response( - template: Template, client: openai.OpenAI, model_name: str, model_input: list[Any] -) -> Response: +def compute_response(template: Template, model_input: list[Any]) -> ModelResponse: """Produce a complete model response for an input message sequence. This may involve multiple API requests if tools are invoked by the model. 
@@ -290,65 +304,52 @@ def compute_response( ret_type = template.__signature__.return_annotation tools = _tools_of_operations(template.tools) - tool_definitions = [t.function_definition for t in tools.values()] - - response_kwargs: dict[str, Any] = { - "model": model_name, - "tools": tool_definitions, - "tool_choice": "auto", - } - - if ret_type != str: - Result = _pydantic_model_from_type(ret_type) - result_schema = openai.lib._pydantic.to_strict_json_schema(Result) - response_kwargs["text"] = { - "format": { - "type": "json_schema", - "name": "response", - "schema": result_schema, - "strict": True, - } - } + tool_schemas = [t.function_definition for t in tools.values()] + response_format = _pydantic_model_from_type(ret_type) if ret_type != str else None + # loop based on: https://cookbook.openai.com/examples/reasoning_function_calls while True: - response = llm_request(client, input=model_input, **response_kwargs) - - new_input = [] - for message in response.output: - if message.type != "function_call": - continue - - call_id = message.call_id - tool = tools[message.name] - tool_result = _call_tool_with_json_args(template, tool, message.arguments) - tool_response = { - "type": "function_call_output", - "call_id": call_id, - "output": tool_result, - } - new_input.append(tool_response) - - if not new_input: - return response + response: ModelResponse = completion( + messages=model_input, + response_format=response_format, + tools=tool_schemas, + ) - model_input += response.output + new_input + choice: Choices = typing.cast(Choices, response.choices[0]) + message: Message = choice.message + if not message.tool_calls: + return response + model_input.append(message.to_dict()) + + for tool_call in message.tool_calls: + function = tool_call.function + function_name = typing.cast(str, function.name) + tool = tools[function_name] + tool_result = _call_tool_with_json_args(template, tool, function.arguments) + model_input.append( + { + "role": "tool", + "tool_call_id": 
tool_call.id, + "name": function_name, + "content": tool_result, + } + ) # Note: typing template as Template[P, T] causes term conversion to fail due to # unification limitations. @defop -def decode_response[**P, T](template: Callable[P, T], response: Response) -> T: +def decode_response[**P, T](template: Callable[P, T], response: ModelResponse) -> T: """Decode an LLM response into an instance of the template return type. This operation should raise if the output cannot be decoded. """ assert isinstance(template, Template) - - last_resp = response.output[-1] - assert last_resp.type == "message" - last_resp_content = last_resp.content[0] - assert last_resp_content.type == "output_text" - result_str = last_resp_content.text + choice: Choices = typing.cast(Choices, response.choices[0]) + last_resp: Message = choice.message + assert isinstance(last_resp, Message) + result_str = last_resp.content or last_resp.reasoning_content + assert result_str ret_type = template.__signature__.return_annotation if ret_type == str: @@ -380,17 +381,24 @@ def format_model_input[**P, T]( return messages -class OpenAIAPIProvider(ObjectInterpretation): - """Implements templates using the OpenAI API.""" +class LiteLLMProvider(ObjectInterpretation): + """Implements templates using the LiteLLM API.""" + + model_name: str + config: dict[str, Any] + + def __init__(self, model_name: str = "gpt-4o", **config): + self.model_name = model_name + self.config = inspect.signature(completion).bind_partial(**config).kwargs - def __init__(self, client: openai.OpenAI, model_name: str = "gpt-4o"): - self._client = client - self._model_name = model_name + @implements(completion) + def _completion(self, *args, **kwargs): + return fwd(self.model_name, *args, **(self.config | kwargs)) @implements(Template.__call__) def _call[**P, T]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: model_input = format_model_input(template, *args, **kwargs) # type: ignore - resp = 
compute_response(template, self._client, self._model_name, model_input) + resp = compute_response(template, model_input) return decode_response(template, resp) diff --git a/pyproject.toml b/pyproject.toml index ccba5f76..d7030779 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ pyro = ["pyro-ppl>=1.9.1", "dm-tree"] jax = ["jax", "dm-tree"] numpyro = ["numpyro>=0.19", "dm-tree"] llm = [ - "openai", + "litellm", "pillow", "pydantic", ] From e4c0d993be814750e60d186896b3fd23670409dd Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Mon, 1 Dec 2025 19:51:33 -0500 Subject: [PATCH 15/39] store source of generated functions in `__src__` attribute (#403) * updates ProgramSynthesis to use linecache and integrate with inspect.getsource --- effectful/handlers/llm/synthesis.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/effectful/handlers/llm/synthesis.py b/effectful/handlers/llm/synthesis.py index c26bddaa..3a77441b 100644 --- a/effectful/handlers/llm/synthesis.py +++ b/effectful/handlers/llm/synthesis.py @@ -1,6 +1,7 @@ import ast import collections.abc import dataclasses +import linecache import re import textwrap import typing @@ -43,10 +44,18 @@ def _parse_and_eval[T](self, t: type[T], content: str) -> T: if not isinstance(last_decl, ast.FunctionDef): raise SynthesisError("last definition not a function", content) + source_code = textwrap.dedent(code) + lines = code.splitlines(keepends=True) + filename = f"" + + # register into linecache + linecache.cache[filename] = (len(source_code), None, lines, filename) + # TODO: assert callable type compatibility gs: dict = {} try: - exec(code, gs) + code_obj = compile(source_code, filename, "exec") + exec(code_obj, gs) except Exception as exc: raise SynthesisError("evaluation failed", content) from exc @@ -86,4 +95,6 @@ def _call(self, template, *args, **kwargs) -> None: **kwargs, ) - return self._parse_and_eval(ret_type, 
response) + functional = self._parse_and_eval(ret_type, response) + + return functional From 5cb8e89ee2a34222a37e318d35caabf96751b368 Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Tue, 2 Dec 2025 18:22:03 -0500 Subject: [PATCH 16/39] Adds type-based encoding and support for legacy APIs (#411) * refactored encoding to be type directed * updated handling of str and bytes --- effectful/handlers/llm/providers.py | 50 +++++++++++++++++++++-------- 1 file changed, 37 insertions(+), 13 deletions(-) diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index b40c5580..5d04b2d2 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -55,6 +55,42 @@ def _pil_image_to_openai_image_param( } +@defop +@functools.singledispatch +def format_value(value: Any) -> OpenAIMessageContent: + """Convert a Python value to internal message part representation. + + This function can be extended by registering handlers for + different types using @format_value.register. + + Returns a OpenAIMessageContent - either a string or a list of OpenAIMessageContentListBlock. 
+ """ + return [{"type": "text", "text": str(value)}] + + +@format_value.register(Image.Image) # type: ignore +def _(value: Image.Image) -> OpenAIMessageContent: + return [_pil_image_to_openai_image_param(value)] + + +@format_value.register(str) # type: ignore +def _(value: str) -> OpenAIMessageContent: + return [{"type": "text", "text": value}] + + +@format_value.register(bytes) # type: ignore +def _(value: bytes) -> OpenAIMessageContent: + return [{"type": "text", "text": str(value)}] + + +@format_value.register(Sequence) # type: ignore +def _(values: Sequence) -> OpenAIMessageContent: + if all(isinstance(value, Image.Image) for value in values): + return [_pil_image_to_openai_image_param(value) for value in values] + else: + return [{"type": "text", "text": str(values)}] + + @dataclasses.dataclass class Tool[**P, T]: parameter_model: type[pydantic.BaseModel] @@ -66,20 +102,8 @@ def serialise_return_value(self, value) -> OpenAIMessageContent: sig = inspect.signature(self.operation) ret_ty = sig.return_annotation ret_ty_origin = typing.get_origin(ret_ty) or ret_ty - ret_ty_args = typing.get_args(ret_ty) - - # special casing for images - if ret_ty == Image.Image: - return [_pil_image_to_openai_image_param(value)] - - # special casing for sequences of images (tuple[Image.Image, Image.Image], etc.) - if issubclass(ret_ty_origin, Sequence) and all( - arg == Image.Image for arg in ret_ty_args - ): - return [_pil_image_to_openai_image_param(image) for image in value] - # otherwise stringify - return str({"status": "success", "result": str(value)}) + return format_value.dispatch(ret_ty_origin)(value) # type: ignore @classmethod def of_operation(cls, op: Operation[P, T], name: str): From 1f50599d9e91910916c2d9405b2a22ed15dc98e5 Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Tue, 2 Dec 2025 22:11:00 -0500 Subject: [PATCH 17/39] Add LLM Integration tests to the workflows. 
(#420) --- .github/workflows/test_llm.yml | 34 +++ tests/test_handlers_llm_provider.py | 379 ++++++++++++++++++++++++++++ 2 files changed, 413 insertions(+) create mode 100644 .github/workflows/test_llm.yml create mode 100644 tests/test_handlers_llm_provider.py diff --git a/.github/workflows/test_llm.yml b/.github/workflows/test_llm.yml new file mode 100644 index 00000000..8856d9f5 --- /dev/null +++ b/.github/workflows/test_llm.yml @@ -0,0 +1,34 @@ +name: LLM Integration Tests + +on: + pull_request: + branches: + - master + - 'staging-*' + # Allow manual trigger + workflow_dispatch: + +jobs: + test-llm: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["'3.13'"] + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v5 + with: + enable-cache: true + + - name: Install Python dependencies + run: | + uv sync --all-extras --dev --python ${{ matrix.python-version }} + + - name: Run LLM integration tests + env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + run: | + uv run pytest tests/test_handlers_llm_provider.py -v --tb=short diff --git a/tests/test_handlers_llm_provider.py b/tests/test_handlers_llm_provider.py new file mode 100644 index 00000000..3fbd307d --- /dev/null +++ b/tests/test_handlers_llm_provider.py @@ -0,0 +1,379 @@ +"""Tests for LLM handlers and providers. + +This module tests the functionality from build/main.py and build/llm.py, +breaking down individual components like LiteLLMProvider, LLMLoggingHandler, +ProgramSynthesis, and sampling strategies. 
+""" + +import functools +import logging +import os +from collections.abc import Callable +from enum import Enum + +import pytest +from PIL import Image +from pydantic import Field +from pydantic.dataclasses import dataclass + +from effectful.handlers.llm import Template +from effectful.handlers.llm.providers import ( + LiteLLMProvider, + LLMLoggingHandler, + completion, +) +from effectful.handlers.llm.synthesis import ProgramSynthesis, SynthesisError +from effectful.ops.semantics import fwd, handler +from effectful.ops.syntax import ObjectInterpretation, defop, implements +from effectful.ops.types import NotHandled + +# Check for API keys +HAS_OPENAI_KEY = "OPENAI_API_KEY" in os.environ and os.environ["OPENAI_API_KEY"] +HAS_ANTHROPIC_KEY = ( + "ANTHROPIC_API_KEY" in os.environ and os.environ["ANTHROPIC_API_KEY"] +) + +# Pytest markers for skipping tests based on API key availability +requires_openai = pytest.mark.skipif( + not HAS_OPENAI_KEY, reason="OPENAI_API_KEY environment variable not set" +) +requires_anthropic = pytest.mark.skipif( + not HAS_ANTHROPIC_KEY, reason="ANTHROPIC_API_KEY environment variable not set" +) + + +# ============================================================================ +# Test Fixtures and Mock Data +# ============================================================================ +def retry_on_error(error: type[Exception], n: int): + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + for i in range(n): + try: + return func(*args, **kwargs) + except error as e: + if i < n - 1: + continue + raise e + + return wrapper + + return decorator + + +class LimitLLMCallsHandler(ObjectInterpretation): + max_calls: int + no_calls: int = 0 + + def __init__(self, max_calls: int): + self.max_calls = max_calls + + @implements(completion) + def _completion(self, *args, **kwargs): + if self.no_calls >= self.max_calls: + raise RuntimeError( + f"Test used too many requests (max_calls = {self.max_calls})" + ) + self.no_calls 
+= 1 + return fwd() + + +class MovieGenre(str, Enum): + """Movie genre classifications.""" + + ACTION = "action" + COMEDY = "comedy" + DRAMA = "drama" + HORROR = "horror" + SCIFI = "sci-fi" + ROMANCE = "romance" + + +@dataclass(frozen=True) +class MovieClassification: + """Classification result for a movie.""" + + genre: MovieGenre + explanation: str = Field( + ..., description="explanation for the given movie classification" + ) + + +@Template.define +def classify_genre(plot: str) -> MovieClassification: + """Classify the movie genre based on this plot: {plot}""" + raise NotImplementedError + + +@Template.define +def simple_prompt(topic: str) -> str: + """Write a short sentence about {topic}.""" + raise NotImplementedError + + +@Template.define +def generate_number(max_value: int) -> int: + """Generate a random number between 1 and {max_value}. Return only the number.""" + raise NotImplementedError + + +@Template.define +def create_function(char: str) -> Callable[[str], int]: + """Create a function that counts occurrences of the character '{char}' in a string. + + Return as a code block with the last definition being the function. 
+ """ + raise NotHandled + + +class TestLiteLLMProvider: + """Tests for LiteLLMProvider basic functionality.""" + + @requires_openai + @pytest.mark.parametrize("model_name", ["gpt-4o-mini", "gpt-5-nano"]) + def test_simple_prompt_multiple_models(self, model_name): + """Test that LiteLLMProvider works with different model configurations.""" + with ( + handler(LiteLLMProvider(model_name=model_name)), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + result = simple_prompt("testing") + assert isinstance(result, str) + assert len(result) > 0 + + @pytest.mark.parametrize( + "model_name", + [ + pytest.param("gpt-4o-mini", marks=requires_openai), + pytest.param("claude-haiku-4-5", marks=requires_anthropic), + ], + ) + def test_simple_prompt_cross_endpoint(self, model_name): + """Test that LiteLLMProvider works across different API endpoints.""" + with ( + handler(LiteLLMProvider(model_name=model_name)), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + result = simple_prompt("testing") + assert isinstance(result, str) + assert len(result) > 0 + + @requires_openai + def test_structured_output(self): + """Test LiteLLMProvider with structured Pydantic output.""" + plot = "A rogue cop must stop a evil group from taking over a skyscraper." 
+ + with ( + handler(LiteLLMProvider(model_name="gpt-5-nano")), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + classification = classify_genre(plot) + + assert isinstance(classification, MovieClassification) + assert isinstance(classification.genre, MovieGenre) + assert classification.genre == MovieGenre.ACTION + assert isinstance(classification.explanation, str) + assert len(classification.explanation) > 0 + + @requires_openai + def test_integer_return_type(self): + """Test LiteLLMProvider with integer return type.""" + with ( + handler(LiteLLMProvider(model_name="gpt-5-nano")), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + result = generate_number(100) + + assert isinstance(result, int) + assert 1 <= result <= 100 + + @requires_openai + def test_with_config_params(self): + """Test LiteLLMProvider accepts and uses additional configuration parameters.""" + # Test with temperature parameter + with ( + handler(LiteLLMProvider(model_name="gpt-4o-mini", temperature=0.1)), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + result = simple_prompt("deterministic test") + assert isinstance(result, str) + + +class TestLLMLoggingHandler: + """Tests for LLMLoggingHandler functionality.""" + + @requires_openai + def test_logs_requests(self, caplog): + """Test that LLMLoggingHandler properly logs LLM requests.""" + with caplog.at_level(logging.INFO): + with ( + handler(LLMLoggingHandler()), + handler(LiteLLMProvider(model_name="gpt-4o-mini")), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + result = simple_prompt("testing") + assert isinstance(result, str) + + # Check that logging occurred + assert any("llm.request" in record.message for record in caplog.records) + + @requires_openai + def test_custom_logger(self, caplog): + """Test LLMLoggingHandler with a custom logger.""" + custom_logger = logging.getLogger("test_custom_logger") + + with caplog.at_level(logging.INFO, logger="test_custom_logger"): + with ( + 
handler(LLMLoggingHandler(logger=custom_logger)), + handler(LiteLLMProvider(model_name="gpt-4o-mini")), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + result = simple_prompt("testing") + assert isinstance(result, str) + + # Verify custom logger was used + assert any( + record.name == "test_custom_logger" and "llm.request" in record.message + for record in caplog.records + ) + + +class TestProgramSynthesis: + """Tests for ProgramSynthesis handler functionality.""" + + @requires_openai + @retry_on_error(error=SynthesisError, n=3) + def test_generates_callable(self): + """Test ProgramSynthesis handler generates executable code.""" + with ( + handler(LiteLLMProvider(model_name="gpt-4o-mini")), + handler(ProgramSynthesis()), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + count_func = create_function("a") + + assert callable(count_func) + # Test the generated function + assert count_func("banana") == 3 + assert count_func("cherry") == 0 + assert count_func("aardvark") == 3 + + +@dataclass +class Poem: + """A poem with content and form.""" + + content: str = Field(..., description="content of the poem") + form: str = Field(..., description="name of the type of the poem") + + +class PoemQuality(str, Enum): + """Quality rating for a poem.""" + + GOOD = "GOOD" + OKAY = "OKAY" + BAD = "BAD" + + +@defop +def evaluate_poem_tool(poem: Poem, explanation: str) -> PoemQuality: + """Evaluate the quality of a poem. 
+ + Parameters: + - poem: Poem object representing the poem + - explanation: natural language explanation of the thought process + """ + raise NotHandled + + +class LoggingPoemEvaluationInterpretation(ObjectInterpretation): + """Provides an interpretation for `evaluate_poem_tool` that tracks evaluation counts.""" + + evaluation_count: int = 0 + evaluation_results: list[dict] = [] + + @implements(evaluate_poem_tool) + def _evaluate_poem_tool(self, poem: Poem, explanation: str) -> PoemQuality: + self.evaluation_count += 1 + + # Simple heuristic: require at least 2 evaluations, then approve + quality = PoemQuality.BAD if self.evaluation_count < 2 else PoemQuality.GOOD + + self.evaluation_results.append( + {"poem": poem, "explanation": explanation, "quality": quality} + ) + + return quality + + +@Template.define(tools=[evaluate_poem_tool]) +def generate_good_poem(topic: str) -> Poem: + """Generate a good poem about {topic} returning your result following + the provided json schema. Use the provided tools to evaluate the quality + and you MUST make sure it is a good poem. 
+ """ + raise NotHandled + + +class TestToolCalling: + """Tests for templates with tool calling functionality.""" + + @pytest.mark.parametrize( + "model_name", + [ + pytest.param("gpt-5-nano", marks=requires_openai), + pytest.param("claude-sonnet-4-5-20250929", marks=requires_anthropic), + ], + ) + def test_tool_calling(self, model_name): + """Test that templates with tools work with openai.""" + poem_eval_ctx = LoggingPoemEvaluationInterpretation() + with ( + handler(LiteLLMProvider(model_name=model_name)), + handler(LimitLLMCallsHandler(max_calls=4)), + handler(poem_eval_ctx), + ): + poem = generate_good_poem("Python") + assert isinstance(poem, Poem) + assert isinstance(poem.content, str) + assert isinstance(poem.form, str) + + # Verify the tool was called at least once + assert poem_eval_ctx.evaluation_count >= 1 + assert len(poem_eval_ctx.evaluation_results) >= 1 + + +def smiley_face() -> Image.Image: + bmp = [ + "00000000", + "00100100", + "00100100", + "00000000", + "01000010", + "00111100", + "00000000", + "00000000", + ] + + img = Image.new("1", (8, 8)) + for y, row in enumerate(bmp): + for x, c in enumerate(row): + img.putpixel((x, y), 1 if c == "1" else 0) + return img + + +@Template.define +def categorise_image(image: Image.Image) -> str: + """Return a description of the following image: + {image}""" + raise NotHandled + + +@requires_openai +def test_image_input(): + with ( + handler(LiteLLMProvider(model_name="gpt-4o")), + handler(LimitLLMCallsHandler(max_calls=3)), + ): + assert any("smile" in categorise_image(smiley_face()) for _ in range(3)) From 8118a8f46e628c47e64b5060b55b2e15eb863380 Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Thu, 4 Dec 2025 17:55:00 -0500 Subject: [PATCH 18/39] Merge master into llm-staging (#423) * Release v0.2.3 (#374) * Install prettyprinter for term when library is available (#386) * install prettyprinter for term when library is available * lint * move code into types.py * fix pypandoc issue (#397) * Convert evaluate to 
a singledispatch (#398) * convert evaluate to a singledispatch * lint * add jnp.pi and ArrayTerm.T (#394) * Deprecate defterm (#399) * deprecate defterm * remove defterm case * remove defterm * lint * evaluate distribution arguments * lint * remove interpreter * Revert "remove interpreter" This reverts commit 30442779689da862aa6f8e0224330ffaf8f556ae. * wip * lint * Rework numpyro distribution handling to enable symbolic distributions and handling of distribution methods (#311) * refactor distribution operations * add a test for typeof of distributions * add tests for symbolic dists/arguments * introduce operations for distribution methods * comment * fix tests * work around https://github.com/BasisResearch/effectful/issues/310 * replace hack with new hack * tweak repr for _BaseOperation * lint * work around https://github.com/BasisResearch/effectful/issues/312 * clean up access to dist ops * wip * wip * add type annotations to get correct term conversion * lint * include distribution arguments as properties * fix distribution calls * try again * fixes * format * Box the output of `__type_rule__` (#387) * box the output of __type_rule__ * fix tests * fix tests * require callers of __type_rule__ to box arguments * fix * move Box out of ops.types * lint * fix test * fix syntactic_eq implementation for jax arrays (#405) * Fix recursion error in sizesof (#406) * fix recursion error in sizesof * format * Allow `_BaseOperation` subclasses to have an overrideable `apply` method (#414) * stash * fixes * initial * wip * lint * ensure each subclass has a fresh operation * wip * wip * lint * wip * wip * lint * refactor class method support * move defops * fix test * remove singledispatch case and add test * move definition * cleanup * simplify * cleanup * lint * fix failing test * fix classmethod * __isabstractmethod__ * revert --------- Co-authored-by: Eli * lint --------- Co-authored-by: Tim Cooijmans Co-authored-by: Eli --- docs/source/effectful.rst | 1 - 
docs/source/lambda_.py | 2 +- docs/source/semi_ring.py | 6 +- effectful/handlers/jax/_handlers.py | 31 +- effectful/handlers/jax/_terms.py | 4 + effectful/handlers/jax/numpy/__init__.py | 1 + effectful/handlers/llm/providers.py | 2 +- effectful/handlers/numpyro.py | 1058 +++++++++++++++++----- effectful/handlers/pyro.py | 305 ++++--- effectful/handlers/torch.py | 46 +- effectful/internals/runtime.py | 2 +- effectful/internals/unification.py | 136 +-- effectful/ops/semantics.py | 151 +-- effectful/ops/syntax.py | 479 +--------- effectful/ops/types.py | 497 +++++++++- pyproject.toml | 9 +- tests/test_handlers_jax.py | 6 + tests/test_handlers_numpyro.py | 171 ++-- tests/test_internals_product_n.py | 13 +- tests/test_internals_unification.py | 196 ++-- tests/test_ops_semantics.py | 15 +- tests/test_ops_syntax.py | 124 ++- 22 files changed, 2043 insertions(+), 1212 deletions(-) diff --git a/docs/source/effectful.rst b/docs/source/effectful.rst index 245e8a83..78e3e7e3 100644 --- a/docs/source/effectful.rst +++ b/docs/source/effectful.rst @@ -15,7 +15,6 @@ Syntax :members: :undoc-members: - .. autofunction:: effectful.ops.syntax.defterm(value: T) -> Expr[T] .. 
autofunction:: effectful.ops.syntax.defdata(value: Term[T]) -> Expr[T] Semantics diff --git a/docs/source/lambda_.py b/docs/source/lambda_.py index 822f32d9..2ec34ac7 100644 --- a/docs/source/lambda_.py +++ b/docs/source/lambda_.py @@ -102,7 +102,7 @@ def sort_add(x: Expr[int], y: Expr[int]) -> Expr[int]: case Term(add_, (a, Term(vx, ()))), Term(vy, ()) if add_ == add and id(vx) > id( vy ): - return (a + vy()) + vx() # type: ignore + return (a + vy()) + vx() case _: return fwd() diff --git a/docs/source/semi_ring.py b/docs/source/semi_ring.py index fd799c1b..685dcb13 100644 --- a/docs/source/semi_ring.py +++ b/docs/source/semi_ring.py @@ -174,11 +174,11 @@ def vertical_fusion[S, T](e1: T, x: Operation[[], T], e2: S) -> S: return evaluate( Sum( e_sum, # type: ignore - k1, # type: ignore - v1, # type: ignore + k1, + v1, Let( e_lhs, - v2, # type: ignore + v2, Let(k1(), k2, Dict(k2(), Let(e_lhs, k2, e_rhs))), # type: ignore ), ) diff --git a/effectful/handlers/jax/_handlers.py b/effectful/handlers/jax/_handlers.py index c66ef2c9..26391988 100644 --- a/effectful/handlers/jax/_handlers.py +++ b/effectful/handlers/jax/_handlers.py @@ -12,14 +12,14 @@ import tree -from effectful.ops.semantics import fvsof, typeof +from effectful.internals.runtime import interpreter +from effectful.ops.semantics import apply, evaluate, fvsof, typeof from effectful.ops.syntax import ( Scoped, _CustomSingleDispatchCallable, defdata, deffn, defop, - defterm, syntactic_eq, ) from effectful.ops.types import Expr, NotHandled, Operation, Term @@ -70,19 +70,14 @@ def _getitem_sizeof(x: jax.Array, key: tuple[Expr[IndexElement], ...]): for i, k in enumerate(key): if isinstance(k, Term) and len(k.args) == 0 and len(k.kwargs) == 0: update_sizes(sizes, k.op, x.shape[i]) + return defdata(jax_getitem, x, key) + + def _apply(op, *args, **kwargs): + return defdata(op, *args, **kwargs) + + with interpreter({jax_getitem: _getitem_sizeof, apply: _apply}): + evaluate(value) - def _sizesof(expr): - expr = 
defterm(expr) - if isinstance(expr, Term): - for x in tree.flatten((expr.args, expr.kwargs)): - _sizesof(x) - if expr.op is jax_getitem: - _getitem_sizeof(*expr.args) - elif tree.is_nested(expr): - for x in tree.flatten(expr): - _sizesof(x) - - _sizesof(value) return sizes @@ -287,7 +282,9 @@ def index_expr(i): @syntactic_eq.register -def _(x: jax.typing.ArrayLike, other) -> bool: - return isinstance(other, jax.typing.ArrayLike) and bool( # type: ignore[arg-type] - (jnp.asarray(x) == jnp.asarray(other)).all() +def _(x: jax.Array, other) -> bool: + return ( + isinstance(other, jax.Array) + and x.shape == other.shape + and bool((jnp.asarray(x) == jnp.asarray(other)).all()) ) diff --git a/effectful/handlers/jax/_terms.py b/effectful/handlers/jax/_terms.py index b3324817..5817e484 100644 --- a/effectful/handlers/jax/_terms.py +++ b/effectful/handlers/jax/_terms.py @@ -107,6 +107,10 @@ def __len__(self): def ndim(self) -> Expr[int]: return jnp.ndim(cast(jax.Array, self)) + @property + def T(self) -> jax.Array: + return jnp.transpose(cast(jax.Array, self)) + def __add__(self, other: jax.Array) -> jax.Array: return jnp.add(cast(jax.Array, self), other) diff --git a/effectful/handlers/jax/numpy/__init__.py b/effectful/handlers/jax/numpy/__init__.py index 8ea2a319..990830d2 100644 --- a/effectful/handlers/jax/numpy/__init__.py +++ b/effectful/handlers/jax/numpy/__init__.py @@ -17,6 +17,7 @@ ) globals()[name] = jax_op +pi = jax.numpy.pi # Tell mypy about our wrapped functions. 
if TYPE_CHECKING: diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index 5d04b2d2..29241eec 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -423,6 +423,6 @@ def _completion(self, *args, **kwargs): def _call[**P, T]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: - model_input = format_model_input(template, *args, **kwargs) # type: ignore + model_input = format_model_input(template, *args, **kwargs) resp = compute_response(template, model_input) return decode_response(template, resp) diff --git a/effectful/handlers/numpyro.py b/effectful/handlers/numpyro.py index 407920a0..85ebc0d7 100644 --- a/effectful/handlers/numpyro.py +++ b/effectful/handlers/numpyro.py @@ -5,17 +5,16 @@ import functools -from collections.abc import Collection, Hashable, Mapping -from typing import Any, cast +from collections.abc import Collection, Mapping +from typing import Any import jax -import tree import effectful.handlers.jax.numpy as jnp from effectful.handlers.jax import bind_dims, jax_getitem, sizesof, unbind_dims from effectful.handlers.jax._handlers import _register_jax_op, is_eager_array -from effectful.ops.semantics import apply, runner, typeof -from effectful.ops.syntax import defdata, defop, defterm +from effectful.ops.semantics import evaluate, typeof +from effectful.ops.syntax import defdata, defop from effectful.ops.types import NotHandled, Operation, Term @@ -76,7 +75,11 @@ def _validate_batch_shape(t): def _to_named(a): nonlocal batch_shape - if isinstance(a, jax.Array): + # FIXME: Some distributions take scalar arguments that are never + # batched. Ignore these. We should be able to raise an error in some + # cases that we see a scalar tensor, and a smarter version of this code + # would do so. 
+ if isinstance(a, jax.Array) and a.shape != (): _validate_batch_shape(a) return unbind_dims(a, *names) elif isinstance(a, dist.Distribution): @@ -84,25 +87,16 @@ def _to_named(a): else: return a - # Convert to a term in a context that does not evaluate distribution constructors. - def _apply(op, *args, **kwargs): - typ = op.__type_rule__(*args, **kwargs) - if issubclass(typ, dist.Distribution): - return defdata(op, *args, **kwargs) - return op.__default_rule__(*args, **kwargs) + d = evaluate(d) - with runner({apply: _apply}): - d = defterm(d) - - if not (isinstance(d, Term) and typeof(d) is dist.Distribution): - raise NotHandled - - # TODO: this is a hack to avoid mangling arguments that are array-valued, but not batched - aux_kwargs = set(["total_count"]) + # FIXME: This assumes that the only operations that return distributions are + # distribution constructors. + if not (isinstance(d, Term) and issubclass(typeof(d), dist.Distribution)): + raise NotImplementedError new_d = d.op( *[_to_named(a) for a in d.args], - **{k: v if k in aux_kwargs else _to_named(v) for (k, v) in d.kwargs.items()}, + **{k: _to_named(v) for (k, v) in d.kwargs.items()}, ) return new_d @@ -126,24 +120,19 @@ def _to_positional(a, indices): ) return bind_dims(a_indexed, *indices) elif issubclass(typ, dist.Distribution): - # We are really assuming that only one distriution appears in our arguments. This is sufficient for cases - # like Independent and TransformedDistribution + # We assume that only one distriution appears in our arguments. This + # is sufficient for cases like Independent and + # TransformedDistribution return bind_dims(a, *indices) else: return a - # Convert to a term in a context that does not evaluate distribution constructors. 
- def _apply(op, *args, **kwargs): - typ = op.__type_rule__(*args, **kwargs) - if issubclass(typ, dist.Distribution): - return defdata(op, *args, **kwargs) - return op.__default_rule__(*args, **kwargs) - - with runner({apply: _apply}): - d = defterm(d) + d = evaluate(d) - if not (isinstance(d, Term) and typeof(d) is dist.Distribution): - raise NotHandled + # FIXME: This assumes that the only operations that return distributions are + # distribution constructors. + if not (isinstance(d, Term) and issubclass(typeof(d), dist.Distribution)): + raise NotImplementedError sizes = sizesof(d) indices = {k: sizes[k] for k in names} @@ -155,30 +144,6 @@ def _apply(op, *args, **kwargs): return new_d -@functools.cache -def _register_distribution_op( - dist_constr: type[dist.Distribution], -) -> Operation[Any, dist.Distribution]: - # introduce a wrapper so that we can control type annotations - def wrapper(*args, **kwargs) -> dist.Distribution: - if any(isinstance(a, Term) for a in tree.flatten((args, kwargs))): - raise NotHandled - return dist_constr(*args, **kwargs) - - return defop(wrapper, name=dist_constr.__name__) - - -@defdata.register(dist.Distribution) -def _(op, *args, **kwargs): - if all( - not isinstance(a, Term) or is_eager_array(a) or isinstance(a, dist.Distribution) - for a in tree.flatten((args, kwargs)) - ): - return _DistributionTerm(op, *args, **kwargs) - else: - return defdata.dispatch(object)(op, *args, **kwargs) - - def _broadcast_to_named(t, sizes): missing_dims = set(sizes) - set(sizesof(t)) t_broadcast = jnp.broadcast_to( @@ -222,6 +187,7 @@ def expand_to_batch_shape(tensor, batch_ndims, expanded_batch_shape): return expanded_tensor +@Term.register class _DistributionTerm(dist.Distribution): """A distribution wrapper that satisfies the Term interface. 
@@ -234,28 +200,35 @@ class _DistributionTerm(dist.Distribution): """ - _op: Operation[Any, dist.Distribution] + _constr: type[dist.Distribution] + _op: Operation[..., dist.Distribution] _args: tuple _kwargs: dict + __pos_base_dist: dist.Distribution | None = None - def __init__(self, op: Operation[Any, dist.Distribution], *args, **kwargs): + def __init__(self, constr, op, *args, **kwargs): + assert issubclass(constr, dist.Distribution) + + self._constr = constr self._op = op self._args = args self._kwargs = kwargs - self.__indices = None - self.__pos_base_dist = None - @property - def _indices(self): - if self.__indices is None: - self.__indices = sizesof(self) - return self.__indices + @functools.cached_property + def _indices(self) -> Mapping[Operation[[], jax.Array], int]: + return sizesof(self) - @property - def _pos_base_dist(self): - if self.__pos_base_dist is None: - self.__pos_base_dist = bind_dims(self, *self._indices) - return self.__pos_base_dist + @functools.cached_property + def _pos_base_dist(self) -> dist.Distribution: + bound = bind_dims(self, *self._indices) + return self._constr(*bound.args, **bound.kwargs) + + @functools.cached_property + def _is_eager(self) -> bool: + return all( + (not isinstance(x, Term) or is_eager_array(x)) + for x in (*self.args, *self.kwargs.values()) + ) @property def op(self): @@ -270,33 +243,54 @@ def kwargs(self): return self._kwargs @property - def batch_shape(self): + @defop + def batch_shape(self) -> tuple[int, ...]: + if not (self._is_eager): + raise NotHandled return self._pos_base_dist.batch_shape[len(self._indices) :] @property + @defop def has_rsample(self) -> bool: + if not (self._is_eager): + raise NotHandled return self._pos_base_dist.has_rsample @property - def event_shape(self): + @defop + def event_shape(self) -> tuple[int, ...]: + if not (self._is_eager): + raise NotHandled return self._pos_base_dist.event_shape - def rsample(self, key, sample_shape=()): + def _reindex_sample(self, value, sample_shape): 
+ index = (slice(None),) * len(sample_shape) + tuple(i() for i in self._indices) + ret = jax_getitem(value, index) + return ret + + @defop + def rsample(self, key, sample_shape=()) -> jax.Array: + if not (self._is_eager and is_eager_array(key)): + raise NotHandled + return self._reindex_sample( self._pos_base_dist.rsample(key, sample_shape), sample_shape ) - def sample(self, key, sample_shape=()): + @defop + def sample(self, key, sample_shape=()) -> jax.Array: + if not (self._is_eager and is_eager_array(key)): + raise NotHandled + return self._reindex_sample( self._pos_base_dist.sample(key, sample_shape), sample_shape ) - def _reindex_sample(self, value, sample_shape): - index = (slice(None),) * len(sample_shape) + tuple(i() for i in self._indices) - ret = jax_getitem(value, index) - return ret + @defop + def log_prob(self, value) -> jax.Array: + if not (self._is_eager and is_eager_array(value)): + raise NotHandled - def log_prob(self, value): # value has shape named_batch_shape + sample_shape + batch_shape + event_shape n_batch_event = len(self.batch_shape) + len(self.event_shape) sample_shape = ( @@ -305,7 +299,7 @@ def log_prob(self, value): value = bind_dims(_broadcast_to_named(value, self._indices), *self._indices) dims = list(range(len(value.shape))) n_named_batch = len(self._indices) - perm = ( + perm = tuple( dims[n_named_batch : n_named_batch + len(sample_shape)] + dims[:n_named_batch] + dims[n_named_batch + len(sample_shape) :] @@ -319,23 +313,46 @@ def log_prob(self, value): return ind_log_prob @property - def mean(self): - return self._reindex_sample(self._pos_base_dist.mean, ()) + @defop + def mean(self) -> jax.Array: + if not self._is_eager: + raise NotHandled + try: + return self._reindex_sample(self._pos_base_dist.mean, ()) + except NotImplementedError: + raise RuntimeError(f"mean is not implemented for {type(self).__name__}") @property - def variance(self): - return self._reindex_sample(self._pos_base_dist.variance, ()) - - def 
enumerate_support(self, expand=True): + @defop + def variance(self) -> jax.Array: + if not self._is_eager: + raise NotHandled + try: + return self._reindex_sample(self._pos_base_dist.variance, ()) + except NotImplementedError: + raise RuntimeError(f"variance is not implemented for {type(self).__name__}") + + @defop + def enumerate_support(self, expand=True) -> jax.Array: + if not self._is_eager: + raise NotHandled return self._reindex_sample(self._pos_base_dist.enumerate_support(expand), ()) - def entropy(self): + @defop + def entropy(self) -> jax.Array: + if not self._is_eager: + raise NotHandled return self._pos_base_dist.entropy() - def to_event(self, reinterpreted_batch_ndims=None): + @defop + def to_event(self, reinterpreted_batch_ndims=None) -> dist.Distribution: raise NotHandled - def expand(self, batch_shape): + @defop + def expand(self, batch_shape) -> jax.Array: + if not self._is_eager: + raise NotHandled + def expand_arg(a, batch_shape): if is_eager_array(a): return expand_to_batch_shape(a, len(self.batch_shape), batch_shape) @@ -357,206 +374,767 @@ def __str__(self): return Term.__str__(self) -Term.register(_DistributionTerm) +batch_shape = _DistributionTerm.batch_shape +event_shape = _DistributionTerm.event_shape +has_rsample = _DistributionTerm.has_rsample +rsample = _DistributionTerm.rsample +sample = _DistributionTerm.sample +log_prob = _DistributionTerm.log_prob +mean = _DistributionTerm.mean +variance = _DistributionTerm.variance +enumerate_support = _DistributionTerm.enumerate_support +entropy = _DistributionTerm.entropy +to_event = _DistributionTerm.to_event +expand = _DistributionTerm.expand -@defterm.register(dist.Distribution) -def _embed_distribution(dist: dist.Distribution) -> Term[dist.Distribution]: - raise ValueError( - f"No embedding provided for distribution of type {type(dist).__name__}." 
- ) +@defop +def Cauchy(loc=0.0, scale=1.0, **kwargs) -> dist.Cauchy: + raise NotHandled -@defterm.register(dist.Cauchy) -@defterm.register(dist.Gumbel) -@defterm.register(dist.Laplace) -@defterm.register(dist.LogNormal) -@defterm.register(dist.Logistic) -@defterm.register(dist.Normal) -@defterm.register(dist.StudentT) -def _embed_loc_scale(d: dist.Distribution) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.loc, d.scale) +@defdata.register(dist.Cauchy) +class CauchyTerm(_DistributionTerm): + def __init__(self, op, loc, scale, **kwargs): + super().__init__(dist.Cauchy, op, loc, scale, **kwargs) + self.loc = loc + self.scale = scale -@defterm.register(dist.BernoulliProbs) -@defterm.register(dist.CategoricalProbs) -@defterm.register(dist.GeometricProbs) -def _embed_probs(d: dist.Distribution) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.probs) +@evaluate.register(dist.Cauchy) +def _embed_cauchy(d: dist.Cauchy) -> Term[dist.Cauchy]: + return Cauchy(d.loc, d.scale) -@defterm.register(dist.BernoulliLogits) -@defterm.register(dist.CategoricalLogits) -@defterm.register(dist.GeometricLogits) -def _embed_logits(d: dist.Distribution) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.logits) +@defop +def Gumbel(loc=0.0, scale=1.0, **kwargs) -> dist.Gumbel: + raise NotHandled -@defterm.register(dist.Beta) -@defterm.register(dist.Kumaraswamy) -def _embed_beta(d: dist.Distribution) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))( - d.concentration1, d.concentration0 - ) +@defdata.register(dist.Gumbel) +class GumbelTerm(_DistributionTerm): + def __init__(self, op, loc, scale, **kwargs): + super().__init__(dist.Gumbel, op, loc, scale, **kwargs) + self.loc = loc + self.scale = scale + + +@evaluate.register(dist.Gumbel) +def _embed_gumbel(d: dist.Gumbel) -> Term[dist.Gumbel]: + return Gumbel(d.loc, d.scale) + + 
+@defop +def Laplace(loc=0.0, scale=1.0, **kwargs) -> dist.Laplace: + raise NotHandled + + +@defdata.register(dist.Laplace) +class LaplaceTerm(_DistributionTerm): + def __init__(self, op, loc, scale, **kwargs): + super().__init__(dist.Laplace, op, loc, scale, **kwargs) + self.loc = loc + self.scale = scale + + +@evaluate.register(dist.Laplace) +def _embed_laplace(d: dist.Laplace) -> Term[dist.Laplace]: + return Laplace(d.loc, d.scale) + + +@defop +def LogNormal(loc=0.0, scale=1.0, **kwargs) -> dist.LogNormal: + raise NotHandled + + +@defdata.register(dist.LogNormal) +class LogNormalTerm(_DistributionTerm): + def __init__(self, op, loc, scale, **kwargs): + super().__init__(dist.LogNormal, op, loc, scale, **kwargs) + self.loc = loc + self.scale = scale + + +@evaluate.register(dist.LogNormal) +def _embed_lognormal(d: dist.LogNormal) -> Term[dist.LogNormal]: + return LogNormal(d.loc, d.scale) + + +@defop +def Logistic(loc=0.0, scale=1.0, **kwargs) -> dist.Logistic: + raise NotHandled + + +@defdata.register(dist.Logistic) +class LogisticTerm(_DistributionTerm): + def __init__(self, op, loc, scale, **kwargs): + super().__init__(dist.Logistic, op, loc, scale, **kwargs) + self.loc = loc + self.scale = scale + + +@evaluate.register(dist.Logistic) +def _embed_logistic(d: dist.Logistic) -> Term[dist.Logistic]: + return Logistic(d.loc, d.scale) + + +@defop +def Normal(loc=0.0, scale=1.0, **kwargs) -> dist.Normal: + raise NotHandled + + +@defdata.register(dist.Normal) +class NormalTerm(_DistributionTerm): + def __init__(self, op, loc, scale, **kwargs): + super().__init__(dist.Normal, op, loc, scale, **kwargs) + self.loc = loc + self.scale = scale + + +@evaluate.register(dist.Normal) +def _embed_normal(d: dist.Normal) -> Term[dist.Normal]: + return Normal(d.loc, d.scale) + + +@defop +def StudentT(df, loc=0.0, scale=1.0, **kwargs) -> dist.StudentT: + raise NotHandled + + +@defdata.register(dist.StudentT) +class StudentTTerm(_DistributionTerm): + def __init__(self, op, df, loc, 
scale, **kwargs): + super().__init__(dist.StudentT, op, df, loc, scale, **kwargs) + self.df = df + self.loc = loc + self.scale = scale + + +@evaluate.register(dist.StudentT) +def _embed_studentt(d: dist.StudentT) -> Term[dist.StudentT]: + return StudentT(d.df, d.loc, d.scale) + + +@defop +def BernoulliProbs(probs, **kwargs) -> dist.BernoulliProbs: + raise NotHandled + + +@defdata.register(dist.BernoulliProbs) +class BernoulliProbsTerm(_DistributionTerm): + def __init__(self, op, probs, **kwargs): + super().__init__(dist.BernoulliProbs, op, probs, **kwargs) + self.probs = probs + + +@evaluate.register(dist.BernoulliProbs) +def _embed_bernoulliprobs(d: dist.BernoulliProbs) -> Term[dist.BernoulliProbs]: + return BernoulliProbs(d.probs) + + +@defop +def CategoricalProbs(probs, **kwargs) -> dist.CategoricalProbs: + raise NotHandled + + +@defdata.register(dist.CategoricalProbs) +class CategoricalProbsTerm(_DistributionTerm): + def __init__(self, op, probs, **kwargs): + super().__init__(dist.CategoricalProbs, op, probs, **kwargs) + self.probs = probs + + +@evaluate.register(dist.CategoricalProbs) +def _embed_categoricalprobs(d: dist.CategoricalProbs) -> Term[dist.CategoricalProbs]: + return CategoricalProbs(d.probs) + + +@defop +def GeometricProbs(probs, **kwargs) -> dist.GeometricProbs: + raise NotHandled + + +@defdata.register(dist.GeometricProbs) +class GeometricProbsTerm(_DistributionTerm): + def __init__(self, op, probs, **kwargs): + super().__init__(dist.GeometricProbs, op, probs, **kwargs) + self.probs = probs + + +@evaluate.register(dist.GeometricProbs) +def _embed_geometricprobs(d: dist.GeometricProbs) -> Term[dist.GeometricProbs]: + return GeometricProbs(d.probs) + + +@defop +def BernoulliLogits(logits, **kwargs) -> dist.BernoulliLogits: + raise NotHandled + + +@defdata.register(dist.BernoulliLogits) +class BernoulliLogitsTerm(_DistributionTerm): + def __init__(self, op, logits, **kwargs): + super().__init__(dist.BernoulliLogits, op, logits, **kwargs) + 
self.logits = logits + + +@evaluate.register(dist.BernoulliLogits) +def _embed_bernoullilogits(d: dist.BernoulliLogits) -> Term[dist.BernoulliLogits]: + return BernoulliLogits(d.logits) + + +@defop +def CategoricalLogits(logits, **kwargs) -> dist.CategoricalLogits: + raise NotHandled + + +@defdata.register(dist.CategoricalLogits) +class CategoricalLogitsTerm(_DistributionTerm): + def __init__(self, op, logits, **kwargs): + super().__init__(dist.CategoricalLogits, op, logits, **kwargs) + self.logits = logits + + +@evaluate.register(dist.CategoricalLogits) +def _embed_categoricallogits(d: dist.CategoricalLogits) -> Term[dist.CategoricalLogits]: + return CategoricalLogits(d.logits) + + +@defop +def GeometricLogits(logits, **kwargs) -> dist.GeometricLogits: + raise NotHandled + + +@defdata.register(dist.GeometricLogits) +class GeometricLogitsTerm(_DistributionTerm): + def __init__(self, op, logits, **kwargs): + super().__init__(dist.GeometricLogits, op, logits, **kwargs) + self.logits = logits + + +@evaluate.register(dist.GeometricLogits) +def _embed_geometriclogits(d: dist.GeometricLogits) -> Term[dist.GeometricLogits]: + return GeometricLogits(d.logits) + + +@defop +def Beta(concentration1, concentration0, **kwargs) -> dist.Beta: + raise NotHandled + + +@defdata.register(dist.Beta) +class BetaTerm(_DistributionTerm): + def __init__(self, op, concentration1, concentration0, **kwargs): + super().__init__(dist.Beta, op, concentration1, concentration0, **kwargs) + self.concentration1 = concentration1 + self.concentration0 = concentration0 + + +@evaluate.register(dist.Beta) +def _embed_beta(d: dist.Beta) -> Term[dist.Beta]: + return Beta(d.concentration1, d.concentration0) + + +@defop +def Kumaraswamy(concentration1, concentration0, **kwargs) -> dist.Kumaraswamy: + raise NotHandled + + +@defdata.register(dist.Kumaraswamy) +class KumaraswamyTerm(_DistributionTerm): + def __init__(self, op, concentration1, concentration0, **kwargs): + super().__init__(dist.Kumaraswamy, op, 
concentration1, concentration0, **kwargs) + self.concentration1 = concentration1 + self.concentration0 = concentration0 -@defterm.register(dist.BinomialProbs) -@defterm.register(dist.NegativeBinomialProbs) -@defterm.register(dist.MultinomialProbs) -def _embed_binomial_probs(d: dist.Distribution) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.probs, d.total_count) +@evaluate.register(dist.Kumaraswamy) +def _embed_kumaraswamy(d: dist.Kumaraswamy) -> Term[dist.Kumaraswamy]: + return Kumaraswamy(d.concentration1, d.concentration0) -@defterm.register(dist.BinomialLogits) -@defterm.register(dist.NegativeBinomialLogits) -@defterm.register(dist.MultinomialLogits) -def _embed_binomial_logits(d: dist.Distribution) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.logits, d.total_count) +@defop +def BinomialProbs(probs, total_count=1, **kwargs) -> dist.BinomialProbs: + raise NotHandled -@defterm.register -def _embed_chi2(d: dist.Chi2) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.df) +@defdata.register(dist.BinomialProbs) +class BinomialProbsTerm(_DistributionTerm): + def __init__(self, op, probs, total_count, **kwargs): + super().__init__(dist.BinomialProbs, op, probs, total_count, **kwargs) + self.probs = probs + self.total_count = total_count -@defterm.register -def _embed_dirichlet(d: dist.Dirichlet) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.concentration) +@evaluate.register(dist.BinomialProbs) +def _embed_binomialprobs(d: dist.BinomialProbs) -> Term[dist.BinomialProbs]: + return BinomialProbs(d.probs, d.total_count) -@defterm.register -def _embed_dirichlet_multinomial( +@defop +def NegativeBinomialProbs(total_count, probs, **kwargs) -> dist.NegativeBinomialProbs: + raise NotHandled + + +@defdata.register(dist.NegativeBinomialProbs) +class NegativeBinomialProbsTerm(_DistributionTerm): + def 
__init__(self, op, total_count, probs, **kwargs): + super().__init__(dist.NegativeBinomialProbs, op, total_count, probs, **kwargs) + self.total_count = total_count + self.probs = probs + + +@evaluate.register(dist.NegativeBinomialProbs) +def _embed_negativebinomialprobs( + d: dist.NegativeBinomialProbs, +) -> Term[dist.NegativeBinomialProbs]: + return NegativeBinomialProbs(d.total_count, d.probs) + + +@defop +def MultinomialProbs(probs, total_count=1, **kwargs) -> dist.MultinomialProbs: + raise NotHandled + + +@defdata.register(dist.MultinomialProbs) +class MultinomialProbsTerm(_DistributionTerm): + def __init__(self, op, probs, total_count, **kwargs): + super().__init__(dist.MultinomialProbs, op, probs, total_count, **kwargs) + self.probs = probs + self.total_count = total_count + + +@evaluate.register(dist.MultinomialProbs) +def _embed_multinomialprobs(d: dist.MultinomialProbs) -> Term[dist.MultinomialProbs]: + return MultinomialProbs(d.probs, d.total_count) + + +@defop +def BinomialLogits(logits, total_count=1, **kwargs) -> dist.BinomialLogits: + raise NotHandled + + +@defdata.register(dist.BinomialLogits) +class BinomialLogitsTerm(_DistributionTerm): + def __init__(self, op, logits, total_count, **kwargs): + super().__init__(dist.BinomialLogits, op, logits, total_count, **kwargs) + self.logits = logits + self.total_count = total_count + + +@evaluate.register(dist.BinomialLogits) +def _embed_binomiallogits(d: dist.BinomialLogits) -> Term[dist.BinomialLogits]: + return BinomialLogits(d.logits, d.total_count) + + +@defop +def NegativeBinomialLogits( + total_count, logits, **kwargs +) -> dist.NegativeBinomialLogits: + raise NotHandled + + +@defdata.register(dist.NegativeBinomialLogits) +class NegativeBinomialLogitsTerm(_DistributionTerm): + def __init__(self, op, total_count, logits, **kwargs): + super().__init__(dist.NegativeBinomialLogits, op, total_count, logits, **kwargs) + self.total_count = total_count + self.logits = logits + + 
+@evaluate.register(dist.NegativeBinomialLogits) +def _embed_negativebinomiallogits( + d: dist.NegativeBinomialLogits, +) -> Term[dist.NegativeBinomialLogits]: + return NegativeBinomialLogits(d.total_count, d.logits) + + +@defop +def MultinomialLogits(logits, total_count=1, **kwargs) -> dist.MultinomialLogits: + raise NotHandled + + +@defdata.register(dist.MultinomialLogits) +class MultinomialLogitsTerm(_DistributionTerm): + def __init__(self, op, logits, total_count, **kwargs): + super().__init__(dist.MultinomialLogits, op, logits, total_count, **kwargs) + self.logits = logits + self.total_count = total_count + + +@evaluate.register(dist.MultinomialLogits) +def _embed_multinomiallogits(d: dist.MultinomialLogits) -> Term[dist.MultinomialLogits]: + return MultinomialLogits(d.logits, d.total_count) + + +@defop +def Chi2(df, **kwargs) -> dist.Chi2: + raise NotHandled + + +@defdata.register(dist.Chi2) +class Chi2Term(_DistributionTerm): + def __init__(self, op, df, **kwargs): + super().__init__(dist.Chi2, op, df, **kwargs) + self.df = df + + +@evaluate.register(dist.Chi2) +def _embed_chi2(d: dist.Chi2) -> Term[dist.Chi2]: + return Chi2(d.df) + + +@defop +def Dirichlet(concentration, **kwargs) -> dist.Dirichlet: + raise NotHandled + + +@defdata.register(dist.Dirichlet) +class DirichletTerm(_DistributionTerm): + def __init__(self, op, concentration, **kwargs): + super().__init__(dist.Dirichlet, op, concentration, **kwargs) + self.concentration = concentration + + +@evaluate.register(dist.Dirichlet) +def _embed_dirichlet(d: dist.Dirichlet) -> Term[dist.Dirichlet]: + return Dirichlet(d.concentration) + + +@defop +def DirichletMultinomial( + concentration, total_count=1, **kwargs +) -> dist.DirichletMultinomial: + raise NotHandled + + +@defdata.register(dist.DirichletMultinomial) +class DirichletMultinomialTerm(_DistributionTerm): + def __init__(self, op, concentration, total_count, **kwargs): + super().__init__( + dist.DirichletMultinomial, op, concentration, total_count, 
**kwargs + ) + self.concentration = concentration + self.total_count = total_count + + +@evaluate.register(dist.DirichletMultinomial) +def _embed_dirichletmultinomial( d: dist.DirichletMultinomial, -) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))( - d.concentration, total_count=d.total_count - ) +) -> Term[dist.DirichletMultinomial]: + return DirichletMultinomial(d.concentration, d.total_count) -@defterm.register(dist.Exponential) -@defterm.register(dist.Poisson) -def _embed_exponential(d: dist.Distribution) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.rate) +@defop +def Exponential(rate=1.0, **kwargs) -> dist.Exponential: + raise NotHandled -@defterm.register -def _embed_gamma(d: dist.Gamma) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.concentration, d.rate) +@defdata.register(dist.Exponential) +class ExponentialTerm(_DistributionTerm): + def __init__(self, op, rate, **kwargs): + super().__init__(dist.Exponential, op, rate, **kwargs) + self.rate = rate -@defterm.register(dist.HalfCauchy) -@defterm.register(dist.HalfNormal) -def _embed_half_cauchy(d: dist.Distribution) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.scale) +@evaluate.register(dist.Exponential) +def _embed_exponential(d: dist.Exponential) -> Term[dist.Exponential]: + return Exponential(d.rate) -@defterm.register -def _embed_lkj_cholesky(d: dist.LKJCholesky) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))( - d.dim, concentration=d.concentration - ) +@defop +def Poisson(rate, **kwargs) -> dist.Poisson: + raise NotHandled -@defterm.register -def _embed_multivariate_normal(d: dist.MultivariateNormal) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))( - d.loc, scale_tril=d.scale_tril - ) +@defdata.register(dist.Poisson) +class 
PoissonTerm(_DistributionTerm): + def __init__(self, op, rate, **kwargs): + super().__init__(dist.Poisson, op, rate, **kwargs) + self.rate = rate + + +@evaluate.register(dist.Poisson) +def _embed_poisson(d: dist.Poisson) -> Term[dist.Poisson]: + return Poisson(d.rate) + + +@defop +def Gamma(concentration, rate=1.0, **kwargs) -> dist.Gamma: + raise NotHandled + + +@defdata.register(dist.Gamma) +class GammaTerm(_DistributionTerm): + def __init__(self, op, concentration, rate, **kwargs): + super().__init__(dist.Gamma, op, concentration, rate, **kwargs) + self.concentration = concentration + self.rate = rate + + +@evaluate.register(dist.Gamma) +def _embed_gamma(d: dist.Gamma) -> Term[dist.Gamma]: + return Gamma(d.concentration, d.rate) + + +@defop +def HalfCauchy(scale=1.0, **kwargs) -> dist.HalfCauchy: + raise NotHandled -@defterm.register -def _embed_pareto(d: dist.Pareto) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.scale, d.alpha) +@defdata.register(dist.HalfCauchy) +class HalfCauchyTerm(_DistributionTerm): + def __init__(self, op, scale, **kwargs): + super().__init__(dist.HalfCauchy, op, scale, **kwargs) + self.scale = scale -@defterm.register -def _embed_uniform(d: dist.Uniform) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.low, d.high) +@evaluate.register(dist.HalfCauchy) +def _embed_halfcauchy(d: dist.HalfCauchy) -> Term[dist.HalfCauchy]: + return HalfCauchy(d.scale) -@defterm.register -def _embed_von_mises(d: dist.VonMises) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.loc, d.concentration) +@defop +def HalfNormal(scale=1.0, **kwargs) -> dist.HalfNormal: + raise NotHandled -@defterm.register -def _embed_weibull(d: dist.Weibull) -> Term[dist.Distribution]: - return _register_distribution_op(dist.Weibull)(d.scale, d.concentration) +@defdata.register(dist.HalfNormal) +class HalfNormalTerm(_DistributionTerm): + def __init__(self, op, 
scale, **kwargs): + super().__init__(dist.HalfNormal, op, scale, **kwargs) + self.scale = scale -@defterm.register -def _embed_wishart(d: dist.Wishart) -> Term[dist.Distribution]: - return _register_distribution_op(dist.Wishart)(d.df, d.scale_tril) +@evaluate.register(dist.HalfNormal) +def _embed_halfnormal(d: dist.HalfNormal) -> Term[dist.HalfNormal]: + return HalfNormal(d.scale) -@defterm.register -def _embed_delta(d: dist.Delta) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))( - d.v, log_density=d.log_density, event_dim=d.event_dim +@defop +def LKJCholesky(dim, concentration=1.0, **kwargs) -> dist.LKJCholesky: + raise NotHandled + + +@defdata.register(dist.LKJCholesky) +class LKJCholeskyTerm(_DistributionTerm): + def __init__(self, op, dim, concentration, **kwargs): + super().__init__(dist.LKJCholesky, op, dim, concentration, **kwargs) + self.dim = dim + self.concentration = concentration + + +@evaluate.register(dist.LKJCholesky) +def _embed_lkjcholesky(d: dist.LKJCholesky) -> Term[dist.LKJCholesky]: + return LKJCholesky(d.dim, d.concentration) + + +@defop +def MultivariateNormal( + loc=0.0, covariance_matrix=None, precision_matrix=None, scale_tril=None, **kwargs +) -> dist.MultivariateNormal: + raise NotHandled + + +@defdata.register(dist.MultivariateNormal) +class MultivariateNormalTerm(_DistributionTerm): + def __init__( + self, op, loc, covariance_matrix, precision_matrix, scale_tril, **kwargs + ): + super().__init__( + dist.MultivariateNormal, + op, + loc, + covariance_matrix, + precision_matrix, + scale_tril, + **kwargs, + ) + self.loc = loc + self.covariance_matrix = covariance_matrix + self.precision_matrix = precision_matrix + self.scale_tril = scale_tril + + +@evaluate.register(dist.MultivariateNormal) +def _embed_multivariatenormal( + d: dist.MultivariateNormal, +) -> Term[dist.MultivariateNormal]: + return MultivariateNormal( + d.loc, d.covariance_matrix, d.precision_matrix, d.scale_tril ) -@defterm.register 
-def _embed_low_rank_multivariate_normal( +@defop +def Pareto(scale, alpha, **kwargs) -> dist.Pareto: + raise NotHandled + + +@defdata.register(dist.Pareto) +class ParetoTerm(_DistributionTerm): + def __init__(self, op, scale, alpha, **kwargs): + super().__init__(dist.Pareto, op, scale, alpha, **kwargs) + self.scale = scale + self.alpha = alpha + + +@evaluate.register(dist.Pareto) +def _embed_pareto(d: dist.Pareto) -> Term[dist.Pareto]: + return Pareto(d.scale, d.alpha) + + +@defop +def Uniform(low=0.0, high=1.0, **kwargs) -> dist.Uniform: + raise NotHandled + + +@defdata.register(dist.Uniform) +class UniformTerm(_DistributionTerm): + def __init__(self, op, low, high, **kwargs): + super().__init__(dist.Uniform, op, low, high, **kwargs) + self.low = low + self.high = high + + +@evaluate.register(dist.Uniform) +def _embed_uniform(d: dist.Uniform) -> Term[dist.Uniform]: + return Uniform(d.low, d.high) + + +@defop +def VonMises(loc, concentration, **kwargs) -> dist.VonMises: + raise NotHandled + + +@defdata.register(dist.VonMises) +class VonMisesTerm(_DistributionTerm): + def __init__(self, op, loc, concentration, **kwargs): + super().__init__(dist.VonMises, op, loc, concentration, **kwargs) + self.loc = loc + self.concentration = concentration + + +@evaluate.register(dist.VonMises) +def _embed_vonmises(d: dist.VonMises) -> Term[dist.VonMises]: + return VonMises(d.loc, d.concentration) + + +@defop +def Weibull(scale, concentration, **kwargs) -> dist.Weibull: + raise NotHandled + + +@defdata.register(dist.Weibull) +class WeibullTerm(_DistributionTerm): + def __init__(self, op, scale, concentration, **kwargs): + super().__init__(dist.Weibull, op, scale, concentration, **kwargs) + self.scale = scale + self.concentration = concentration + + +@evaluate.register(dist.Weibull) +def _embed_weibull(d: dist.Weibull) -> Term[dist.Weibull]: + return Weibull(d.scale, d.concentration) + + +@defop +def Wishart(df, scale_tril, **kwargs) -> dist.Wishart: + raise NotHandled + + 
+@defdata.register(dist.Wishart) +class WishartTerm(_DistributionTerm): + def __init__(self, op, df, scale_tril, **kwargs): + super().__init__(dist.Wishart, op, df, scale_tril, **kwargs) + self.df = df + self.scale_tril = scale_tril + + +@evaluate.register(dist.Wishart) +def _embed_wishart(d: dist.Wishart) -> Term[dist.Wishart]: + return Wishart(d.df, d.scale_tril) + + +@defop +def Delta(v=0.0, log_density=0.0, event_dim=0, **kwargs) -> dist.Delta: + raise NotHandled + + +@defdata.register(dist.Delta) +class DeltaTerm(_DistributionTerm): + def __init__(self, op, v, log_density, event_dim, **kwargs): + super().__init__(dist.Delta, op, v, log_density, event_dim, **kwargs) + self.v = v + self.log_density = log_density + + +@evaluate.register(dist.Delta) +def _embed_delta(d: dist.Delta) -> Term[dist.Delta]: + return Delta(d.v, d.log_density, d.event_dim) + + +@defop +def LowRankMultivariateNormal( + loc, cov_factor, cov_diag, **kwargs +) -> dist.LowRankMultivariateNormal: + raise NotHandled + + +@defdata.register(dist.LowRankMultivariateNormal) +class LowRankMultivariateNormalTerm(_DistributionTerm): + def __init__(self, op, loc, cov_factor, cov_diag, **kwargs): + super().__init__( + dist.LowRankMultivariateNormal, op, loc, cov_factor, cov_diag, **kwargs + ) + self.loc = loc + self.cov_factor = cov_factor + self.cov_diag = cov_diag + + +@evaluate.register(dist.LowRankMultivariateNormal) +def _embed_lowrankmultivariatenormal( d: dist.LowRankMultivariateNormal, -) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))( - d.loc, d.cov_factor, d.cov_diag - ) +) -> Term[dist.LowRankMultivariateNormal]: + return LowRankMultivariateNormal(d.loc, d.cov_factor, d.cov_diag) + +@defop +def RelaxedBernoulliLogits( + temperature, logits, **kwargs +) -> dist.RelaxedBernoulliLogits: + raise NotHandled -@defterm.register -def _embed_relaxed_bernoulli_logits( + +@defdata.register(dist.RelaxedBernoulliLogits) +class 
RelaxedBernoulliLogitsTerm(_DistributionTerm): + def __init__(self, op, temperature, logits, **kwargs): + super().__init__(dist.RelaxedBernoulliLogits, op, temperature, logits, **kwargs) + self.temperature = temperature + self.logits = logits + + +@evaluate.register(dist.RelaxedBernoulliLogits) +def _embed_relaxedbernoullilogits( d: dist.RelaxedBernoulliLogits, -) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))(d.temperature, d.logits) +) -> Term[dist.RelaxedBernoulliLogits]: + return RelaxedBernoulliLogits(d.temperature, d.logits) -@defterm.register -def _embed_independent(d: dist.Independent) -> Term[dist.Distribution]: - return _register_distribution_op(cast(Hashable, type(d)))( - d.base_dist, d.reinterpreted_batch_ndims - ) +@defop +def Independent(base_dist, reinterpreted_batch_ndims, **kwargs) -> dist.Independent: + raise NotHandled + + +@defdata.register(dist.Independent) +class IndependentTerm(_DistributionTerm): + def __init__(self, op, base_dist, reinterpreted_batch_ndims, **kwargs): + super().__init__( + dist.Independent, op, base_dist, reinterpreted_batch_ndims, **kwargs + ) + self.base_dist = base_dist + self.reinterpreted_batch_ndims = reinterpreted_batch_ndims -BernoulliLogits = _register_distribution_op(dist.BernoulliLogits) -BernoulliProbs = _register_distribution_op(dist.BernoulliProbs) -Beta = _register_distribution_op(dist.Beta) -BinomialProbs = _register_distribution_op(dist.BinomialProbs) -BinomialLogits = _register_distribution_op(dist.BinomialLogits) -CategoricalLogits = _register_distribution_op(dist.CategoricalLogits) -CategoricalProbs = _register_distribution_op(dist.CategoricalProbs) -Cauchy = _register_distribution_op(dist.Cauchy) -Chi2 = _register_distribution_op(dist.Chi2) -Delta = _register_distribution_op(dist.Delta) -Dirichlet = _register_distribution_op(dist.Dirichlet) -DirichletMultinomial = _register_distribution_op(dist.DirichletMultinomial) -Distribution = 
_register_distribution_op(dist.Distribution) -Exponential = _register_distribution_op(dist.Exponential) -Gamma = _register_distribution_op(dist.Gamma) -GeometricLogits = _register_distribution_op(dist.GeometricLogits) -GeometricProbs = _register_distribution_op(dist.GeometricProbs) -Gumbel = _register_distribution_op(dist.Gumbel) -HalfCauchy = _register_distribution_op(dist.HalfCauchy) -HalfNormal = _register_distribution_op(dist.HalfNormal) -Independent = _register_distribution_op(dist.Independent) -Kumaraswamy = _register_distribution_op(dist.Kumaraswamy) -LKJCholesky = _register_distribution_op(dist.LKJCholesky) -Laplace = _register_distribution_op(dist.Laplace) -LogNormal = _register_distribution_op(dist.LogNormal) -Logistic = _register_distribution_op(dist.Logistic) -LowRankMultivariateNormal = _register_distribution_op(dist.LowRankMultivariateNormal) -MultinomialProbs = _register_distribution_op(dist.MultinomialProbs) -MultinomialLogits = _register_distribution_op(dist.MultinomialLogits) -MultivariateNormal = _register_distribution_op(dist.MultivariateNormal) -NegativeBinomialProbs = _register_distribution_op(dist.NegativeBinomialProbs) -NegativeBinomialLogits = _register_distribution_op(dist.NegativeBinomialLogits) -Normal = _register_distribution_op(dist.Normal) -Pareto = _register_distribution_op(dist.Pareto) -Poisson = _register_distribution_op(dist.Poisson) -RelaxedBernoulliLogits = _register_distribution_op(dist.RelaxedBernoulliLogits) -StudentT = _register_distribution_op(dist.StudentT) -Uniform = _register_distribution_op(dist.Uniform) -VonMises = _register_distribution_op(dist.VonMises) -Weibull = _register_distribution_op(dist.Weibull) -Wishart = _register_distribution_op(dist.Wishart) +@evaluate.register(dist.Independent) +def _embed_independent(d: dist.Independent) -> Term[dist.Independent]: + return Independent(d.base_dist, d.reinterpreted_batch_ndims) diff --git a/effectful/handlers/pyro.py b/effectful/handlers/pyro.py index 913c2fa8..13399d1d 
100644 --- a/effectful/handlers/pyro.py +++ b/effectful/handlers/pyro.py @@ -27,8 +27,8 @@ unbind_dims, ) from effectful.internals.runtime import interpreter -from effectful.ops.semantics import apply, runner, typeof -from effectful.ops.syntax import defdata, defop, defterm +from effectful.ops.semantics import apply, evaluate, handler, typeof +from effectful.ops.syntax import defdata, defop from effectful.ops.types import NotHandled, Operation, Term @@ -339,7 +339,6 @@ def _unbind_dims_distribution( value: pyro.distributions.torch_distribution.TorchDistribution, *names: Operation[[], torch.Tensor], ) -> pyro.distributions.torch_distribution.TorchDistribution: - d = value batch_shape = None def _validate_batch_shape(t): @@ -369,18 +368,8 @@ def _to_named(a): return a # Convert to a term in a context that does not evaluate distribution constructors. - def _apply(op, *args, **kwargs): - typ = op.__type_rule__(*args, **kwargs) - if issubclass( - typ, pyro.distributions.torch_distribution.TorchDistribution - ) or issubclass( - typ, pyro.distributions.torch_distribution.TorchDistributionMixin - ): - return defdata(op, *args, **kwargs) - return op.__default_rule__(*args, **kwargs) - - with runner({apply: _apply}): - d = defterm(d) + with handler({apply: defdata}): + d = typing.cast(TorchDistribution, evaluate(value)) if not (isinstance(d, Term) and typeof(d) is TorchDistribution): raise NotHandled @@ -399,8 +388,6 @@ def _bind_dims_distribution( value: pyro.distributions.torch_distribution.TorchDistribution, *names: Operation[[], torch.Tensor], ) -> pyro.distributions.torch_distribution.TorchDistribution: - d = value - def _to_positional(a, indices): if isinstance(a, torch.Tensor): # broadcast to full indexed shape @@ -416,19 +403,8 @@ def _to_positional(a, indices): else: return a - # Convert to a term in a context that does not evaluate distribution constructors. 
- def _apply(op, *args, **kwargs): - typ = op.__type_rule__(*args, **kwargs) - if issubclass( - typ, pyro.distributions.torch_distribution.TorchDistribution - ) or issubclass( - typ, pyro.distributions.torch_distribution.TorchDistributionMixin - ): - return defdata(op, *args, **kwargs) - return op.__default_rule__(*args, **kwargs) - - with runner({apply: _apply}): - d = defterm(d) + with handler({apply: defdata}): + d = typing.cast(TorchDistribution, evaluate(value)) if not (isinstance(d, Term) and typeof(d) is TorchDistribution): raise NotHandled @@ -452,7 +428,7 @@ def _register_distribution_op( def wrapper(*args, **kwargs) -> TorchDistribution: return dist_constr(*args, **kwargs) - return defop(wrapper) + return defop(wrapper, name=dist_constr.__name__) @defdata.register(pyro.distributions.torch_distribution.TorchDistribution) @@ -475,8 +451,8 @@ class _DistributionTerm(Term[TorchDistribution], TorchDistribution): def __init__(self, op: Operation[Any, TorchDistribution], *args, **kwargs): self._op = op - self._args = tuple(defterm(a) for a in args) - self._kwargs = {k: defterm(v) for (k, v) in kwargs.items()} + self._args = args + self._kwargs = kwargs @property def op(self): @@ -531,258 +507,337 @@ def enumerate_support(self, expand=True): return self._base_dist.enumerate_support(expand) -@defterm.register(TorchDistribution) -@defterm.register(TorchDistributionMixin) +@evaluate.register(TorchDistribution) +@evaluate.register(TorchDistributionMixin) def _embed_distribution(dist: TorchDistribution) -> Term[TorchDistribution]: raise ValueError( f"No embedding provided for distribution of type {type(dist).__name__}." ) -@defterm.register +################################################################################ +# Note: Accessing attributes on a distribution actually mutates the +# distribution, so it is unsafe to access attributes in a context that overrides +# torch_getitem and the partial evaluation rules. 
+################################################################################ + + +@evaluate.register def _embed_expanded(d: dist.ExpandedDistribution) -> Term[TorchDistribution]: with interpreter({}): - batch_shape = d._batch_shape - base_dist = d.base_dist - base_batch_shape = base_dist.batch_shape - if batch_shape == base_batch_shape: - return base_dist + batch_shape_raw = d._batch_shape + base_dist_raw = d.base_dist + + batch_shape = evaluate(batch_shape_raw) + base_dist = evaluate(base_dist_raw) + base_batch_shape = base_dist.batch_shape # type: ignore + if batch_shape == base_batch_shape: + return base_dist raise ValueError("Nontrivial ExpandedDistribution not implemented.") -@defterm.register +@evaluate.register def _embed_independent(d: dist.Independent) -> Term[TorchDistribution]: with interpreter({}): - base_dist = d.base_dist - reinterpreted_batch_ndims = d.reinterpreted_batch_ndims + base_dist_raw = d.base_dist + reinterpreted_batch_ndims_raw = d.reinterpreted_batch_ndims + + base_dist = evaluate(base_dist_raw) + reinterpreted_batch_ndims = evaluate(reinterpreted_batch_ndims_raw) return _register_distribution_op(type(d))(base_dist, reinterpreted_batch_ndims) -@defterm.register +@evaluate.register def _embed_folded(d: dist.FoldedDistribution) -> Term[TorchDistribution]: with interpreter({}): - base_dist = d.base_dist + base_dist_raw = d.base_dist + + base_dist = evaluate(base_dist_raw) return _register_distribution_op(type(d))(base_dist) # type: ignore -@defterm.register +@evaluate.register def _embed_masked(d: dist.MaskedDistribution) -> Term[TorchDistribution]: with interpreter({}): - base_dist = d.base_dist - mask = d._mask + base_dist_raw = d.base_dist + mask_raw = d._mask + + base_dist = evaluate(base_dist_raw) + mask = evaluate(mask_raw) return _register_distribution_op(type(d))(base_dist, mask) -@defterm.register(dist.Cauchy) -@defterm.register(dist.Gumbel) -@defterm.register(dist.Laplace) -@defterm.register(dist.LogNormal) 
-@defterm.register(dist.Logistic) -@defterm.register(dist.LogisticNormal) -@defterm.register(dist.Normal) -@defterm.register(dist.StudentT) +@evaluate.register(dist.Cauchy) +@evaluate.register(dist.Gumbel) +@evaluate.register(dist.Laplace) +@evaluate.register(dist.LogNormal) +@evaluate.register(dist.Logistic) +@evaluate.register(dist.LogisticNormal) +@evaluate.register(dist.Normal) +@evaluate.register(dist.StudentT) def _embed_loc_scale(d: TorchDistribution) -> Term[TorchDistribution]: with interpreter({}): - loc = d.loc - scale = d.scale + loc_raw = d.loc + scale_raw = d.scale + + loc = evaluate(loc_raw) + scale = evaluate(scale_raw) return _register_distribution_op(type(d))(loc, scale) -@defterm.register(dist.Bernoulli) -@defterm.register(dist.Categorical) -@defterm.register(dist.ContinuousBernoulli) -@defterm.register(dist.Geometric) -@defterm.register(dist.OneHotCategorical) -@defterm.register(dist.OneHotCategoricalStraightThrough) +@evaluate.register(dist.Bernoulli) +@evaluate.register(dist.Categorical) +@evaluate.register(dist.ContinuousBernoulli) +@evaluate.register(dist.Geometric) +@evaluate.register(dist.OneHotCategorical) +@evaluate.register(dist.OneHotCategoricalStraightThrough) def _embed_probs(d: TorchDistribution) -> Term[TorchDistribution]: with interpreter({}): - probs = d.probs + probs_raw = d.probs + + probs = evaluate(probs_raw) return _register_distribution_op(type(d))(probs) -@defterm.register(dist.Beta) -@defterm.register(dist.Kumaraswamy) +@evaluate.register(dist.Beta) +@evaluate.register(dist.Kumaraswamy) def _embed_beta(d: TorchDistribution) -> Term[TorchDistribution]: with interpreter({}): - concentration1 = d.concentration1 - concentration0 = d.concentration0 + concentration1_raw = d.concentration1 + concentration0_raw = d.concentration0 + + concentration1 = evaluate(concentration1_raw) + concentration0 = evaluate(concentration0_raw) return _register_distribution_op(type(d))(concentration1, concentration0) -@defterm.register 
+@evaluate.register def _embed_binomial(d: dist.Binomial) -> Term[TorchDistribution]: with interpreter({}): - total_count = d.total_count - probs = d.probs + total_count_raw = d.total_count + probs_raw = d.probs + + total_count = evaluate(total_count_raw) + probs = evaluate(probs_raw) return _register_distribution_op(dist.Binomial)(total_count, probs) -@defterm.register +@evaluate.register def _embed_chi2(d: dist.Chi2) -> Term[TorchDistribution]: with interpreter({}): - df = d.df + df_raw = d.df + + df = evaluate(df_raw) return _register_distribution_op(dist.Chi2)(df) -@defterm.register +@evaluate.register def _embed_dirichlet(d: dist.Dirichlet) -> Term[TorchDistribution]: with interpreter({}): - concentration = d.concentration + concentration_raw = d.concentration + + concentration = evaluate(concentration_raw) return _register_distribution_op(dist.Dirichlet)(concentration) -@defterm.register +@evaluate.register def _embed_exponential(d: dist.Exponential) -> Term[TorchDistribution]: with interpreter({}): - rate = d.rate + rate_raw = d.rate + + rate = evaluate(rate_raw) return _register_distribution_op(dist.Exponential)(rate) -@defterm.register +@evaluate.register def _embed_fisher_snedecor(d: dist.FisherSnedecor) -> Term[TorchDistribution]: with interpreter({}): - df1 = d.df1 - df2 = d.df2 + df1_raw = d.df1 + df2_raw = d.df2 + + df1 = evaluate(df1_raw) + df2 = evaluate(df2_raw) return _register_distribution_op(dist.FisherSnedecor)(df1, df2) -@defterm.register +@evaluate.register def _embed_gamma(d: dist.Gamma) -> Term[TorchDistribution]: with interpreter({}): - concentration = d.concentration - rate = d.rate + concentration_raw = d.concentration + rate_raw = d.rate + + concentration = evaluate(concentration_raw) + rate = evaluate(rate_raw) return _register_distribution_op(dist.Gamma)(concentration, rate) -@defterm.register(dist.HalfCauchy) -@defterm.register(dist.HalfNormal) +@evaluate.register(dist.HalfCauchy) +@evaluate.register(dist.HalfNormal) def 
_embed_half_cauchy(d: TorchDistribution) -> Term[TorchDistribution]: with interpreter({}): - scale = d.scale + scale_raw = d.scale + + scale = evaluate(scale_raw) return _register_distribution_op(type(d))(scale) -@defterm.register +@evaluate.register def _embed_lkj_cholesky(d: dist.LKJCholesky) -> Term[TorchDistribution]: with interpreter({}): - dim = d.dim - concentration = d.concentration + dim_raw = d.dim + concentration_raw = d.concentration + + dim = evaluate(dim_raw) + concentration = evaluate(concentration_raw) return _register_distribution_op(dist.LKJCholesky)(dim, concentration=concentration) -@defterm.register +@evaluate.register def _embed_multinomial(d: dist.Multinomial) -> Term[TorchDistribution]: with interpreter({}): - total_count = d.total_count - probs = d.probs + total_count_raw = d.total_count + probs_raw = d.probs + + total_count = evaluate(total_count_raw) + probs = evaluate(probs_raw) return _register_distribution_op(dist.Multinomial)(total_count, probs) -@defterm.register +@evaluate.register def _embed_multivariate_normal(d: dist.MultivariateNormal) -> Term[TorchDistribution]: with interpreter({}): - loc = d.loc - scale_tril = d.scale_tril + loc_raw = d.loc + scale_tril_raw = d.scale_tril + + loc = evaluate(loc_raw) + scale_tril = evaluate(scale_tril_raw) return _register_distribution_op(dist.MultivariateNormal)( loc, scale_tril=scale_tril ) -@defterm.register +@evaluate.register def _embed_negative_binomial(d: dist.NegativeBinomial) -> Term[TorchDistribution]: with interpreter({}): - total_count = d.total_count - probs = d.probs + total_count_raw = d.total_count + probs_raw = d.probs + + total_count = evaluate(total_count_raw) + probs = evaluate(probs_raw) return _register_distribution_op(dist.NegativeBinomial)(total_count, probs) -@defterm.register +@evaluate.register def _embed_pareto(d: dist.Pareto) -> Term[TorchDistribution]: with interpreter({}): - scale = d.scale - alpha = d.alpha + scale_raw = d.scale + alpha_raw = d.alpha + + scale = 
evaluate(scale_raw) + alpha = evaluate(alpha_raw) return _register_distribution_op(dist.Pareto)(scale, alpha) -@defterm.register +@evaluate.register def _embed_poisson(d: dist.Poisson) -> Term[TorchDistribution]: with interpreter({}): - rate = d.rate + rate_raw = d.rate + + rate = evaluate(rate_raw) return _register_distribution_op(dist.Poisson)(rate) -@defterm.register(dist.RelaxedBernoulli) -@defterm.register(dist.RelaxedOneHotCategorical) +@evaluate.register(dist.RelaxedBernoulli) +@evaluate.register(dist.RelaxedOneHotCategorical) def _embed_relaxed(d: TorchDistribution) -> Term[TorchDistribution]: with interpreter({}): - temperature = d.temperature - probs = d.probs + temperature_raw = d.temperature + probs_raw = d.probs + + temperature = evaluate(temperature_raw) + probs = evaluate(probs_raw) return _register_distribution_op(type(d))(temperature, probs) -@defterm.register +@evaluate.register def _embed_uniform(d: dist.Uniform) -> Term[TorchDistribution]: with interpreter({}): - low = d.low - high = d.high + low_raw = d.low + high_raw = d.high + + low = evaluate(low_raw) + high = evaluate(high_raw) return _register_distribution_op(dist.Uniform)(low, high) -@defterm.register +@evaluate.register def _embed_von_mises(d: dist.VonMises) -> Term[TorchDistribution]: with interpreter({}): - loc = d.loc - concentration = d.concentration + loc_raw = d.loc + concentration_raw = d.concentration + + loc = evaluate(loc_raw) + concentration = evaluate(concentration_raw) return _register_distribution_op(dist.VonMises)(loc, concentration) -@defterm.register +@evaluate.register def _embed_weibull(d: dist.Weibull) -> Term[TorchDistribution]: with interpreter({}): - scale = d.scale - concentration = d.concentration + scale_raw = d.scale + concentration_raw = d.concentration + + scale = evaluate(scale_raw) + concentration = evaluate(concentration_raw) return _register_distribution_op(dist.Weibull)(scale, concentration) -@defterm.register +@evaluate.register def _embed_wishart(d: 
dist.Wishart) -> Term[TorchDistribution]: with interpreter({}): - df = d.df - scale_tril = d.scale_tril + df_raw = d.df + scale_tril_raw = d.scale_tril + + df = evaluate(df_raw) + scale_tril = evaluate(scale_tril_raw) return _register_distribution_op(dist.Wishart)(df, scale_tril) -@defterm.register +@evaluate.register def _embed_delta(d: dist.Delta) -> Term[TorchDistribution]: with interpreter({}): - v = d.v - log_density = d.log_density - event_dim = d.event_dim + v_raw = d.v + log_density_raw = d.log_density + event_dim_raw = d.event_dim + + v = evaluate(v_raw) + log_density = evaluate(log_density_raw) + event_dim = evaluate(event_dim_raw) return _register_distribution_op(dist.Delta)( v, log_density=log_density, event_dim=event_dim diff --git a/effectful/handlers/torch.py b/effectful/handlers/torch.py index cc626661..57f91b9f 100644 --- a/effectful/handlers/torch.py +++ b/effectful/handlers/torch.py @@ -14,7 +14,7 @@ from effectful.internals.runtime import interpreter from effectful.internals.tensor_utils import _desugar_tensor_index from effectful.ops.semantics import apply, evaluate, fvsof, handler, typeof -from effectful.ops.syntax import Scoped, defdata, defop, defterm, syntactic_eq +from effectful.ops.syntax import Scoped, defdata, defop, syntactic_eq from effectful.ops.types import Expr, NotHandled, Operation, Term # + An element of a tensor index expression. 
@@ -72,11 +72,10 @@ def _torch_getitem_sizeof( return defdata(torch_getitem, x, key) def _apply(op, *args, **kwargs): - args, kwargs = tree.map_structure(defterm, (args, kwargs)) return defdata(op, *args, **kwargs) with interpreter({torch_getitem: _torch_getitem_sizeof, apply: _apply}): - evaluate(defterm(value)) + evaluate(value) return sizes @@ -167,35 +166,28 @@ def bind_dims[ def _bind_dims_tensor( value: torch.Tensor, *names: Operation[[], torch.Tensor] ) -> torch.Tensor: - def _evaluate(expr): - if isinstance(expr, Term): - (args, kwargs) = tree.map_structure(_evaluate, (expr.args, expr.kwargs)) - return _partial_eval(expr) - if tree.is_nested(expr): - return tree.map_structure(_evaluate, expr) - return expr - - t = value - args = names - - if not isinstance(t, Term): - return t + names_set = set(names) + + if not len(names_set) == len(names): + raise ValueError("Expected names to be distinct") - result = _evaluate(t) - if not isinstance(result, Term) or not args: - return result + if not (names_set & set(sizesof(value).keys())): + return value # ensure that the result is a torch_getitem with a tensor as the first argument - if not (result.op is torch_getitem and isinstance(result.args[0], torch.Tensor)): + if not ( + isinstance(value, Term) + and value.op is torch_getitem + and isinstance(value.args[0], torch.Tensor) + ): raise NotHandled - tensor = result.args[0] - dims = result.args[1] + tensor = value.args[0] + dims = value.args[1] assert isinstance(dims, Sequence) # ensure that the order is a subset of the named dimensions - order_set = set(args) - if not order_set <= set(a.op for a in dims if isinstance(a, Term)): + if not names_set <= set(a.op for a in dims if isinstance(a, Term)): raise NotHandled # permute the inner tensor so that the leading dimensions are in the order @@ -204,12 +196,12 @@ def _evaluate(expr): reindex_dims = [ i for i, o in enumerate(dims) - if not isinstance(o, Term) or o.op not in order_set + if not isinstance(o, Term) or o.op 
not in names_set ] dim_ops = [a.op if isinstance(a, Term) else None for a in dims] - perm = [dim_ops.index(o) for o in args] + reindex_dims + perm = [dim_ops.index(o) for o in names] + reindex_dims tensor = tensor.permute(perm) - return tensor[(slice(None),) * len(args) + tuple(dims[i] for i in reindex_dims)] + return tensor[(slice(None),) * len(names) + tuple(dims[i] for i in reindex_dims)] @defop diff --git a/effectful/internals/runtime.py b/effectful/internals/runtime.py index 12488e13..4ad7b534 100644 --- a/effectful/internals/runtime.py +++ b/effectful/internals/runtime.py @@ -41,7 +41,7 @@ def _get_args() -> tuple[tuple, Mapping]: def _restore_args[**P, T](fn: Callable[P, T]) -> Callable[P, T]: @functools.wraps(fn) def _cont_wrapper(*a: P.args, **k: P.kwargs) -> T: - a, k = (a, k) if a or k else _get_args() # type: ignore + a, k = (a, k) if a or k else _get_args() return fn(*a, **k) return _cont_wrapper diff --git a/effectful/internals/unification.py b/effectful/internals/unification.py index a3c1be78..8c3a63b4 100644 --- a/effectful/internals/unification.py +++ b/effectful/internals/unification.py @@ -66,6 +66,7 @@ import operator import types import typing +from dataclasses import dataclass try: from typing import _collect_type_parameters as _freetypevars # type: ignore @@ -93,6 +94,16 @@ Substitutions = collections.abc.Mapping[TypeVariable, TypeExpressions] +@dataclass +class Box[T]: + """Boxed types. Prevents confusion between types computed by __type_rule__ + and values. + + """ + + value: T + + @typing.overload def unify( typ: inspect.Signature, @@ -582,7 +593,7 @@ def _(typ: typing.ForwardRef): @functools.singledispatch -def nested_type(value) -> TypeExpression: +def nested_type(value) -> Box[TypeExpression]: """ Infer the type of a value, handling nested collections with generic parameters. 
@@ -618,87 +629,87 @@ def nested_type(value) -> TypeExpression: >>> from effectful.internals.unification import nested_type # Basic types are returned as their type - >>> nested_type(42) + >>> nested_type(42).value - >>> nested_type("hello") + >>> nested_type("hello").value - >>> nested_type(3.14) + >>> nested_type(3.14).value - >>> nested_type(True) + >>> nested_type(True).value - # Type objects pass through unchanged - >>> nested_type(int) + # Boxed type objects pass through unchanged + >>> nested_type(Box(int)).value - >>> nested_type(str) + >>> nested_type(Box(str)).value - >>> nested_type(list) + >>> nested_type(Box(list)).value # Empty collections return their base type - >>> nested_type([]) + >>> nested_type([]).value - >>> nested_type({}) + >>> nested_type({}).value - >>> nested_type(set()) + >>> nested_type(set()).value # Sequences become Sequence[element_type] - >>> nested_type([1, 2, 3]) + >>> nested_type([1, 2, 3]).value collections.abc.MutableSequence[int] - >>> nested_type(["a", "b", "c"]) + >>> nested_type(["a", "b", "c"]).value collections.abc.MutableSequence[str] # Tuples preserve exact structure - >>> nested_type((1, "hello", 3.14)) + >>> nested_type((1, "hello", 3.14)).value tuple[int, str, float] - >>> nested_type(()) + >>> nested_type(()).value - >>> nested_type((1,)) + >>> nested_type((1,)).value tuple[int] # Sets become Set[element_type] - >>> nested_type({1, 2, 3}) + >>> nested_type({1, 2, 3}).value collections.abc.MutableSet[int] - >>> nested_type({"a", "b"}) + >>> nested_type({"a", "b"}).value collections.abc.MutableSet[str] # Mappings become Mapping[key_type, value_type] - >>> nested_type({"key": "value"}) + >>> nested_type({"key": "value"}).value collections.abc.MutableMapping[str, str] - >>> nested_type({1: "one", 2: "two"}) + >>> nested_type({1: "one", 2: "two"}).value collections.abc.MutableMapping[int, str] # Strings and bytes are NOT treated as sequences - >>> nested_type("hello") + >>> nested_type("hello").value - >>> 
nested_type(b"bytes") + >>> nested_type(b"bytes").value # Annotated functions return types derived from their annotations >>> def annotated_func(x: int) -> str: ... return str(x) - >>> nested_type(annotated_func) + >>> nested_type(annotated_func).value collections.abc.Callable[[int], str] # Unannotated functions/callables return their type >>> def f(): pass - >>> nested_type(f) + >>> nested_type(f).value - >>> nested_type(lambda x: x) + >>> nested_type(lambda x: x).value # Generic aliases and union types pass through - >>> nested_type(list[int]) + >>> nested_type(Box(list[int])).value list[int] - >>> nested_type(int | str) + >>> nested_type(Box(int | str)).value int | str """ - return type(value) + return Box(type(value)) @nested_type.register -def _(value: TypeExpression): +def _(value: Box): return value @@ -709,23 +720,23 @@ def _(value: effectful.ops.types.Term): @nested_type.register def _(value: effectful.ops.types.Operation): - typ = nested_type.dispatch(collections.abc.Callable)(value) + typ = nested_type.dispatch(collections.abc.Callable)(value).value (arg_types, return_type) = typing.get_args(typ) - return effectful.ops.types.Operation[arg_types, return_type] # type: ignore + return Box(effectful.ops.types.Operation[arg_types, return_type]) # type: ignore @nested_type.register def _(value: collections.abc.Callable): if typing.get_overloads(value): - return type(value) + return Box(type(value)) try: sig = inspect.signature(value) except ValueError: - return type(value) + return Box(type(value)) if sig.return_annotation is inspect.Signature.empty: - return type(value) + return Box(type(value)) elif any( p.annotation is inspect.Parameter.empty or p.kind @@ -736,60 +747,65 @@ def _(value: collections.abc.Callable): } for p in sig.parameters.values() ): - return collections.abc.Callable[..., sig.return_annotation] + return Box(collections.abc.Callable[..., sig.return_annotation]) else: - return collections.abc.Callable[ - [p.annotation for p in 
sig.parameters.values()], sig.return_annotation - ] + return Box( + collections.abc.Callable[ + [p.annotation for p in sig.parameters.values()], sig.return_annotation + ] + ) @nested_type.register def _(value: collections.abc.Mapping): if value and isinstance(value, effectful.ops.types.Interpretation): - return effectful.ops.types.Interpretation + return Box(effectful.ops.types.Interpretation) if len(value) == 0: - return type(value) + return Box(type(value)) elif len(value) == 1: - ktyp = nested_type(next(iter(value.keys()))) - vtyp = nested_type(next(iter(value.values()))) - return canonicalize(type(value))[ktyp, vtyp] # type: ignore + ktyp = nested_type(next(iter(value.keys()))).value + vtyp = nested_type(next(iter(value.values()))).value + return Box(canonicalize(type(value))[ktyp, vtyp]) # type: ignore else: - ktyp = functools.reduce(operator.or_, map(nested_type, value.keys())) - vtyp = functools.reduce(operator.or_, map(nested_type, value.values())) + ktyp = functools.reduce( + operator.or_, [nested_type(x).value for x in value.keys()] + ) + vtyp = functools.reduce( + operator.or_, [nested_type(x).value for x in value.values()] + ) if isinstance(ktyp, UnionType) or isinstance(vtyp, UnionType): - return type(value) + return Box(type(value)) else: - return canonicalize(type(value))[ktyp, vtyp] # type: ignore + return Box(canonicalize(type(value))[ktyp, vtyp]) # type: ignore @nested_type.register def _(value: collections.abc.Collection): if len(value) == 0: - return type(value) + return Box(type(value)) elif len(value) == 1: - vtyp = nested_type(next(iter(value))) - return canonicalize(type(value))[vtyp] # type: ignore + vtyp = nested_type(next(iter(value))).value + return Box(canonicalize(type(value))[vtyp]) # type: ignore else: - valtyp = functools.reduce(operator.or_, map(nested_type, value)) + valtyp = functools.reduce(operator.or_, [nested_type(x).value for x in value]) if isinstance(valtyp, UnionType): - return type(value) + return Box(type(value)) else: 
- return canonicalize(type(value))[valtyp] # type: ignore + return Box(canonicalize(type(value))[valtyp]) # type: ignore @nested_type.register def _(value: tuple): - return ( - nested_type.dispatch(collections.abc.Sequence)(value) - if type(value) != tuple or len(value) == 0 - else tuple[tuple(nested_type(item) for item in value)] # type: ignore - ) + if type(value) != tuple or len(value) == 0: + return nested_type.dispatch(collections.abc.Sequence)(value) + else: + return Box(tuple[tuple(nested_type(item).value for item in value)]) # type: ignore @nested_type.register def _(value: str | bytes | range | None): - return type(value) + return Box(type(value)) def freetypevars(typ) -> collections.abc.Set[TypeVariable]: diff --git a/effectful/ops/semantics.py b/effectful/ops/semantics.py index cc3857a7..879e56ad 100644 --- a/effectful/ops/semantics.py +++ b/effectful/ops/semantics.py @@ -3,9 +3,10 @@ import dataclasses import types import typing +from collections.abc import Callable from typing import Any -from effectful.ops.syntax import defop +from effectful.ops.syntax import _CustomSingleDispatchCallable, defop from effectful.ops.types import ( Expr, Interpretation, @@ -15,7 +16,7 @@ ) -@defop # type: ignore +@defop def apply[**P, T](op: Operation[P, T], *args: P.args, **kwargs: P.kwargs) -> T: """Apply ``op`` to ``args``, ``kwargs`` in interpretation ``intp``. @@ -37,23 +38,14 @@ def apply[**P, T](op: Operation[P, T], *args: P.args, **kwargs: P.kwargs) -> T: By installing an :func:`apply` handler, we capture the term instead: - >>> def default(*args, **kwargs): - ... raise NotHandled - >>> with handler({apply: default }): + >>> from effectful.ops.syntax import defdata + >>> with handler({apply: defdata}): ... 
term = mul(add(1, 2), 3) >>> print(str(term)) mul(add(1, 2), 3) """ - from effectful.internals.runtime import get_interpretation - - intp = get_interpretation() - if op in intp: - return intp[op](*args, **kwargs) - elif apply in intp: - return intp[apply](op, *args, **kwargs) - else: - return op.__default_rule__(*args, **kwargs) # type: ignore + return op.__default_rule__(*args, **kwargs) # type: ignore @defop @@ -209,7 +201,13 @@ def handler(intp: Interpretation): yield intp -def evaluate[T](expr: Expr[T], *, intp: Interpretation | None = None) -> Expr[T]: +@_CustomSingleDispatchCallable +def evaluate[T]( + __dispatch: Callable[[type], Callable[..., Expr[T]]], + expr: Expr[T], + *, + intp: Interpretation | None = None, +) -> Expr[T]: """Evaluate expression ``expr`` using interpretation ``intp``. If no interpretation is provided, uses the current interpretation. @@ -228,46 +226,12 @@ def evaluate[T](expr: Expr[T], *, intp: Interpretation | None = None) -> Expr[T] 6 """ - from effectful.internals.runtime import get_interpretation, interpreter + from effectful.internals.runtime import interpreter if intp is not None: return interpreter(intp)(evaluate)(expr) - if isinstance(expr, Term): - args = tuple(evaluate(arg) for arg in expr.args) - kwargs = {k: evaluate(v) for k, v in expr.kwargs.items()} - return expr.op(*args, **kwargs) - elif isinstance(expr, Operation): - op_intp = get_interpretation().get(expr, expr) - return op_intp if isinstance(op_intp, Operation) else expr # type: ignore - elif isinstance(expr, collections.abc.Mapping): - if isinstance(expr, collections.defaultdict): - return type(expr)(expr.default_factory, evaluate(tuple(expr.items()))) # type: ignore - elif isinstance(expr, types.MappingProxyType): - return type(expr)(dict(evaluate(tuple(expr.items())))) # type: ignore - else: - return type(expr)(evaluate(tuple(expr.items()))) # type: ignore - elif isinstance(expr, collections.abc.Sequence): - if isinstance(expr, str | bytes): - return 
typing.cast(T, expr) # mypy doesnt like ignore here, so we use cast - elif ( - isinstance(expr, tuple) - and hasattr(expr, "_fields") - and all(hasattr(expr, field) for field in getattr(expr, "_fields")) - ): # namedtuple - return type(expr)( - **{field: evaluate(getattr(expr, field)) for field in expr._fields} - ) - else: - return type(expr)(evaluate(item) for item in expr) # type: ignore - elif isinstance(expr, collections.abc.Set): - if isinstance(expr, collections.abc.ItemsView | collections.abc.KeysView): - return {evaluate(item) for item in expr} # type: ignore - else: - return type(expr)(evaluate(item) for item in expr) # type: ignore - elif isinstance(expr, collections.abc.ValuesView): - return [evaluate(item) for item in expr] # type: ignore - elif dataclasses.is_dataclass(expr) and not isinstance(expr, type): + if dataclasses.is_dataclass(expr) and not isinstance(expr, type): return typing.cast( T, dataclasses.replace( @@ -278,8 +242,75 @@ def evaluate[T](expr: Expr[T], *, intp: Interpretation | None = None) -> Expr[T] }, ), ) + + return __dispatch(type(expr))(expr) + + +@evaluate.register(object) +@evaluate.register(str) +@evaluate.register(bytes) +def _evaluate_object[T](expr: T, **kwargs) -> T: + return expr + + +@evaluate.register(Term) +def _evaluate_term(expr: Term, **kwargs): + args = tuple(evaluate(arg) for arg in expr.args) + kwargs = {k: evaluate(v) for k, v in expr.kwargs.items()} + return expr.op(*args, **kwargs) + + +@evaluate.register(Operation) +def _evaluate_operation(expr: Operation, **kwargs) -> Operation: + from effectful.internals.runtime import get_interpretation + + op_intp = get_interpretation().get(expr, expr) + return op_intp if isinstance(op_intp, Operation) else expr + + +@evaluate.register(collections.defaultdict) +def _evaluate_defaultdict(expr, **kwargs): + return type(expr)(expr.default_factory, evaluate(tuple(expr.items()))) + + +@evaluate.register(types.MappingProxyType) +def _evaluate_mappingproxytype(expr, **kwargs): + 
return type(expr)(dict(evaluate(tuple(expr.items())))) + + +@evaluate.register(collections.abc.Mapping) +def _evaluate_mapping(expr, **kwargs): + return type(expr)(evaluate(tuple(expr.items()))) + + +@evaluate.register(tuple) +def _evaluate_tuple(expr, **kwargs): + if ( + isinstance(expr, tuple) + and hasattr(expr, "_fields") + and all(hasattr(expr, field) for field in getattr(expr, "_fields")) + ): # namedtuple + return type(expr)( + **{field: evaluate(getattr(expr, field)) for field in expr._fields} + ) else: - return typing.cast(T, expr) + return type(expr)(evaluate(item) for item in expr) + + +@evaluate.register(collections.abc.Sequence) +def _evaluate_sequence(expr, **kwargs): + return type(expr)(evaluate(item) for item in expr) + + +@evaluate.register(collections.abc.ItemsView) +@evaluate.register(collections.abc.KeysView) +def _evaluate_set_view(expr, **kwargs): + return {evaluate(item) for item in expr} + + +@evaluate.register(collections.abc.ValuesView) +def _evaluate_list_view(expr, **kwargs): + return [evaluate(item) for item in expr] def _simple_type(tp: type) -> type: @@ -320,14 +351,16 @@ def typeof[T](term: Expr[T]) -> type[T]: """ from effectful.internals.runtime import interpreter + from effectful.internals.unification import Box - with interpreter({apply: lambda op, *a, **k: op.__type_rule__(*a, **k)}): - if isinstance(term, Term): - # If term is a Term, we evaluate it to get its type - tp = evaluate(term) - return _simple_type(typing.cast(type, tp)) - else: - return type(term) + def _apply(op, *args, **kwargs): + return Box(op.__type_rule__(*args, **kwargs)) + + with interpreter({apply: _apply}): + type_or_value = evaluate(term) + if isinstance(type_or_value, Box): + return _simple_type(type_or_value.value) + return typing.cast(type[T], type(type_or_value)) def fvsof[S](term: Expr[S]) -> collections.abc.Set[Operation]: diff --git a/effectful/ops/syntax.py b/effectful/ops/syntax.py index fc2d753c..2e43d6c7 100644 --- a/effectful/ops/syntax.py +++ 
b/effectful/ops/syntax.py @@ -4,14 +4,19 @@ import inspect import numbers import operator -import random -import types import typing import warnings from collections.abc import Callable, Iterable, Mapping -from typing import Annotated, Any, Concatenate +from typing import Annotated, Any -from effectful.ops.types import Annotation, Expr, NotHandled, Operation, Term +from effectful.ops.types import ( + Annotation, + Expr, + NotHandled, + Operation, + Term, + _CustomSingleDispatchCallable, +) @dataclasses.dataclass @@ -378,405 +383,10 @@ def extract_operations(obj): return bound_vars -@functools.singledispatch -def defop[**P, T]( - t: Callable[P, T], *, name: str | None = None, freshening=list[int] | None -) -> Operation[P, T]: - """Creates a fresh :class:`Operation`. +defop = Operation.define - :param t: May be a type, callable, or :class:`Operation`. If a type, the - operation will have no arguments and return the type. If a callable, - the operation will have the same signature as the callable, but with - no default rule. If an operation, the operation will be a distinct - copy of the operation. - :param name: Optional name for the operation. - :returns: A fresh operation. - .. note:: - - The result of :func:`defop` is always fresh (i.e. ``defop(f) != defop(f)``). - - **Example usage**: - - * Defining an operation: - - This example defines an operation that selects one of two integers: - - >>> @defop - ... def select(x: int, y: int) -> int: - ... return x - - The operation can be called like a regular function. By default, ``select`` - returns the first argument: - - >>> select(1, 2) - 1 - - We can change its behavior by installing a ``select`` handler: - - >>> from effectful.ops.semantics import handler - >>> with handler({select: lambda x, y: y}): - ... print(select(1, 2)) - 2 - - * Defining an operation with no default rule: - - We can use :func:`defop` and the - :exc:`NotHandled` exception to define an - operation with no default rule: - - >>> @defop - ... 
def add(x: int, y: int) -> int: - ... raise NotHandled - >>> print(str(add(1, 2))) - add(1, 2) - - When an operation has no default rule, the free rule is used instead, which - constructs a term of the operation applied to its arguments. This feature - can be used to conveniently define the syntax of a domain-specific language. - - * Defining free variables: - - Passing :func:`defop` a type is a handy way to create a free variable. - - >>> from effectful.ops.semantics import evaluate - >>> x = defop(int, name='x') - >>> y = x() + 1 - - ``y`` is free in ``x``, so it is not fully evaluated: - - >>> print(str(y)) - __add__(x(), 1) - - We bind ``x`` by installing a handler for it: - - >>> with handler({x: lambda: 2}): - ... print(evaluate(y)) - 3 - - .. note:: - - Because the result of :func:`defop` is always fresh, it's important to - be careful with variable identity. - - Two operations with the same name that come from different calls to - ``defop`` are not equal: - - >>> x1 = defop(int, name='x') - >>> x2 = defop(int, name='x') - >>> x1 == x2 - False - - This means that to correctly bind a variable, you must use the same - operation object. In this example, ``scale`` returns a term with a free - variable ``x``: - - >>> x = defop(float, name='x') - >>> def scale(a: float) -> float: - ... return x() * a - - Binding the variable ``x`` as follows does not work: - - >>> term = scale(3.0) - >>> fresh_x = defop(float, name='x') - >>> with handler({fresh_x: lambda: 2.0}): - ... print(str(evaluate(term))) - __mul__(x(), 3.0) - - Only the original operation object will work: - - >>> from effectful.ops.semantics import fvsof - >>> with handler({x: lambda: 2.0}): - ... print(evaluate(term)) - 6.0 - - * Defining a fresh :class:`Operation`: - - Passing :func:`defop` an :class:`Operation` creates a fresh operation with - the same name and signature, but no default rule. 
- - >>> fresh_select = defop(select) - >>> print(str(fresh_select(1, 2))) - select(1, 2) - - The new operation is distinct from the original: - - >>> with handler({select: lambda x, y: y}): - ... print(select(1, 2), fresh_select(1, 2)) - 2 select(1, 2) - - >>> with handler({fresh_select: lambda x, y: y}): - ... print(select(1, 2), fresh_select(1, 2)) - 1 2 - - """ - raise NotImplementedError(f"expected type or callable, got {t}") - - -@defop.register(typing.cast(type[collections.abc.Callable], collections.abc.Callable)) -class _BaseOperation[**Q, V](Operation[Q, V]): - __signature__: inspect.Signature - __name__: str - - _default: Callable[Q, V] - - def __init__( - self, - default: Callable[Q, V], - *, - name: str | None = None, - freshening: list[int] | None = None, - ): - functools.update_wrapper(self, default) - self._default = default - self.__name__ = name or default.__name__ - self._freshening = freshening or [] - self.__signature__ = inspect.signature(default) - - def __eq__(self, other): - if not isinstance(other, Operation): - return NotImplemented - return self is other - - def __lt__(self, other): - if not isinstance(other, Operation): - return NotImplemented - return id(self) < id(other) - - def __hash__(self): - return hash(self._default) - - def __default_rule__(self, *args: Q.args, **kwargs: Q.kwargs) -> "Expr[V]": - try: - try: - return self._default(*args, **kwargs) - except NotImplementedError: - warnings.warn( - "Operations should raise effectful.ops.types.NotHandled instead of NotImplementedError.", - DeprecationWarning, - ) - raise NotHandled - except NotHandled: - return typing.cast( - Callable[Concatenate[Operation[Q, V], Q], Expr[V]], defdata - )(self, *args, **kwargs) - - def __fvs_rule__(self, *args: Q.args, **kwargs: Q.kwargs) -> inspect.BoundArguments: - sig = Scoped.infer_annotations(self.__signature__) - bound_sig = sig.bind(*args, **kwargs) - bound_sig.apply_defaults() - - result_sig = sig.bind( - *(frozenset() for _ in 
bound_sig.args), - **{k: frozenset() for k in bound_sig.kwargs}, - ) - for name, param in sig.parameters.items(): - if typing.get_origin(param.annotation) is typing.Annotated: - for anno in typing.get_args(param.annotation)[1:]: - if isinstance(anno, Scoped): - param_bound_vars = anno.analyze(bound_sig) - if param.kind is inspect.Parameter.VAR_POSITIONAL: - result_sig.arguments[name] = tuple( - param_bound_vars for _ in bound_sig.arguments[name] - ) - elif param.kind is inspect.Parameter.VAR_KEYWORD: - for k in bound_sig.arguments[name]: - result_sig.arguments[name][k] = param_bound_vars - else: - result_sig.arguments[name] = param_bound_vars - - return result_sig - - def __type_rule__(self, *args: Q.args, **kwargs: Q.kwargs) -> type[V]: - from effectful.internals.unification import ( - freetypevars, - nested_type, - substitute, - unify, - ) - - return_anno = self.__signature__.return_annotation - if typing.get_origin(return_anno) is typing.Annotated: - return_anno = typing.get_args(return_anno)[0] - - if return_anno is inspect.Parameter.empty: - return typing.cast(type[V], object) - elif return_anno is None: - return type(None) # type: ignore - elif not freetypevars(return_anno): - return return_anno - - type_args = tuple(nested_type(a) for a in args) - type_kwargs = {k: nested_type(v) for k, v in kwargs.items()} - bound_sig = self.__signature__.bind(*type_args, **type_kwargs) - return substitute(return_anno, unify(self.__signature__, bound_sig)) # type: ignore - - def __repr__(self): - return f"_BaseOperation({self._default}, name={self.__name__}, freshening={self._freshening})" - - def __str__(self): - return self.__name__ - - def __get__(self, instance, owner): - if instance is not None: - # This is an instance-level operation, so we need to bind the instance - return functools.partial(self, instance) - else: - # This is a static operation, so we return the operation itself - return self - - -@defop.register(Operation) -def _[**P, T](t: Operation[P, T], *, 
name: str | None = None) -> Operation[P, T]: - @functools.wraps(t) - def func(*args, **kwargs): - raise NotHandled - - if name is None: - name = getattr(t, "__name__", str(t)) - freshening = getattr(t, "_freshening", []) + [random.randint(0, 1 << 32)] - - return defop(func, name=name, freshening=freshening) - - -@defop.register(type) -@defop.register(typing.cast(type, types.GenericAlias)) -@defop.register(typing.cast(type, typing._GenericAlias)) # type: ignore -@defop.register(typing.cast(type, types.UnionType)) -def _[T](t: type[T], *, name: str | None = None) -> Operation[[], T]: - def func() -> t: # type: ignore - raise NotHandled - - freshening = [] - if name is None: - name = t.__name__ - freshening = [random.randint(0, 1 << 32)] - - return typing.cast( - Operation[[], T], - defop(func, name=name, freshening=freshening), - ) - - -@defop.register(types.BuiltinFunctionType) -def _[**P, T](t: Callable[P, T], *, name: str | None = None) -> Operation[P, T]: - @functools.wraps(t) - def func(*args, **kwargs): - from effectful.ops.semantics import fvsof - - if not fvsof((args, kwargs)): - return t(*args, **kwargs) - else: - raise NotHandled - - return defop(func, name=name) - - -@defop.register(classmethod) -def _[**P, S, T]( # type: ignore - t: classmethod, *, name: str | None = None -) -> Operation[Concatenate[type[S], P], T]: - raise NotImplementedError("classmethod operations are not yet supported") - - -@defop.register(staticmethod) -class _StaticMethodOperation[**P, S, T](_BaseOperation[P, T]): - def __init__(self, default: staticmethod, **kwargs): - super().__init__(default=default.__func__, **kwargs) - - def __get__(self, instance: S, owner: type[S] | None = None) -> Callable[P, T]: - return self - - -@defop.register(property) -class _PropertyOperation[S, T](_BaseOperation[[S], T]): - def __init__(self, default: property, **kwargs): # type: ignore - assert not default.fset, "property with setter is not supported" - assert not default.fdel, "property with 
deleter is not supported" - super().__init__(default=typing.cast(Callable[[S], T], default.fget), **kwargs) - - @typing.overload - def __get__( - self, instance: None, owner: type[S] | None = None - ) -> "_PropertyOperation[S, T]": ... - - @typing.overload - def __get__(self, instance: S, owner: type[S] | None = None) -> T: ... - - def __get__(self, instance, owner: type[S] | None = None): - if instance is not None: - return self(instance) - else: - return self - - -@defop.register(functools.singledispatchmethod) -class _SingleDispatchMethodOperation[**P, S, T](_BaseOperation[Concatenate[S, P], T]): - _default: Callable[Concatenate[S, P], T] - - def __init__(self, default: functools.singledispatchmethod, **kwargs): # type: ignore - if isinstance(default.func, classmethod): - raise NotImplementedError("Operations as classmethod are not yet supported") - - @functools.wraps(default.func) - def _wrapper(obj: S, *args: P.args, **kwargs: P.kwargs) -> T: - return default.__get__(obj)(*args, **kwargs) - - self._registry: functools.singledispatchmethod = default - super().__init__(_wrapper, **kwargs) - - @typing.overload - def __get__( - self, instance: None, owner: type[S] | None = None - ) -> "_SingleDispatchMethodOperation[P, S, T]": ... - - @typing.overload - def __get__(self, instance: S, owner: type[S] | None = None) -> Callable[P, T]: ... 
- - def __get__(self, instance, owner: type[S] | None = None): - if instance is not None: - return functools.partial(self, instance) - else: - return self - - @property - def register(self): - return self._registry.register - - @property - def __isabstractmethod__(self): - return self._registry.__isabstractmethod__ - - -class _SingleDispatchOperation[**P, S, T](_BaseOperation[Concatenate[S, P], T]): - _default: "functools._SingleDispatchCallable[T]" - - @property - def register(self): - return self._default.register - - @property - def dispatch(self): - return self._default.dispatch - - -if typing.TYPE_CHECKING: - defop.register(functools._SingleDispatchCallable)(_SingleDispatchOperation) -else: - - @typing.runtime_checkable - class _SingleDispatchCallable(typing.Protocol): - registry: types.MappingProxyType[object, Callable] - - def dispatch(self, cls: type) -> Callable: ... - def register(self, cls: type, func: Callable | None = None) -> Callable: ... - def _clear_cache(self) -> None: ... - def __call__(self, /, *args, **kwargs): ... 
- - defop.register(_SingleDispatchCallable)(_SingleDispatchOperation) - - -@defop +@Operation.define def deffn[T, A, B]( body: Annotated[T, Scoped[A | B]], *args: Annotated[Operation, Scoped[A]], @@ -817,43 +427,6 @@ def deffn[T, A, B]( raise NotHandled -class _CustomSingleDispatchCallable[**P, **Q, S, T]: - def __init__( - self, func: Callable[Concatenate[Callable[[type], Callable[Q, S]], P], T] - ): - self.func = func - self._registry = functools.singledispatch(func) - functools.update_wrapper(self, func) - - @property - def dispatch(self): - return self._registry.dispatch - - @property - def register(self): - return self._registry.register - - def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: - return self.func(self.dispatch, *args, **kwargs) - - -@defop.register(_CustomSingleDispatchCallable) -class _CustomSingleDispatchOperation[**P, **Q, S, T](_BaseOperation[P, T]): - _default: _CustomSingleDispatchCallable[P, Q, S, T] - - def __init__(self, default: _CustomSingleDispatchCallable[P, Q, S, T], **kwargs): - super().__init__(default, **kwargs) - self.__signature__ = inspect.signature(functools.partial(default.func, None)) # type: ignore - - @property - def dispatch(self): - return self._registry.dispatch - - @property - def register(self): - return self._registry.register - - @_CustomSingleDispatchCallable def defterm[T](__dispatch: Callable[[type], Callable[[T], Expr[T]]], value: T): """Convert a value to a term, using the type of the value to dispatch. @@ -861,10 +434,11 @@ def defterm[T](__dispatch: Callable[[type], Callable[[T], Expr[T]]], value: T): :param value: The value to convert. :returns: A term. 
""" - if isinstance(value, Term): - return value - else: - return __dispatch(type(value))(value) + from effectful.ops.semantics import evaluate + + warnings.warn("defterm is replaced by evaluate", DeprecationWarning) + + return evaluate(value) @_CustomSingleDispatchCallable @@ -942,14 +516,16 @@ def __call__(self: collections.abc.Callable[P, T], *args: P.args, **kwargs: P.kw cast = defop(object, name="cast") def apply_type(op, *args, **kwargs): + from effectful.internals.unification import Box + assert isinstance(op, Operation) tp = op.__type_rule__(*args, **kwargs) - return tp + return Box(tp) def apply_cast(op, *args, **kwargs): assert isinstance(op, Operation) full_type = typ() - dispatch_type = _simple_type(full_type) + dispatch_type = _simple_type(full_type.value) return __dispatch(dispatch_type)(op, *args, **kwargs) analysis = productN({typ: {apply: apply_type}, cast: {apply: apply_cast}}) @@ -987,15 +563,6 @@ def evaluate_with_renaming(expr, ctx): return _unpack(result, cast) -@defterm.register(object) -@defterm.register(Operation) -@defterm.register(Term) -@defterm.register(type) -@defterm.register(types.BuiltinFunctionType) -def _[T](value: T) -> T: - return value - - @defdata.register(object) class _BaseTerm[T](Term[T]): _op: Operation[..., T] @@ -1348,7 +915,6 @@ def __int__(self) -> int: def __bool__(self) -> bool: raise ValueError("Cannot convert term to bool") - @defop # type: ignore[prop-decorator] @property def real(self) -> float: if not isinstance(self, Term): @@ -1356,7 +922,6 @@ def real(self) -> float: else: raise NotHandled - @defop # type: ignore[prop-decorator] @property def imag(self) -> float: if not isinstance(self, Term): @@ -1371,7 +936,6 @@ def conjugate(self) -> complex: else: raise NotHandled - @defop # type: ignore[prop-decorator] @property def numerator(self) -> int: if not isinstance(self, Term): @@ -1379,7 +943,6 @@ def numerator(self) -> int: else: raise NotHandled - @defop # type: ignore[prop-decorator] @property def 
denominator(self) -> int: if not isinstance(self, Term): @@ -1452,7 +1015,7 @@ def __index__(self) -> int: raise NotHandled @defop - def __eq__(self, other) -> bool: # type: ignore[override] + def __eq__(self, other) -> bool: if not isinstance(self, Term) and not isinstance(other, Term): return self.__eq__(other) else: diff --git a/effectful/ops/types.py b/effectful/ops/types.py index 015878eb..1599bb6a 100644 --- a/effectful/ops/types.py +++ b/effectful/ops/types.py @@ -1,12 +1,19 @@ -from __future__ import annotations - import abc import collections.abc import functools import inspect +import types import typing +import warnings from collections.abc import Callable, Mapping, Sequence -from typing import Any, _ProtocolMeta, overload, runtime_checkable +from typing import ( + Any, + Concatenate, + Protocol, + _ProtocolMeta, + overload, + runtime_checkable, +) class NotHandled(Exception): @@ -15,65 +22,472 @@ class NotHandled(Exception): pass +class _CustomSingleDispatchCallable[**P, **Q, S, T]: + def __init__( + self, func: Callable[Concatenate[Callable[[type], Callable[Q, S]], P], T] + ): + self.func = func + self._registry = functools.singledispatch(func) + self.__signature__ = inspect.signature(functools.partial(func, None)) # type: ignore[arg-type] + functools.update_wrapper(self, func) + + @property + def dispatch(self): + return self._registry.dispatch + + @property + def register(self): + return self._registry.register + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: + return self.func(self.dispatch, *args, **kwargs) + + +class _ClassMethodOpDescriptor(classmethod): + def __init__(self, define, *args, **kwargs): + super().__init__(*args, **kwargs) + self._define = define + + def __set_name__(self, owner, name): + assert not hasattr(self, "_name_on_owner"), "should only be called once" + self._name_on_owner = f"_descriptorop_{name}" + + def __get__(self, instance, owner: type | None = None): + owner = owner if owner is not None else 
type(instance) + try: + return owner.__dict__[self._name_on_owner] + except KeyError: + bound_op = self._define(super().__get__(instance, owner)) + setattr(owner, self._name_on_owner, bound_op) + return bound_op + + @functools.total_ordering -class Operation[**Q, V](abc.ABC): +class Operation[**Q, V]: """An abstract class representing an effect that can be implemented by an effect handler. .. note:: - Do not use :class:`Operation` directly. Instead, use :func:`defop` to define operations. + Do not instantiate :class:`Operation` directly. Instead, use + :func:`define` to define operations. """ __signature__: inspect.Signature __name__: str + __default__: Callable[Q, V] + + def __init__( + self, signature: inspect.Signature, name: str, default: Callable[Q, V] + ): + functools.update_wrapper(self, default) + + self.__signature__ = signature + self.__name__ = name + self.__default__ = default - @abc.abstractmethod def __eq__(self, other): - raise NotImplementedError + if not isinstance(other, Operation): + return NotImplemented + return self is other + + def __lt__(self, other): + if not isinstance(other, Operation): + return NotImplemented + return id(self) < id(other) - @abc.abstractmethod def __hash__(self): - raise NotImplementedError + return hash(self.__default__) - @abc.abstractmethod - def __lt__(self, other): + @functools.singledispatchmethod + @classmethod + def define[**P, T]( + cls: Callable[P, T], default: Callable[Q, V], *, name: str | None = None + ) -> "Operation[P, T]": + """Creates a fresh :class:`Operation`. + + :param t: May be a type, callable, or :class:`Operation`. If a type, the + operation will have no arguments and return the type. If a + callable, the operation will have the same signature as the + callable, but with no default rule. If an operation, the + operation will be a distinct copy of the operation. + :param name: Optional name for the operation. + :returns: A fresh operation. + + .. 
note:: + + The result of :func:`Operation.define` is always fresh (i.e. + ``Operation.define(f) != Operation.define(f)``). + + **Example usage**: + + * Defining an operation: + + This example defines an operation that selects one of two integers: + + >>> @Operation.define + ... def select(x: int, y: int) -> int: + ... return x + + The operation can be called like a regular function. By default, + ``select`` returns the first argument: + + >>> select(1, 2) + 1 + + We can change its behavior by installing a ``select`` handler: + + >>> from effectful.ops.semantics import handler + >>> with handler({select: lambda x, y: y}): + ... print(select(1, 2)) + 2 + + * Defining an operation with no default rule: + + We can use :func:`Operation.define` and the :exc:`NotHandled` + exception to define an operation with no default rule: + + >>> @Operation.define + ... def add(x: int, y: int) -> int: + ... raise NotHandled + >>> print(str(add(1, 2))) + add(1, 2) + + When an operation has no default rule, the free rule is used instead, + which constructs a term of the operation applied to its arguments. + This feature can be used to conveniently define the syntax of a + domain-specific language. + + * Defining free variables: + + Passing :func:`Operation.define` a type creates a free variable. + + >>> from effectful.ops.semantics import evaluate + >>> x = Operation.define(int, name='x') + >>> y = x() + 1 + + ``y`` is free in ``x``, so it is not fully evaluated: + + >>> print(str(y)) + __add__(x(), 1) + + We bind ``x`` by installing a handler for it: + + >>> with handler({x: lambda: 2}): + ... print(evaluate(y)) + 3 + + .. note:: + + Because the result of :func:`Operation.define` is always fresh, it's + important to be careful with variable identity. 
+ + Two operations with the same name that come from different calls to + ``Operation.define`` are not equal: + + >>> x1 = Operation.define(int, name='x') + >>> x2 = Operation.define(int, name='x') + >>> x1 == x2 + False + + This means that to correctly bind a variable, you must use the same + operation object. In this example, ``scale`` returns a term with a + free variable ``x``: + + >>> x = Operation.define(float, name='x') + >>> def scale(a: float) -> float: + ... return x() * a + + Binding the variable ``x`` as follows does not work: + + >>> term = scale(3.0) + >>> fresh_x = Operation.define(float, name='x') + >>> with handler({fresh_x: lambda: 2.0}): + ... print(str(evaluate(term))) + __mul__(x(), 3.0) + + Only the original operation object will work: + + >>> from effectful.ops.semantics import fvsof + >>> with handler({x: lambda: 2.0}): + ... print(evaluate(term)) + 6.0 + + * Defining a fresh :class:`Operation`: + + Passing :func:`Operation.define` an :class:`Operation` creates a fresh + operation with the same name and signature, but no default rule. + + >>> fresh_select = Operation.define(select) + >>> print(str(fresh_select(1, 2))) + select(1, 2) + + The new operation is distinct from the original: + + >>> with handler({select: lambda x, y: y}): + ... print(select(1, 2), fresh_select(1, 2)) + 2 select(1, 2) + + >>> with handler({fresh_select: lambda x, y: y}): + ... 
print(select(1, 2), fresh_select(1, 2)) + 1 2 + + """ raise NotImplementedError - @abc.abstractmethod - def __default_rule__(self, *args: Q.args, **kwargs: Q.kwargs) -> Expr[V]: + @define.register( + typing.cast(type[collections.abc.Callable], collections.abc.Callable) + ) + @classmethod + def _define_callable[**P, T]( + cls, t: Callable[P, T], *, name: str | None = None + ) -> "Operation[P, T]": + if isinstance(t, Operation): + + @functools.wraps(t) + def func(*args, **kwargs): + raise NotHandled + + op = cls.define(func, name=name) + else: + name = name or t.__name__ + op = cls(inspect.signature(t), name, t) # type: ignore[arg-type] + + return op # type: ignore[return-value] + + @define.register(type) + @define.register(typing.cast(type, types.GenericAlias)) + @define.register(typing.cast(type, typing._GenericAlias)) # type: ignore[attr-defined] + @define.register(typing.cast(type, types.UnionType)) + @classmethod + def _define_type[T](cls, t: type[T], **kwargs) -> "Operation[[], T]": + def func(): + raise NotHandled + + func.__signature__ = inspect.Signature(return_annotation=t) # type: ignore[attr-defined] + func.__name__ = t.__name__ + return typing.cast(Operation[[], T], cls.define(func, **kwargs)) + + @define.register(types.BuiltinFunctionType) + @classmethod + def _define_builtinfunctiontype[**P, T]( + cls, t: Callable[P, T], **kwargs + ) -> "Operation[P, T]": + @functools.wraps(t) + def func(*args, **kwargs): + from effectful.ops.semantics import fvsof + + if not fvsof((args, kwargs)): + return t(*args, **kwargs) + else: + raise NotHandled + + return typing.cast(Operation[P, T], cls.define(func, **kwargs)) + + @define.register(staticmethod) + @classmethod + def _define_staticmethod[**P, T](cls, t: "staticmethod[P, T]", **kwargs): + return staticmethod(cls.define(t.__func__, **kwargs)) + + @define.register(classmethod) + @classmethod + def _define_classmethod(cls, default, **kwargs): + return _ClassMethodOpDescriptor(cls.define, default.__func__) + + 
@define.register(functools.singledispatchmethod) + @classmethod + def _define_singledispatchmethod(cls, default, **kwargs): + if isinstance(default.func, classmethod): + raise NotImplementedError("Operations as classmethod are not yet supported") + + @functools.wraps(default.func) + def _wrapper(obj, *args, **kwargs): + return default.__get__(obj)(*args, **kwargs) + + op = cls.define(_wrapper, **kwargs) + op.register = default.register + op.__isabstractmethod__ = default.__isabstractmethod__ + return op + + @define.register(_CustomSingleDispatchCallable) + @classmethod + def _defop_customsingledispatchcallable( + cls, default: _CustomSingleDispatchCallable, **kwargs + ): + @functools.wraps(default) + def func(*args, **kwargs): + return default(*args, **kwargs) + + op = cls.define(func, **kwargs) + op.dispatch = default._registry.dispatch # type: ignore[attr-defined] + op.register = default._registry.register # type: ignore[attr-defined] + return op + + @typing.final + def __default_rule__(self, *args: Q.args, **kwargs: Q.kwargs) -> "Expr[V]": """The default rule is used when the operation is not handled. If no default rule is supplied, the free rule is used instead. """ - raise NotImplementedError + try: + try: + return self.__default__(*args, **kwargs) + except NotImplementedError: + warnings.warn( + "Operations should raise effectful.ops.types.NotHandled instead of NotImplementedError.", + DeprecationWarning, + ) + raise NotHandled + except NotHandled: + from effectful.ops.syntax import defdata - @abc.abstractmethod + return typing.cast( + Callable[Concatenate[Operation[Q, V], Q], Expr[V]], defdata + )(self, *args, **kwargs) + + @typing.final def __type_rule__(self, *args: Q.args, **kwargs: Q.kwargs) -> type[V]: - """Returns the type of the operation applied to arguments.""" - raise NotImplementedError + """Returns the type of the operation applied to arguments. 
- @abc.abstractmethod - def __fvs_rule__(self, *args: Q.args, **kwargs: Q.kwargs) -> inspect.BoundArguments: - """ - Returns the sets of variables that appear free in each argument and keyword argument - but not in the result of the operation, i.e. the variables bound by the operation. + .. note:: + + The arguments are expected to be either + :class:`effectful.internals.unification.Box`ed types or collections + containing values and :class:`effectful.internals.unification.Box`ed + types. Callers are expected to apply the appropriate boxing. Boxing + the input types prevents confusion between types and values and + allows for terms that compute on type-valued arguments. - These are used by :func:`fvsof` to determine the free variables of a term by - subtracting the results of this method from the free variables of the subterms, - allowing :func:`fvsof` to be implemented in terms of :func:`evaluate` . """ - raise NotImplementedError + from effectful.internals.unification import ( + freetypevars, + nested_type, + substitute, + unify, + ) + + return_anno = self.__signature__.return_annotation + if typing.get_origin(return_anno) is typing.Annotated: + return_anno = typing.get_args(return_anno)[0] + + if return_anno is inspect.Parameter.empty: + return typing.cast(type[V], object) + elif return_anno is None: + return type(None) # type: ignore + elif not freetypevars(return_anno): + return return_anno + + type_args = tuple(nested_type(a).value for a in args) + type_kwargs = {k: nested_type(v).value for k, v in kwargs.items()} + bound_sig = self.__signature__.bind(*type_args, **type_kwargs) + subst_type = substitute(return_anno, unify(self.__signature__, bound_sig)) + return typing.cast(type[V], subst_type) @typing.final + def __fvs_rule__(self, *args: Q.args, **kwargs: Q.kwargs) -> inspect.BoundArguments: + """Returns the sets of variables that appear free in each argument and + keyword argument but not in the result of the operation, i.e. 
the + variables bound by the operation. + + These are used by :func:`fvsof` to determine the free variables of a + term by subtracting the results of this method from the free variables + of the subterms, allowing :func:`fvsof` to be implemented in terms of + :func:`evaluate` . + + """ + from effectful.ops.syntax import Scoped + + sig = Scoped.infer_annotations(self.__signature__) + bound_sig = sig.bind(*args, **kwargs) + bound_sig.apply_defaults() + + result_sig = sig.bind( + *(frozenset() for _ in bound_sig.args), + **{k: frozenset() for k in bound_sig.kwargs}, + ) + for name, param in sig.parameters.items(): + if typing.get_origin(param.annotation) is typing.Annotated: + for anno in typing.get_args(param.annotation)[1:]: + if isinstance(anno, Scoped): + param_bound_vars = anno.analyze(bound_sig) + if param.kind is inspect.Parameter.VAR_POSITIONAL: + result_sig.arguments[name] = tuple( + param_bound_vars for _ in bound_sig.arguments[name] + ) + elif param.kind is inspect.Parameter.VAR_KEYWORD: + for k in bound_sig.arguments[name]: + result_sig.arguments[name][k] = param_bound_vars + else: + result_sig.arguments[name] = param_bound_vars + + return result_sig + def __call__(self, *args: Q.args, **kwargs: Q.kwargs) -> V: + from effectful.internals.runtime import get_interpretation from effectful.ops.semantics import apply - return apply.__default_rule__(self, *args, **kwargs) # type: ignore + intp = get_interpretation() + + self_handler = intp.get(self) + if self_handler is not None: + return self_handler(*args, **kwargs) + + class_apply_handler = intp.get(type(self).apply) + if class_apply_handler is not None: + return class_apply_handler(self, *args, **kwargs) + + global_apply_handler = intp.get(apply) + if global_apply_handler is not None: + return global_apply_handler(self, *args, **kwargs) + + # Use type(self) instead of self because we do not want a bound method + class_apply = type(self).apply + + # In Operation, cls.apply is a classmethod. 
In subclasses, it is an operation. + if isinstance(class_apply, Operation): + return class_apply.__default_rule__(self, *args, **kwargs) # type: ignore[return-value] + return class_apply(self, *args, **kwargs) # type: ignore[return-value] def __repr__(self): return f"{self.__class__.__name__}({self.__name__}, {self.__signature__})" + def __str__(self): + return self.__name__ + + def __get__(self, instance, owner): + if instance is not None: + # This is an instance-level operation, so we need to bind the instance + return types.MethodType(self, instance) + else: + # This is a static operation, so we return the operation itself + return self + + @classmethod + def apply[**A, B]( + cls, op: "Operation[A, B]", *args: A.args, **kwargs: A.kwargs + ) -> "Expr[B]": + """Apply an operation to arguments. + + In subclasses of Operation, `apply` is an operation that may be handled. + + """ + return op.__default_rule__(*args, **kwargs) + + def __init_subclass__(cls, **kwargs): + super().__init_subclass__(**kwargs) + cls.apply = cls.define(cls.apply, name=f"{cls.__name__}_apply") + + +if typing.TYPE_CHECKING: + + @runtime_checkable + class _OperationDefine(Protocol): + def __call__[**Q, V]( + self, op: Callable[Q, V], *, name: str | None = None + ) -> Operation[Q, V]: ... 
+ + assert isinstance(Operation.define, _OperationDefine) + class Term[T](abc.ABC): """A term in an effectful computation is a is a tree of :class:`Operation` @@ -91,13 +505,13 @@ def op(self) -> Operation[..., T]: @property @abc.abstractmethod - def args(self) -> Sequence[Expr[Any]]: + def args(self) -> Sequence["Expr[Any]"]: """Abstract property for the arguments.""" raise NotImplementedError @property @abc.abstractmethod - def kwargs(self) -> Mapping[str, Expr[Any]]: + def kwargs(self) -> Mapping[str, "Expr[Any]"]: """Abstract property for the keyword arguments.""" raise NotImplementedError @@ -158,6 +572,31 @@ def _apply(op, *args, **kwargs) -> str: return typing.cast(str, evaluate(self)) +try: + from prettyprinter import install_extras, pretty_call, register_pretty + + install_extras({"dataclasses"}) + + @register_pretty(Term) + def pretty_term(value: Term, ctx): + default_op_name = str(value.op) + + fresh_by_name = ctx.get("fresh_by_name") or {} + new_ctx = ctx.assoc("fresh_by_name", fresh_by_name) + + fresh = fresh_by_name.get(default_op_name, {}) + fresh_by_name[default_op_name] = fresh + + fresh_ctr = fresh.get(value.op, len(fresh)) + fresh[value.op] = fresh_ctr + + op_name = str(value.op) + (f"!{fresh_ctr}" if fresh_ctr > 0 else "") + return pretty_call(new_ctx, op_name, *value.args, **value.kwargs) + +except ImportError: + pass + + #: An expression is either a value or a term. 
type Expr[T] = T | Term[T] diff --git a/pyproject.toml b/pyproject.toml index d7030779..ac502509 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "effectful" -version = "0.2.2" +version = "0.2.3" description = "Metaprogramming infrastructure" readme = "README.rst" license = "Apache-2.0" @@ -45,18 +45,19 @@ llm = [ "pillow", "pydantic", ] +prettyprinter = ["prettyprinter"] docs = [ - "effectful[torch,pyro,jax,numpyro]", + "effectful[torch,pyro,jax,numpyro,prettyprinter]", "sphinx", "sphinxcontrib-bibtex", "sphinx_rtd_theme", "myst-parser", "nbsphinx", "sphinx_autodoc_typehints", - "pypandoc", + "pypandoc_binary<1.16", ] test = [ - "effectful[torch,pyro,jax,numpyro,docs]", + "effectful[torch,pyro,jax,numpyro,docs,prettyprinter]", "pytest", "pytest-cov", "pytest-xdist", diff --git a/tests/test_handlers_jax.py b/tests/test_handlers_jax.py index b846b395..b53a0982 100644 --- a/tests/test_handlers_jax.py +++ b/tests/test_handlers_jax.py @@ -374,6 +374,9 @@ def test_array_eq(): y = jnp.array([1, 2, 3]) assert syntactic_eq(x + y, x + y) + z = jnp.array([1, 2, 3, 4]) + assert not syntactic_eq(x + y, x + z) + def test_jax_rotation(): import jax.scipy.spatial.transform @@ -926,12 +929,15 @@ def test_arrayterm_transpose(): array_term = jax_getitem(base_array, (i(),)) result = array_term.transpose() + result_T = array_term.T jnp_result = jnp.transpose(array_term) with handler({i: lambda: jnp.array([0, 1])}): eval_result = evaluate(result) + eval_result_T = evaluate(result_T) eval_jnp_result = evaluate(jnp_result) assert jnp.allclose(eval_result, eval_jnp_result) + assert jnp.allclose(eval_result_T, eval_jnp_result) def test_arrayterm_var(): diff --git a/tests/test_handlers_numpyro.py b/tests/test_handlers_numpyro.py index e9e5e84c..cb045940 100644 --- a/tests/test_handlers_numpyro.py +++ b/tests/test_handlers_numpyro.py @@ -11,6 +11,7 @@ import effectful.handlers.jax.numpy as jnp import 
effectful.handlers.numpyro as dist from effectful.handlers.jax import bind_dims, jax_getitem, sizesof, unbind_dims +from effectful.ops.semantics import typeof from effectful.ops.syntax import defop from effectful.ops.types import Operation, Term @@ -43,7 +44,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): for batch_shape in [(5,), (2, 3, 4), ()]: # BernoulliProbs add_case( - "dist.BernoulliProbs(probs=case.probs)", + "dist.BernoulliProbs(case.probs)", (("probs", f"rand({batch_shape})"),), batch_shape, ) @@ -67,7 +68,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # Binomial add_case( - "dist.BinomialProbs(total_count=case.total_count, probs=case.probs)", + "dist.BinomialProbs(case.probs, case.total_count)", ( ("total_count", "5"), ("probs", f"rand({batch_shape})"), @@ -78,7 +79,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # CategoricalLogits for size in [2, 4]: add_case( - "dist.CategoricalLogits(logits=case.logits)", + "dist.CategoricalLogits(case.logits)", (("logits", f"rand({batch_shape + (size,)})"),), batch_shape, ) @@ -86,25 +87,25 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # CategoricalProbs for size in [2, 4]: add_case( - "dist.CategoricalProbs(probs=case.probs)", + "dist.CategoricalProbs(case.probs)", (("probs", f"rand({batch_shape + (size,)})"),), batch_shape, ) # Cauchy add_case( - "dist.Cauchy(loc=case.loc, scale=case.scale)", + "dist.Cauchy(case.loc, case.scale)", (("loc", f"rand({batch_shape})"), ("scale", f"rand({batch_shape})")), batch_shape, ) # Chi2 - add_case("dist.Chi2(df=case.df)", (("df", f"rand({batch_shape})"),), batch_shape) + add_case("dist.Chi2(case.df)", (("df", f"rand({batch_shape})"),), batch_shape) # Delta for event_shape in [(), (4,), (3, 2)]: add_case( - f"dist.Delta(v=case.v, log_density=case.log_density, event_dim={len(event_shape)})", + f"dist.Delta(case.v, case.log_density, {len(event_shape)})", ( ("v", f"rand({batch_shape + event_shape})"), 
("log_density", f"rand({batch_shape})"), @@ -133,9 +134,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # Exponential add_case( - "dist.Exponential(rate=case.rate)", - (("rate", f"rand({batch_shape})"),), - batch_shape, + "dist.Exponential(case.rate)", (("rate", f"rand({batch_shape})"),), batch_shape ) # Gamma @@ -147,47 +146,43 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # Geometric add_case( - "dist.GeometricProbs(probs=case.probs)", + "dist.GeometricProbs(case.probs)", (("probs", f"rand({batch_shape})"),), batch_shape, ) add_case( - "dist.GeometricLogits(logits=case.logits)", + "dist.GeometricLogits(case.logits)", (("logits", f"rand({batch_shape})"),), batch_shape, ) # Gumbel add_case( - "dist.Gumbel(loc=case.loc, scale=case.scale)", + "dist.Gumbel(case.loc, case.scale)", (("loc", f"rand({batch_shape})"), ("scale", f"rand({batch_shape})")), batch_shape, ) # HalfCauchy add_case( - "dist.HalfCauchy(scale=case.scale)", - (("scale", f"rand({batch_shape})"),), - batch_shape, + "dist.HalfCauchy(case.scale)", (("scale", f"rand({batch_shape})"),), batch_shape ) # HalfNormal add_case( - "dist.HalfNormal(scale=case.scale)", - (("scale", f"rand({batch_shape})"),), - batch_shape, + "dist.HalfNormal(case.scale)", (("scale", f"rand({batch_shape})"),), batch_shape ) # Laplace add_case( - "dist.Laplace(loc=case.loc, scale=case.scale)", + "dist.Laplace(case.loc, case.scale)", (("loc", f"rand({batch_shape})"), ("scale", f"rand({batch_shape})")), batch_shape, ) # Logistic add_case( - "dist.Logistic(loc=case.loc, scale=case.scale)", + "dist.Logistic(case.loc, case.scale)", (("loc", f"rand({batch_shape})"), ("scale", f"rand({batch_shape})")), batch_shape, ) @@ -195,7 +190,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # # LowRankMultivariateNormal for event_shape in [(3,), (4,)]: add_case( - "dist.LowRankMultivariateNormal(loc=case.loc, cov_factor=case.cov_factor, cov_diag=case.cov_diag)", + 
"dist.LowRankMultivariateNormal(case.loc, case.cov_factor, case.cov_diag)", ( ("loc", f"rand({batch_shape + event_shape})"), ("cov_factor", f"rand({batch_shape + event_shape + (2,)})"), @@ -227,7 +222,9 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): for n_event in [1, 3]: event_shape = (n_event,) add_case( - "dist.MultivariateNormal(loc=case.loc, scale_tril=case.scale_tril)", + # FIXME: See https://github.com/BasisResearch/effectful/issues/310 + # The better call would be dist.MultivariateNormal(case.loc, scale_tril=case.scale_tril) + "dist.MultivariateNormal(case.loc, None, None, case.scale_tril)", ( ("loc", f"rand({batch_shape + event_shape})"), ("scale_tril", f"random_scale_tril({batch_shape}, {n_event})"), @@ -237,7 +234,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # NegativeBinomial add_case( - "dist.NegativeBinomialProbs(total_count=case.total_count, probs=case.probs)", + "dist.NegativeBinomialProbs(case.total_count, case.probs)", ( ("total_count", "5"), ("probs", f"rand({batch_shape})"), @@ -246,7 +243,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): ) add_case( - "dist.NegativeBinomialLogits(total_count=case.total_count, logits=case.logits)", + "dist.NegativeBinomialLogits(case.total_count, case.logits)", ( ("total_count", "5"), ("logits", f"rand({batch_shape})"), @@ -263,28 +260,26 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # Pareto add_case( - "dist.Pareto(scale=case.scale, alpha=case.alpha)", + "dist.Pareto(case.scale, case.alpha)", (("scale", f"rand({batch_shape})"), ("alpha", f"rand({batch_shape})")), batch_shape, ) # Poisson add_case( - "dist.Poisson(rate=case.rate)", - (("rate", f"rand({batch_shape})"),), - batch_shape, + "dist.Poisson(case.rate)", (("rate", f"rand({batch_shape})"),), batch_shape ) # RelaxedBernoulli add_case( - "dist.RelaxedBernoulliLogits(temperature=case.temperature, logits=case.logits)", + "dist.RelaxedBernoulliLogits(case.temperature, case.logits)", 
(("temperature", f"rand({batch_shape})"), ("logits", f"rand({batch_shape})")), batch_shape, ) # StudentT add_case( - "dist.StudentT(df=case.df, loc=case.loc, scale=case.scale)", + "dist.StudentT(case.df, case.loc, case.scale)", ( ("df", f"rand({batch_shape})"), ("loc", f"rand({batch_shape})"), @@ -295,7 +290,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # Uniform add_case( - "dist.Uniform(low=case.low, high=case.high)", + "dist.Uniform(case.low, case.high)", (("low", f"rand({batch_shape})"), ("high", f"2. + rand({batch_shape})")), batch_shape, ) @@ -309,7 +304,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): # Weibull add_case( - "dist.Weibull(scale=case.scale, concentration=case.concentration)", + "dist.Weibull(case.scale, case.concentration)", ( ("scale", f"exp(rand({batch_shape}))"), ("concentration", f"exp(rand({batch_shape}))"), @@ -322,7 +317,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): add_case( """ dist.TransformedDistribution( - dist.Uniform(low=case.low, high=case.high), + dist.Uniform(case.low, case.high), [dist.transforms.ExpTransform()]) """, (("low", f"rand({batch_shape})"), ("high", f"2. + rand({batch_shape})")), @@ -334,7 +329,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): add_case( """ dist.TransformedDistribution( - dist.Uniform(low=case.low, high=case.high), + dist.Uniform(case.low, case.high), [dist.transforms.ExpTransform().inv]) """, (("low", f"rand({batch_shape})"), ("high", f"2. + rand({batch_shape})")), @@ -346,7 +341,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): add_case( """ dist.TransformedDistribution( - dist.Uniform(low=case.low, high=case.high), + dist.Uniform(case.low, case.high), [dist.transforms.TanhTransform(),]) """, (("low", f"rand({batch_shape})"), ("high", f"2. 
+ rand({batch_shape})")), @@ -358,7 +353,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): add_case( """ dist.TransformedDistribution( - dist.Uniform(low=case.low, high=case.high), + dist.Uniform(case.low, case.high), [dist.transforms.TanhTransform().inv]) """, ( @@ -373,7 +368,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): add_case( """ dist.TransformedDistribution( - dist.Uniform(low=case.low, high=case.high), + dist.Uniform(case.low, case.high), [dist.transforms.TanhTransform(), dist.transforms.ExpTransform()]) """, @@ -386,7 +381,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): add_case( """ dist.TransformedDistribution( - dist.Uniform(low=case.low, high=case.high), + dist.Uniform(case.low, case.high), dist.transforms.ComposeTransform([ dist.transforms.TanhTransform(), dist.transforms.ExpTransform()])) @@ -400,7 +395,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): add_case( """ dist.TransformedDistribution( - dist.Exponential(rate=case.rate), + dist.Exponential(case.rate), dist.transforms.PowerTransform(0.5)) """, (("rate", f"rand({batch_shape})"),), @@ -412,7 +407,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): add_case( """ dist.TransformedDistribution( - dist.Normal(loc=case.loc, scale=1.).to_event(1), + dist.Normal(case.loc, 1.).to_event(1), dist.transforms.HaarTransform(dim=-1)) """, (("loc", f"rand({batch_shape} + (3,))"),), @@ -452,7 +447,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): f""" dist.Independent( dist.TransformedDistribution( - dist.Uniform(low=case.low, high=case.high), + dist.Uniform(case.low, case.high), dist.transforms.ComposeTransform([ dist.transforms.TanhTransform(), dist.transforms.ExpTransform()])), @@ -469,7 +464,7 @@ def add_case(raw_dist, raw_params, batch_shape, xfail=None): @functools.cache def name_to_sym(name: str) -> Operation[[], jax.Array]: - return defop(jax.Array, name=name) # type: ignore + return defop(jax.Array, 
name=name) def from_indexed(tensor, batch_dims): @@ -717,9 +712,9 @@ def test_dist_expand(case_, sample_shape, indexed_sample_shape, extra_batch_shap sample = expanded.sample(key, sample_shape_full) # Index into the sample - indexed_sample = sample[ - tuple(defop(jax.Array)() for _ in range(len(indexed_sample_shape))) - ] + indexed_sample = jax_getitem( + sample, tuple(defop(jax.Array)() for _ in range(len(indexed_sample_shape))) + ) # Check shapes expected_shape = ( @@ -818,8 +813,9 @@ def test_dist_stats(case_, statistic): try: expected = getattr(dist, statistic) - except NotImplementedError: - pytest.xfail(f"Statistic {statistic} not implemented for {case_.raw_dist}") + except RuntimeError as e: + if "not implemented" in repr(e): + pytest.xfail(f"Statistic {statistic} not implemented for {case_.raw_dist}") actual = getattr(indexed_dist, statistic) @@ -840,9 +836,7 @@ def test_distribution_terms(): y = defop(jax.Array, name="y") d1 = dist.Normal(x(), y()) - assert isinstance(d1, Term) and not isinstance( - d1, numpyro.distributions.Distribution - ) + assert isinstance(d1, Term) and isinstance(d1, numpyro.distributions.Distribution) a = jax_getitem(jnp.array([0.0]), [x()]) b = jax_getitem(jnp.array([1.0]), [y()]) @@ -850,6 +844,77 @@ def test_distribution_terms(): assert isinstance(d2, Term) and isinstance(d2, numpyro.distributions.Distribution) d3 = dist.Normal(jnp.array(0.0), jnp.array(1.0)) - assert not isinstance(d3, Term) and isinstance( - d3, numpyro.distributions.Distribution + assert isinstance(d3, Term) and isinstance(d3, numpyro.distributions.Distribution) + + +@pytest.mark.parametrize( + "dist_factory,dist_args", + [(dist.Normal, []), (dist.BernoulliProbs, [jnp.array(0.5)])], +) +def test_concrete_dist(dist_factory, dist_args): + """Test concrete distribution with indexed argument.""" + index = defop(jax.Array, name="index") + sample = defop(jax.Array, name="sample")() + + distribution = dist_factory(*dist_args) + sample_array = jnp.array([0.0, 1.0, 
2.0]) + indexed_sample_array = jax_getitem(sample_array, [index()]) + + assert isinstance(distribution.log_prob(sample_array), jax.Array) + + assert isinstance(distribution.log_prob(sample), Term) + + indexed_sample_logprob = distribution.log_prob(indexed_sample_array) + assert isinstance(indexed_sample_logprob, Term) and isinstance( + bind_dims(indexed_sample_logprob, index), jax.Array + ) + + +@pytest.mark.parametrize( + "dist_factory,param_array", + [ + (dist.Normal, jnp.array([0.0, 1.0, 2.0])), + (dist.BernoulliProbs, jnp.array([0.5, 0.25])), + ], +) +def test_indexed_dist_symbolic_argument(dist_factory, param_array): + """Test indexed distribution with symbolic argument.""" + sample = defop(jax.Array, name="sample")() + index = defop(jax.Array, name="index") + indexed_dist = dist_factory(jax_getitem(param_array, [index()])) + assert isinstance(indexed_dist.log_prob(sample), Term) + + +@pytest.mark.parametrize("dist_factory", [dist.Normal, dist.BernoulliProbs]) +def test_symbolic_dist(dist_factory): + """Test symbolic distribution with concrete argument.""" + param_sym = defop(jax.Array, name="param")() + sample_sym = defop(jax.Array, name="sample")() + index = defop(jax.Array, name="index") + distribution = dist_factory(param_sym) + + result = distribution.log_prob(jnp.array(0.0)) + assert isinstance(result, Term) + + result = distribution.log_prob(sample_sym) + assert isinstance(result, Term) + + indexed_sample_logprob = distribution.log_prob( + jax_getitem(jnp.array([0.0, 1.0]), [index()]) + ) + assert isinstance(indexed_sample_logprob, Term) + + +def test_distribution_typeof(): + """Check that typeof() behaves correctly on distribution-valued terms.""" + assert ( + typeof(dist.Normal(defop(jax.Array)())) + is numpyro.distributions.continuous.Normal + ) + + assert typeof(dist.Normal()) is numpyro.distributions.continuous.Normal + + assert ( + typeof(dist.Normal(jax_getitem(jnp.array([0, 1, 2]), [defop(jax.Array)()]))) + is 
numpyro.distributions.continuous.Normal ) diff --git a/tests/test_internals_product_n.py b/tests/test_internals_product_n.py index 5dfca0e3..9d659b23 100644 --- a/tests/test_internals_product_n.py +++ b/tests/test_internals_product_n.py @@ -1,4 +1,5 @@ from effectful.internals.product_n import argsof, productN +from effectful.internals.unification import Box from effectful.ops.semantics import apply, coproduct, evaluate, handler from effectful.ops.syntax import defop from effectful.ops.types import Interpretation, NotHandled @@ -37,7 +38,9 @@ def plus2_value(x): return plus1(plus1(x)) def times_value(x, y): - if typ() is int and argsof(typ)[0][0] is int: + t = typ() + arg = argsof(typ)[0][0] + if t is int and arg is int: return x * y raise TypeError("unexpected type!") @@ -86,7 +89,7 @@ def times[T](x: T, y: T) -> T: value = defop(Interpretation, name="value") def apply_type(op, *a, **k): - return op.__type_rule__(*a, **k) + return Box(op.__type_rule__(*a, **k)) type_rules = {apply: apply_type} @@ -97,7 +100,9 @@ def plus2_value(x): return plus1(plus1(x)) def times_value(x, y): - if typ() is int and argsof(typ)[0][0] is int: + t = typ().value + arg = argsof(typ)[0][0].value + if t is int and arg is int: return x * y raise TypeError("unexpected type!") @@ -121,7 +126,7 @@ def f1(): with handler(analysisN): i = f1() - t = i.values(typ) + t = i.values(typ).value v = i.values(value) assert t is int assert v == 21 diff --git a/tests/test_internals_unification.py b/tests/test_internals_unification.py index fb1ada17..fa7fce7f 100644 --- a/tests/test_internals_unification.py +++ b/tests/test_internals_unification.py @@ -5,6 +5,7 @@ import pytest from effectful.internals.unification import ( + Box, canonicalize, freetypevars, nested_type, @@ -612,27 +613,11 @@ def test_infer_return_type_failure( (None, type(None)), (b"bytes", bytes), (b"", bytes), - # Type objects pass through - (int, int), - (str, str), - (float, float), - (bool, bool), - (list, list), - (dict, dict), - 
(set, set), - (tuple, tuple), - (type(None), type(None)), - (type(...), type(...)), - # Generic aliases pass through - (list[int], list[int]), - (dict[str, int], dict[str, int]), - (set[bool], set[bool]), - (tuple[int, str], tuple[int, str]), - (int | str, int | str), - (list[T], list[T]), - (dict[K, V], dict[K, V]), - # Union types pass through - (int | str, int | str), + # Boxed type objects pass through + (Box(int), int), + # Boxed generic aliases pass through + (Box(list[int]), list[int]), + (int, type), # Empty collections ([], list), ({}, dict), @@ -687,7 +672,7 @@ def test_infer_return_type_failure( ], ) def test_nested_type(value, expected): - result = nested_type(value) + result = nested_type(value).value assert canonicalize(result) == canonicalize(expected) @@ -1028,19 +1013,24 @@ def test_infer_composition_1(seq, index, key): inferred_type1 = substitute( sig1.return_annotation, - unify(sig1, sig1.bind(nested_type(seq), nested_type(index))), + unify(sig1, sig1.bind(nested_type(seq).value, nested_type(index).value)), ) inferred_type2 = substitute( sig2.return_annotation, - unify(sig2, sig2.bind(nested_type(inferred_type1), nested_type(key))), + unify( + sig2, + sig2.bind(nested_type(Box(inferred_type1)).value, nested_type(key).value), + ), ) inferred_type12 = substitute( sig12.return_annotation, unify( sig12, - sig12.bind(nested_type(seq), nested_type(index), nested_type(key)), + sig12.bind( + nested_type(seq).value, nested_type(index).value, nested_type(key).value + ), ), ) @@ -1049,7 +1039,10 @@ def test_infer_composition_1(seq, index, key): # check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( - unify(nested_type(sequence_mapping_getitem(seq, index, key)), inferred_type12), + unify( + nested_type(sequence_mapping_getitem(seq, index, key)).value, + inferred_type12, + ), collections.abc.Mapping, ) @@ -1144,13 +1137,16 @@ def test_infer_composition_2(mapping, key, index): # First infer type 
of mapping_getitem(mapping, key) -> should be a sequence inferred_type1 = substitute( sig1.return_annotation, - unify(sig1, sig1.bind(nested_type(mapping), nested_type(key))), + unify(sig1, sig1.bind(nested_type(mapping).value, nested_type(key).value)), ) # Then infer type of sequence_getitem(result_from_step1, index) -> should be element type inferred_type2 = substitute( sig2.return_annotation, - unify(sig2, sig2.bind(nested_type(inferred_type1), nested_type(index))), + unify( + sig2, + sig2.bind(nested_type(Box(inferred_type1)).value, nested_type(index).value), + ), ) # Directly infer type of mapping_sequence_getitem(mapping, key, index) @@ -1158,7 +1154,11 @@ def test_infer_composition_2(mapping, key, index): sig12.return_annotation, unify( sig12, - sig12.bind(nested_type(mapping), nested_type(key), nested_type(index)), + sig12.bind( + nested_type(mapping).value, + nested_type(key).value, + nested_type(index).value, + ), ), ) @@ -1168,7 +1168,8 @@ def test_infer_composition_2(mapping, key, index): # check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( unify( - nested_type(mapping_sequence_getitem(mapping, key, index)), inferred_type12 + nested_type(mapping_sequence_getitem(mapping, key, index)).value, + inferred_type12, ), collections.abc.Mapping, ) @@ -1200,20 +1201,22 @@ def test_get_from_constructed_sequence(a, b, index): # Infer type of sequence_from_pair(a, b) -> Sequence[T] construct_subs = unify( - sig_construct, sig_construct.bind(nested_type(a), nested_type(b)) + sig_construct, sig_construct.bind(nested_type(a).value, nested_type(b).value) ) inferred_sequence_type = substitute(sig_construct.return_annotation, construct_subs) # Infer type of sequence_getitem(sequence, index) -> T getitem_subs = unify( - sig_getitem, sig_getitem.bind(inferred_sequence_type, nested_type(index)) + sig_getitem, sig_getitem.bind(inferred_sequence_type, nested_type(index).value) ) inferred_element_type = 
substitute(sig_getitem.return_annotation, getitem_subs) # Directly infer type of get_from_constructed_sequence(a, b, index) direct_subs = unify( sig_composed, - sig_composed.bind(nested_type(a), nested_type(b), nested_type(index)), + sig_composed.bind( + nested_type(a).value, nested_type(b).value, nested_type(index).value + ), ) direct_type = substitute(sig_composed.return_annotation, direct_subs) @@ -1224,7 +1227,9 @@ def test_get_from_constructed_sequence(a, b, index): # check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( - unify(nested_type(get_from_constructed_sequence(a, b, index)), direct_type), + unify( + nested_type(get_from_constructed_sequence(a, b, index)).value, direct_type + ), collections.abc.Mapping, ) @@ -1254,13 +1259,15 @@ def test_get_from_constructed_mapping(key, value, lookup_key): # Infer type of mapping_from_pair(key, value) -> Mapping[K, V] construct_subs = unify( - sig_construct, sig_construct.bind(nested_type(key), nested_type(value)) + sig_construct, + sig_construct.bind(nested_type(key).value, nested_type(value).value), ) inferred_mapping_type = substitute(sig_construct.return_annotation, construct_subs) # Infer type of mapping_getitem(mapping, lookup_key) -> V getitem_subs = unify( - sig_getitem, sig_getitem.bind(inferred_mapping_type, nested_type(lookup_key)) + sig_getitem, + sig_getitem.bind(inferred_mapping_type, nested_type(lookup_key).value), ) inferred_value_type = substitute(sig_getitem.return_annotation, getitem_subs) @@ -1268,7 +1275,9 @@ def test_get_from_constructed_mapping(key, value, lookup_key): direct_subs = unify( sig_composed, sig_composed.bind( - nested_type(key), nested_type(value), nested_type(lookup_key) + nested_type(key).value, + nested_type(value).value, + nested_type(lookup_key).value, ), ) direct_type = substitute(sig_composed.return_annotation, direct_subs) @@ -1279,7 +1288,7 @@ def test_get_from_constructed_mapping(key, value, lookup_key): # 
check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( unify( - nested_type(get_from_constructed_mapping(key, value, lookup_key)), + nested_type(get_from_constructed_mapping(key, value, lookup_key)).value, direct_type, ), collections.abc.Mapping, @@ -1307,7 +1316,9 @@ def test_sequence_of_mappings(key1, val1, key2, val2, index): sig_composed = inspect.signature(sequence_of_mappings) # Step 1: Infer types of the two mappings - map1_subs = unify(sig_map, sig_map.bind(nested_type(key1), nested_type(val1))) + map1_subs = unify( + sig_map, sig_map.bind(nested_type(key1).value, nested_type(val1).value) + ) map1_type = substitute(sig_map.return_annotation, map1_subs) # Step 2: Infer type of sequence containing these mappings @@ -1321,7 +1332,10 @@ def test_sequence_of_mappings(key1, val1, key2, val2, index): direct_subs = unify( sig_composed, sig_composed.bind( - nested_type(key1), nested_type(val1), nested_type(key2), nested_type(val2) + nested_type(key1).value, + nested_type(val1).value, + nested_type(key2).value, + nested_type(val2).value, ), ) direct_type = substitute(sig_composed.return_annotation, direct_subs) @@ -1357,12 +1371,12 @@ def test_double_nested_get(k1, v1, v2, k2, v3, v4, outer_idx, inner_key, inner_i nested_subs = unify( sig_nested, sig_nested.bind( - nested_type(k1), - nested_type(v1), - nested_type(v2), - nested_type(k2), - nested_type(v3), - nested_type(v4), + nested_type(k1).value, + nested_type(v1).value, + nested_type(v2).value, + nested_type(k2).value, + nested_type(v3).value, + nested_type(v4).value, ), ) nested_seq_type = substitute(sig_nested.return_annotation, nested_subs) @@ -1370,21 +1384,21 @@ def test_double_nested_get(k1, v1, v2, k2, v3, v4, outer_idx, inner_key, inner_i # Step 2: Get element from outer sequence outer_get_subs = unify( - sig_seq_get, sig_seq_get.bind(nested_seq_type, nested_type(outer_idx)) + sig_seq_get, sig_seq_get.bind(nested_seq_type, 
nested_type(outer_idx).value) ) mapping_type = substitute(sig_seq_get.return_annotation, outer_get_subs) # This should be Mapping[K, Sequence[T]] # Step 3: Get sequence from mapping inner_map_subs = unify( - sig_map_get, sig_map_get.bind(mapping_type, nested_type(inner_key)) + sig_map_get, sig_map_get.bind(mapping_type, nested_type(inner_key).value) ) sequence_type = substitute(sig_map_get.return_annotation, inner_map_subs) # This should be Sequence[T] # Step 4: Get element from inner sequence final_get_subs = unify( - sig_seq_get, sig_seq_get.bind(sequence_type, nested_type(inner_idx)) + sig_seq_get, sig_seq_get.bind(sequence_type, nested_type(inner_idx).value) ) composed_type = substitute(sig_seq_get.return_annotation, final_get_subs) # This should be T @@ -1393,15 +1407,15 @@ def test_double_nested_get(k1, v1, v2, k2, v3, v4, outer_idx, inner_key, inner_i direct_subs = unify( sig_composed, sig_composed.bind( - nested_type(k1), - nested_type(v1), - nested_type(v2), - nested_type(k2), - nested_type(v3), - nested_type(v4), - nested_type(outer_idx), - nested_type(inner_key), - nested_type(inner_idx), + nested_type(k1).value, + nested_type(v1).value, + nested_type(v2).value, + nested_type(k2).value, + nested_type(v3).value, + nested_type(v4).value, + nested_type(outer_idx).value, + nested_type(inner_key).value, + nested_type(inner_idx).value, ), ) direct_type = substitute(sig_composed.return_annotation, direct_subs) @@ -1416,7 +1430,7 @@ def test_double_nested_get(k1, v1, v2, k2, v3, v4, outer_idx, inner_key, inner_i double_nested_get( k1, v1, v2, k2, v3, v4, outer_idx, inner_key, inner_idx ) - ), + ).value, direct_type, ), collections.abc.Mapping, @@ -1442,18 +1456,20 @@ def test_apply_to_sequence_element(f, seq, index): # Step 1: Infer type of sequence_getitem(seq, index) -> T getitem_subs = unify( - sig_getitem, sig_getitem.bind(nested_type(seq), nested_type(index)) + sig_getitem, sig_getitem.bind(nested_type(seq).value, nested_type(index).value) ) element_type = 
substitute(sig_getitem.return_annotation, getitem_subs) # Step 2: Infer type of call_func(f, element) -> U - call_subs = unify(sig_call, sig_call.bind(nested_type(f), element_type)) + call_subs = unify(sig_call, sig_call.bind(nested_type(f).value, element_type)) composed_type = substitute(sig_call.return_annotation, call_subs) # Direct inference direct_subs = unify( sig_composed, - sig_composed.bind(nested_type(f), nested_type(seq), nested_type(index)), + sig_composed.bind( + nested_type(f).value, nested_type(seq).value, nested_type(index).value + ), ) direct_type = substitute(sig_composed.return_annotation, direct_subs) @@ -1462,7 +1478,7 @@ def test_apply_to_sequence_element(f, seq, index): # check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( - unify(nested_type(apply_to_sequence_element(f, seq, index)), direct_type), + unify(nested_type(apply_to_sequence_element(f, seq, index)).value, direct_type), collections.abc.Mapping, ) @@ -1484,17 +1500,23 @@ def test_map_and_get(f, seq, index): sig_composed = inspect.signature(map_and_get) # Step 1: Infer type of map_sequence(f, seq) -> Sequence[U] - map_subs = unify(sig_map, sig_map.bind(nested_type(f), nested_type(seq))) + map_subs = unify( + sig_map, sig_map.bind(nested_type(f).value, nested_type(seq).value) + ) mapped_type = substitute(sig_map.return_annotation, map_subs) # Step 2: Infer type of sequence_getitem(mapped_seq, index) -> U - getitem_subs = unify(sig_getitem, sig_getitem.bind(mapped_type, nested_type(index))) + getitem_subs = unify( + sig_getitem, sig_getitem.bind(mapped_type, nested_type(index).value) + ) composed_type = substitute(sig_getitem.return_annotation, getitem_subs) # Direct inference direct_subs = unify( sig_composed, - sig_composed.bind(nested_type(f), nested_type(seq), nested_type(index)), + sig_composed.bind( + nested_type(f).value, nested_type(seq).value, nested_type(index).value + ), ) direct_type = 
substitute(sig_composed.return_annotation, direct_subs) @@ -1503,7 +1525,7 @@ def test_map_and_get(f, seq, index): # check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( - unify(nested_type(map_and_get(f, seq, index)), direct_type), + unify(nested_type(map_and_get(f, seq, index)).value, direct_type), collections.abc.Mapping, ) @@ -1525,17 +1547,23 @@ def test_compose_and_apply(f, g, value): sig_composed = inspect.signature(compose_and_apply) # Step 1: Infer type of compose_mappings(f, g) -> Callable[[T], V] - compose_subs = unify(sig_compose, sig_compose.bind(nested_type(f), nested_type(g))) + compose_subs = unify( + sig_compose, sig_compose.bind(nested_type(f).value, nested_type(g).value) + ) composed_func_type = substitute(sig_compose.return_annotation, compose_subs) # Step 2: Infer type of call_func(composed, value) -> V - call_subs = unify(sig_call, sig_call.bind(composed_func_type, nested_type(value))) + call_subs = unify( + sig_call, sig_call.bind(composed_func_type, nested_type(value).value) + ) result_type = substitute(sig_call.return_annotation, call_subs) # Direct inference direct_subs = unify( sig_composed, - sig_composed.bind(nested_type(f), nested_type(g), nested_type(value)), + sig_composed.bind( + nested_type(f).value, nested_type(g).value, nested_type(value).value + ), ) direct_type = substitute(sig_composed.return_annotation, direct_subs) @@ -1544,7 +1572,7 @@ def test_compose_and_apply(f, g, value): # check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( - unify(nested_type(compose_and_apply(f, g, value)), direct_type), + unify(nested_type(compose_and_apply(f, g, value)).value, direct_type), collections.abc.Mapping, ) @@ -1567,13 +1595,14 @@ def test_construct_apply_and_get(f, a, b, index): # Step 1: Infer type of sequence_from_pair(a, b) -> Sequence[T] construct_subs = unify( - sig_construct, 
sig_construct.bind(nested_type(a), nested_type(b)) + sig_construct, sig_construct.bind(nested_type(a).value, nested_type(b).value) ) seq_type = substitute(sig_construct.return_annotation, construct_subs) # Step 2: Infer type of apply_to_sequence_element(f, seq, index) -> U apply_subs = unify( - sig_apply, sig_apply.bind(nested_type(f), seq_type, nested_type(index)) + sig_apply, + sig_apply.bind(nested_type(f).value, seq_type, nested_type(index).value), ) composed_type = substitute(sig_apply.return_annotation, apply_subs) @@ -1581,7 +1610,10 @@ def test_construct_apply_and_get(f, a, b, index): direct_subs = unify( sig_composed, sig_composed.bind( - nested_type(f), nested_type(a), nested_type(b), nested_type(index) + nested_type(f).value, + nested_type(a).value, + nested_type(b).value, + nested_type(index).value, ), ) direct_type = substitute(sig_composed.return_annotation, direct_subs) @@ -1591,7 +1623,7 @@ def test_construct_apply_and_get(f, a, b, index): # check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( - unify(nested_type(construct_apply_and_get(f, a, b, index)), direct_type), + unify(nested_type(construct_apply_and_get(f, a, b, index)).value, direct_type), collections.abc.Mapping, ) @@ -1614,18 +1646,19 @@ def test_binary_on_sequence_elements(f, seq, index1, index2): # Step 1: Infer types of sequence_getitem calls getitem1_subs = unify( - sig_getitem, sig_getitem.bind(nested_type(seq), nested_type(index1)) + sig_getitem, sig_getitem.bind(nested_type(seq).value, nested_type(index1).value) ) elem1_type = substitute(sig_getitem.return_annotation, getitem1_subs) getitem2_subs = unify( - sig_getitem, sig_getitem.bind(nested_type(seq), nested_type(index2)) + sig_getitem, sig_getitem.bind(nested_type(seq).value, nested_type(index2).value) ) elem2_type = substitute(sig_getitem.return_annotation, getitem2_subs) # Step 2: Infer type of call_binary_func(f, elem1, elem2) -> V call_subs = unify( - 
sig_call_binary, sig_call_binary.bind(nested_type(f), elem1_type, elem2_type) + sig_call_binary, + sig_call_binary.bind(nested_type(f).value, elem1_type, elem2_type), ) composed_type = substitute(sig_call_binary.return_annotation, call_subs) @@ -1633,7 +1666,10 @@ def test_binary_on_sequence_elements(f, seq, index1, index2): direct_subs = unify( sig_composed, sig_composed.bind( - nested_type(f), nested_type(seq), nested_type(index1), nested_type(index2) + nested_type(f).value, + nested_type(seq).value, + nested_type(index1).value, + nested_type(index2).value, ), ) direct_type = substitute(sig_composed.return_annotation, direct_subs) @@ -1644,7 +1680,7 @@ def test_binary_on_sequence_elements(f, seq, index1, index2): # check that the result of nested_type on the value of the composition unifies with the inferred type assert isinstance( unify( - nested_type(binary_on_sequence_elements(f, seq, index1, index2)), + nested_type(binary_on_sequence_elements(f, seq, index1, index2)).value, direct_type, ), collections.abc.Mapping, diff --git a/tests/test_ops_semantics.py b/tests/test_ops_semantics.py index 896c09be..c97dfcc6 100644 --- a/tests/test_ops_semantics.py +++ b/tests/test_ops_semantics.py @@ -2,7 +2,7 @@ import functools import itertools import logging -from collections.abc import Callable +from collections.abc import Callable, Mapping from typing import Annotated, Any, Union import pytest @@ -18,7 +18,7 @@ typeof, ) from effectful.ops.syntax import ObjectInterpretation, Scoped, deffn, defop, implements -from effectful.ops.types import Interpretation, NotHandled, Operation +from effectful.ops.types import Interpretation, NotHandled, Operation, Term logger = logging.getLogger(__name__) @@ -815,3 +815,14 @@ def Lam2[A, B]( term = Lam2(add(x(), add(y(), z())), x, y) assert not {x, y} <= fvsof(term) assert fvsof(term) == {z, Lam2, add} + + +def test_interpretation_typing(): + @defop + def f[T](m: Mapping[Operation, T], x: T) -> T: + raise NotHandled + + x = defop(int) + 
t1 = f({x: x()}, 2) + + assert isinstance(t1, Term) and typeof(t1) == int diff --git a/tests/test_ops_syntax.py b/tests/test_ops_syntax.py index be40b642..b2261319 100644 --- a/tests/test_ops_syntax.py +++ b/tests/test_ops_syntax.py @@ -12,7 +12,7 @@ import pytest from docs.source.lambda_ import App, Lam, Let, eager_mixed -from effectful.ops.semantics import evaluate, fvsof, handler, typeof +from effectful.ops.semantics import apply, evaluate, fvsof, handler, typeof from effectful.ops.syntax import ( Scoped, _CustomSingleDispatchCallable, @@ -20,7 +20,6 @@ deffn, defop, defstream, - defterm, iter_, next_, syntactic_eq, @@ -190,7 +189,7 @@ def add(x: int, y: int) -> int: def f1(x: int) -> int: return add(x, add(y(), 1)) - f1_term = defterm(f1) + f1_term = evaluate(f1) f1_app = call(f1, x()) f1_term_app = f1_term(x()) @@ -211,7 +210,6 @@ def test_term_str(): x3 = defop(x1) assert str(x1) == str(x2) == str(x3) == "x" - assert repr(x1) != repr(x2) != repr(x3) assert str(x1() + x2()) == "__add__(x(), x!1())" assert str(x1() + x1()) == "__add__(x(), x())" assert str(deffn(x1() + x1(), x1)) == "deffn(__add__(x(), x()), x)" @@ -283,6 +281,10 @@ def _(x: str): assert process.__signature__ == inspect.signature(process) + with handler({process: lambda _: "test"}): + assert process(0) == "test" + assert process("hello") == "test" + def test_defop_method(): """Test that defop can be used as a method decorator.""" @@ -299,7 +301,7 @@ def my_method(self, x: int) -> int: # check signature assert MyClass.my_method.__signature__ == inspect.signature( - MyClass.my_method._default + MyClass.my_method.__default__ ) assert isinstance(term, Term) @@ -377,7 +379,6 @@ def my_op(x: int) -> int: MyClass().my_op(5) -@pytest.mark.xfail(reason="defop does not support classmethod yet") def test_defop_classmethod(): """Test that defop can be used as a classmethod decorator.""" @@ -392,16 +393,13 @@ def my_classmethod(cls, x: int) -> int: assert isinstance(MyClass.my_classmethod, Operation) # 
check signature assert MyClass.my_classmethod.__signature__ == inspect.signature( - MyClass.my_classmethod._default + MyClass.my_classmethod.__default__ ) assert isinstance(term, Term) assert isinstance(term.op, Operation) assert term.op.__name__ == "my_classmethod" - assert term.args == ( - MyClass, - 5, - ) + assert term.args == (5,) assert term.kwargs == {} # Ensure the operation is unique @@ -409,7 +407,7 @@ def my_classmethod(cls, x: int) -> int: assert term.op is another_term.op # Test that the classmethod can be called with a handler - with handler({MyClass.my_classmethod: lambda cls, x: x + 3}): + with handler({MyClass.my_classmethod: lambda x: x + 3}): assert MyClass.my_classmethod(5) == 8 assert MyClass.my_classmethod(10) == 13 @@ -428,7 +426,7 @@ def my_staticmethod(x: int) -> int: assert isinstance(MyClass.my_staticmethod, Operation) # check signature assert MyClass.my_staticmethod.__signature__ == inspect.signature( - MyClass.my_staticmethod._default + MyClass.my_staticmethod.__default__ ) assert isinstance(term, Term) @@ -447,39 +445,6 @@ def my_staticmethod(x: int) -> int: assert MyClass.my_staticmethod(10) == 14 -def test_defop_property(): - """Test that defop can be used as a property decorator.""" - - class MyClass: - @defop - @property - def my_property(self) -> int: - raise NotHandled - - instance = MyClass() - term = instance.my_property - - assert isinstance(MyClass.my_property, Operation) - assert MyClass.my_property.__signature__ == inspect.signature( - MyClass.my_property._default - ) - - assert isinstance(term, Term) - assert isinstance(term.op, Operation) - assert term.op.__name__ == "my_property" - assert term.args == (instance,) - assert term.kwargs == {} - - # Ensure the operation is unique - another_instance = MyClass() - assert instance.my_property is not another_instance.my_property - - # Test that the property can be called with a handler - with handler({MyClass.my_property: lambda self: 42}): - assert instance.my_property == 42 - 
assert another_instance.my_property == 42 - - def test_defop_singledispatchmethod(): """Test that defop can be used as a singledispatchmethod decorator.""" @@ -510,7 +475,7 @@ def _(self, x: bool) -> bool: assert isinstance(MyClass.my_singledispatch, Operation) assert MyClass.my_singledispatch.__signature__ == inspect.signature( - MyClass.my_singledispatch._default + MyClass.my_singledispatch.__default__ ) assert isinstance(term_float, Term) @@ -935,3 +900,68 @@ def f(c: C) -> int: obj = C(defop(int)()) term = defdata(f, obj) assert isinstance(term.args[0], C) and isinstance(term.args[0].x, Term) + + +def test_operation_subclass(): + class TestOperation(Operation): + pass + + class OtherOperation(Operation): + pass + + assert isinstance(TestOperation.apply, Operation) + assert isinstance(OtherOperation.apply, Operation) + assert TestOperation.apply != OtherOperation.apply + + @TestOperation.define + def my_func(a, b): + return "" + + def _my_func(a, b): + return "" + + def _apply(op, a, b, **kwargs): + assert op is my_func + return "" + + def _test_operation_apply(op, a, b): + assert op is my_func + return "" + + def _other_operation_apply(op, a, b): + return "" + + assert my_func(1, 2) == "" + + # Handling the operation works + with handler({my_func: _my_func}): + assert my_func(3, 4) == "" + + # Handling the class apply works + with handler({TestOperation.apply: _test_operation_apply}): + assert my_func(3, 4) == "" + + with handler({OtherOperation.apply: _other_operation_apply}): + assert my_func(3, 4) == "" + + # Handling global apply works + with handler({apply: _apply}): + assert my_func(3, 4) == "" + + # Handling the operation takes precedence over the class apply + with handler({TestOperation.apply: _test_operation_apply, my_func: _my_func}): + assert my_func(3, 4) == "" + + # Handling the class apply takes precedence over the global apply + with handler({apply: _apply, TestOperation.apply: _test_operation_apply}): + assert my_func(3, 4) == "" + + # Handling 
the operation takes precedence over the global apply + with handler({apply: _apply, my_func: _my_func}): + assert my_func(3, 4) == "" + + # Handling the operation takes precedence over the class apply and the global apply + with handler( + {apply: _apply, my_func: _my_func, TestOperation.apply: _test_operation_apply} + ): + assert my_func(3, 4) == "" From 62e45a41f922c889988ecfee58c7bccc0827e2ac Mon Sep 17 00:00:00 2001 From: eb8680 Date: Thu, 4 Dec 2025 19:13:59 -0500 Subject: [PATCH 19/39] Fix `staging-llm` diff against `master` (#426) * Release v0.2.3 (#374) * Install prettyprinter for term when library is available (#386) * install prettyprinter for term when library is available * lint * move code into types.py * fix pypandoc issue (#397) * Convert evaluate to a singledispatch (#398) * convert evaluate to a singledispatch * lint * add jnp.pi and ArrayTerm.T (#394) * Deprecate defterm (#399) * deprecate defterm * remove defterm case * remove defterm * lint * evaluate distribution arguments * lint * remove interpreter * Revert "remove interpreter" This reverts commit 30442779689da862aa6f8e0224330ffaf8f556ae. 
* wip * lint * Rework numpyro distribution handling to enable symbolic distributions and handling of distribution methods (#311) * refactor distribution operations * add a test for typeof of distributions * add tests for symbolic dists/arguments * introduce operations for distribution methods * comment * fix tests * work around https://github.com/BasisResearch/effectful/issues/310 * replace hack with new hack * tweak repr for _BaseOperation * lint * work around https://github.com/BasisResearch/effectful/issues/312 * clean up access to dist ops * wip * wip * add type annotations to get correct term conversion * lint * include distribution arguments as properties * fix distribution calls * try again * fixes * format * Box the output of `__type_rule__` (#387) * box the output of __type_rule__ * fix tests * fix tests * require callers of __type_rule__ to box arguments * fix * move Box out of ops.types * lint * fix test * fix syntactic_eq implementation for jax arrays (#405) * Fix recursion error in sizesof (#406) * fix recursion error in sizesof * format * Allow `_BaseOperation` subclasses to have an overrideable `apply` method (#414) * stash * fixes * initial * wip * lint * ensure each subclass has a fresh operation * wip * wip * lint * wip * wip * lint * refactor class method support * move defops * fix test * remove singledispatch case and add test * move definition * cleanup * simplify * cleanup * lint * fix failing test * fix classmethod * __isabstractmethod__ * revert --------- Co-authored-by: Eli * Try pulling in pyproject.toml from staging-llm to master (#425) --------- Co-authored-by: Jack Feser Co-authored-by: Tim Cooijmans From 1c37637f64b904e2324f47d05a934c18ba281673 Mon Sep 17 00:00:00 2001 From: "Dat Nguyen (Marc)" <15943389+datvo06@users.noreply.github.com> Date: Tue, 9 Dec 2025 11:16:26 -0500 Subject: [PATCH 20/39] Implement a RetryHandler for LLM module (#428) * Logging retries * Remove redundant assignment * Linting * Adding mypy check and test * 
SynthesizedFunction for constrained decoding, adapt the tests * Linting * Let LLM generate param names * Pydantic field annotation * Adding validation error example * Linting * Fix minor formatting for type context * Update llm dependencies to include mypy * More comprehensive error message * linting * More comprehensive import * Fix `staging-llm` diff against `master` (#426) * Release v0.2.3 (#374) * Install prettyprinter for term when library is available (#386) * install prettyprinter for term when library is available * lint * move code into types.py * fix pypandoc issue (#397) * Convert evaluate to a singledispatch (#398) * convert evaluate to a singledispatch * lint * add jnp.pi and ArrayTerm.T (#394) * Deprecate defterm (#399) * deprecate defterm * remove defterm case * remove defterm * lint * evaluate distribution arguments * lint * remove interpreter * Revert "remove interpreter" This reverts commit 30442779689da862aa6f8e0224330ffaf8f556ae. * wip * lint * Rework numpyro distribution handling to enable symbolic distributions and handling of distribution methods (#311) * refactor distribution operations * add a test for typeof of distributions * add tests for symbolic dists/arguments * introduce operations for distribution methods * comment * fix tests * work around https://github.com/BasisResearch/effectful/issues/310 * replace hack with new hack * tweak repr for _BaseOperation * lint * work around https://github.com/BasisResearch/effectful/issues/312 * clean up access to dist ops * wip * wip * add type annotations to get correct term conversion * lint * include distribution arguments as properties * fix distribution calls * try again * fixes * format * Box the output of `__type_rule__` (#387) * box the output of __type_rule__ * fix tests * fix tests * require callers of __type_rule__ to box arguments * fix * move Box out of ops.types * lint * fix test * fix syntactic_eq implementation for jax arrays (#405) * Fix recursion error in sizesof (#406) * fix 
recursion error in sizesof * format * Allow `_BaseOperation` subclasses to have an overrideable `apply` method (#414) * stash * fixes * initial * wip * lint * ensure each subclass has a fresh operation * wip * wip * lint * wip * wip * lint * refactor class method support * move defops * fix test * remove singledispatch case and add test * move definition * cleanup * simplify * cleanup * lint * fix failing test * fix classmethod * __isabstractmethod__ * revert --------- Co-authored-by: Eli * Try pulling in pyproject.toml from staging-llm to master (#425) --------- Co-authored-by: Jack Feser Co-authored-by: Tim Cooijmans * Logging retries * Make linter happy with the exception_cls * More linting * Revert the changes to synthesis, it belongs to a different PR * Add test for retry handler * More linting * Removing mypy for llm --------- Co-authored-by: eb8680 Co-authored-by: Jack Feser Co-authored-by: Tim Cooijmans --- docs/source/llm.ipynb | 259 ++++++++++++++++++++++------ effectful/handlers/llm/providers.py | 46 +++++ tests/test_handlers_llm.py | 117 +++++++++++++ 3 files changed, 372 insertions(+), 50 deletions(-) diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index eb6b1186..aba3ecff 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -2,13 +2,14 @@ "cells": [ { "cell_type": "code", - "execution_count": 11, + "execution_count": 1, "id": "5aaf649f", "metadata": {}, "outputs": [], "source": [ "import dataclasses\n", "import functools\n", + "import inspect\n", "import logging\n", "import sys\n", "from collections.abc import Callable\n", @@ -18,6 +19,7 @@ " CacheLLMRequestHandler,\n", " LiteLLMProvider,\n", " LLMLoggingHandler,\n", + " RetryLLMHandler,\n", " completion,\n", " tool_call,\n", ")\n", @@ -54,7 +56,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 2, "id": "1e832675", "metadata": {}, "outputs": [], @@ -77,7 +79,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 3, "id": 
"634f6533", "metadata": {}, "outputs": [ @@ -85,17 +87,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "There once was a fish in the sea, \n", - "Who dreamed of a life wild and free. \n", - "He tried to make friends, \n", - "Around coral bends, \n", - "And surfed on the waves with such glee.\n", + "In the ocean where fast fish dash, \n", + "Swims a mackerel that makes quite a splash. \n", + "With each flip of its fin, \n", + "It wears a cheeky grin, \n", + "And escapes from each net's eager clasp! \n", "----------------------------------------\n", - "There once was a fish who could skate, \n", - "Gliding smooth on a pond, silver plate. \n", - "With a flip and a flop, \n", - "He'd never quite stop, \n", - "Making waves with his slick figure eight.\n" + "In the depths of the ocean so blue, \n", + "Swam a fish with a curious view. \n", + "With a flick of its tail, \n", + "It set off to unveil, \n", + "The mysteries of waters anew. \n" ] } ], @@ -116,7 +118,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 4, "id": "706ce53b", "metadata": {}, "outputs": [ @@ -125,29 +127,29 @@ "output_type": "stream", "text": [ "\n", - "Ripples in moonlight, \n", - "Silver scales dance in silence— \n", - "A river's secret.\n", - "----------------------------------------\n", - "Ripples in moonlight, \n", - "Silver scales dance in silence— \n", - "A river's secret.\n", - "\n", - "Silent waters gleam, \n", - "Fish drift in the moon’s soft glow— \n", - "Nature's quiet dance.\n", + "Silver scales shimmer, \n", + "Dancing through the ocean's depths— \n", + "Whispers of the sea.\n", "----------------------------------------\n", - "Silent waters gleam, \n", - "Fish drift in the moon’s soft glow— \n", - "Nature's quiet dance.\n", + "Silver scales shimmer, \n", + "Dancing through the ocean's depths— \n", + "Whispers of the sea.\n", "\n", "Silver scales shimmer, \n", - "Silently weaving through waves— \n", - "Whispers of the deep.\n", + "Beneath the gentle 
waves' dance, \n", + "In the ocean's hush. \n", "----------------------------------------\n", "Silver scales shimmer, \n", - "Silently weaving through waves— \n", - "Whispers of the deep.\n" + "Beneath the gentle waves' dance, \n", + "In the ocean's hush. \n", + "\n", + "In deep waters blue, \n", + "Silent dances shift and gleam, \n", + "Fish weave dreams anew. \n", + "----------------------------------------\n", + "In deep waters blue, \n", + "Silent dances shift and gleam, \n", + "Fish weave dreams anew. \n" ] } ], @@ -198,7 +200,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 5, "id": "2c766859", "metadata": {}, "outputs": [], @@ -223,10 +225,19 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 6, "id": "c83bbdc0", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "def count_a_occurrences(input_string: str) -> int:\n", + " return input_string.count('a')\n" + ] + } + ], "source": [ "@Template.define\n", "def count_char(char: str) -> Callable[[str], int]:\n", @@ -238,7 +249,9 @@ " count_a = count_char(\"a\")\n", " assert callable(count_a)\n", " assert count_a(\"banana\") == 3\n", - " assert count_a(\"cherry\") == 0" + " assert count_a(\"cherry\") == 0\n", + " # Print the source code of the generated function\n", + " print(inspect.getsource(count_a))" ] }, { @@ -255,7 +268,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 7, "id": "66711301", "metadata": {}, "outputs": [ @@ -267,9 +280,7 @@ "Tool call: weather(*(), **{'city': 'Chicago'}) -> cold\n", "Tool call: weather(*(), **{'city': 'New York'}) -> wet\n", "Tool call: weather(*(), **{'city': 'Barcelona'}) -> sunny\n", - "It seems there was a problem retrieving the weather information for these cities. 
\n", - "\n", - "Would you like me to try fetching the data again or assist you in another way?\n" + "Barcelona currently has good weather, as it is sunny.\n" ] } ], @@ -313,7 +324,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 8, "id": "17668ac8", "metadata": {}, "outputs": [ @@ -326,7 +337,7 @@ "Who's there?\n", "Iguana.\n", "Iguana who?\n", - "Iguana tell you a secret... you're awesome!\n", + "Iguana come inside your house and warm up, it's cold out here!\n", "> The crowd laughs politely.\n" ] } @@ -377,7 +388,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 9, "id": "cbf495a2", "metadata": {}, "outputs": [ @@ -385,8 +396,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Request fired: () {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a haiku on the theme of fish2.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'} Response(id='resp_06ea51b6ad2eb0bb006914f62252708193868c36a85d4e2862', created_at=1762981410.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_06ea51b6ad2eb0bb006914f622ca248193a3bfd331defa6813', content=[ResponseOutputText(annotations=[], text=\"Swift shadows darting, \\nIn the deep blue silence, peace— \\nFish2's gentle glide.\", type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', 
usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=22, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=40), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)\n", - "Request fired: () {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a limerick on the theme of fish.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'} Response(id='resp_0cf58e47bda48859006914f623a5e08196a1271afbe68a1605', created_at=1762981411.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_0cf58e47bda48859006914f6240e548196bdcf02129ce5eecd', content=[ResponseOutputText(annotations=[], text='There once was a fish full of cheer, \\nWho swam where the water was clear. \\nWith a flip and a glide, \\nHe danced with the tide, \\nSpreading joy to all who came near.', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=45, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=63), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)\n" + "Request fired: () {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the 
theme of fish2.'}], 'role': 'user'}], 'response_format': None, 'tools': []} ModelResponse(id='chatcmpl-CkjWzRIrVVqCuOSeRSdCW1nYZ2SG7', created=1765254145, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Gently in the stream, \\nSilver scales in dappled light, \\nSilent swirls below. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=23, prompt_tokens=34, total_tokens=57, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')\n", + "Request fired: () {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish.'}], 'role': 'user'}], 'response_format': None, 'tools': []} ModelResponse(id='chatcmpl-CkjX0o5CHnG7qL9LJT0PvvofD2OzU', created=1765254146, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the sea where the swift currents swish, \\nLived a cod with an unyielding wish. \\nHe dreamt of the sky, \\nWhere seagulls would fly, \\nBut alas, he remained just a fish. 
', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=50, prompt_tokens=34, total_tokens=84, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')\n" ] } ], @@ -420,7 +431,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 10, "id": "81a15f00", "metadata": {}, "outputs": [ @@ -428,10 +439,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "INFO {'args': (), 'kwargs': {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a haiku on the theme of fish3.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'}, 'response': Response(id='resp_09b7251955854c33006914f625fc748190a8375a208f0d7859', created_at=1762981414.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_09b7251955854c33006914f6270d248190bdd1094b60fced21', content=[ResponseOutputText(annotations=[], text=\"Silent ripples dance, \\nGolden fins glide through the depths— \\nNature's quiet grace.\", type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), 
verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=20, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=38), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)}\n", - "INFO {'args': (), 'kwargs': {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a haiku on the theme of fish3.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'}, 'response': Response(id='resp_09b7251955854c33006914f625fc748190a8375a208f0d7859', created_at=1762981414.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_09b7251955854c33006914f6270d248190bdd1094b60fced21', content=[ResponseOutputText(annotations=[], text=\"Silent ripples dance, \\nGolden fins glide through the depths— \\nNature's quiet grace.\", type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=18, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=20, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=38), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)}\n", - "INFO {'args': (), 'kwargs': {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a 
limerick on the theme of fish4.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'}, 'response': Response(id='resp_03b9010dc0322c97006914f629c6608193ad5517ed6dcabe4b', created_at=1762981417.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', output=[ResponseOutputMessage(id='msg_03b9010dc0322c97006914f62a532c8193bf00b55cb75c721f', content=[ResponseOutputText(annotations=[], text='In a pond where the lily pads swish, \\nLived a catfish who dreamed of a dish. \\nHe dove in with glee, \\nIn search of a pea, \\nBut ended up hooked like a wish! \\n', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=19, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=48, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=67), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)}\n", - "INFO {'args': (), 'kwargs': {'input': [{'type': 'message', 'content': [{'type': 'input_text', 'text': 'Write a limerick on the theme of fish4.'}], 'role': 'user'}], 'model': 'gpt-4o', 'tools': [], 'tool_choice': 'auto'}, 'response': Response(id='resp_03b9010dc0322c97006914f629c6608193ad5517ed6dcabe4b', created_at=1762981417.0, error=None, incomplete_details=None, instructions=None, metadata={}, model='gpt-4o-2024-08-06', object='response', 
output=[ResponseOutputMessage(id='msg_03b9010dc0322c97006914f62a532c8193bf00b55cb75c721f', content=[ResponseOutputText(annotations=[], text='In a pond where the lily pads swish, \\nLived a catfish who dreamed of a dish. \\nHe dove in with glee, \\nIn search of a pea, \\nBut ended up hooked like a wish! \\n', type='output_text', logprobs=[])], role='assistant', status='completed', type='message')], parallel_tool_calls=True, temperature=1.0, tool_choice='auto', tools=[], top_p=1.0, background=False, conversation=None, max_output_tokens=None, max_tool_calls=None, previous_response_id=None, prompt=None, prompt_cache_key=None, reasoning=Reasoning(effort=None, generate_summary=None, summary=None), safety_identifier=None, service_tier='default', status='completed', text=ResponseTextConfig(format=ResponseFormatText(type='text'), verbosity='medium'), top_logprobs=0, truncation='disabled', usage=ResponseUsage(input_tokens=19, input_tokens_details=InputTokensDetails(cached_tokens=0), output_tokens=48, output_tokens_details=OutputTokensDetails(reasoning_tokens=0), total_tokens=67), user=None, billing={'payer': 'developer'}, prompt_cache_retention=None, store=True)}\n" + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3.'}], 'role': 'user'}], 'response_format': None, 'tools': []}, 'response': ModelResponse(id='chatcmpl-CkjX1vTtAKxu7ldqTHNLf3Q5HJtEa', created=1765254147, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Beneath ripples glide, \\nWhispers of scales in moonlight, \\nSilent depths, fish dart. 
', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=25, prompt_tokens=34, total_tokens=59, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4.'}], 'role': 'user'}], 'response_format': None, 'tools': []}, 'response': ModelResponse(id='chatcmpl-CkjX2ED38u6C82SrNwcgpoBJ6rLtL', created=1765254148, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the ocean so deep and so swish, \\nSwam a cod with a dream-like wish. 
\\nIt leaped with a flop, \\nOver waves it would hop, \\nSaying, \"One day I\\'ll fly—oh, what bliss!\" ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=53, prompt_tokens=35, total_tokens=88, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n" ] } ], @@ -456,6 +465,156 @@ " _ = haiku(\"fish3\")\n", " _ = limerick(\"fish4\")" ] + }, + { + "cell_type": "markdown", + "id": "bd25826d", + "metadata": {}, + "source": [ + "### Retrying LLM Requests\n", + "LLM calls can sometimes fail due to transient errors or produce invalid outputs. The `RetryLLMHandler` automatically retries failed template calls:\n", + "\n", + "- `max_retries`: Maximum number of retry attempts (default: 3)\n", + "- `add_error_feedback`: When `True`, appends the error message to the prompt on retry, helping the LLM correct its output.\n", + "- `exception_cls`: RetryHandler will only attempt to try again when a specific type of `Exception` is thrown.\n" + ] + }, + { + "cell_type": "markdown", + "id": "bafc0a96", + "metadata": {}, + "source": [ + "Example usage: having an unstable service that seldomly fail." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "4334d07a", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CkjX4Tdccqd0ljEj2AUMlASFo31Tp', created=1765254150, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_6cYMZnIK0hrv3xTStyyWBLXR', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=52, total_tokens=63, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'type': 'function'}], 'function_call': None, 
'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. Please retry.'}\"}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CkjX4gCmm8GEpTADaz6b3WJHXETYu', created=1765254150, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_7Cz1w1toF0CccR8e5XUp0dIP', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=95, total_tokens=106, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 
'tool_call_id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_7Cz1w1toF0CccR8e5XUp0dIP', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_7Cz1w1toF0CccR8e5XUp0dIP', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 2/3. Please retry.'}\"}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CkjX57HW80BM2iCR3guWCc8e9etYS', created=1765254151, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_KLr43KhV2A4GzUbQfbhD1iXz', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=138, total_tokens=149, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'unstable_service', 'args': (), 'kwargs': {}}\n", + 
"INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_7Cz1w1toF0CccR8e5XUp0dIP', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_7Cz1w1toF0CccR8e5XUp0dIP', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 2/3. Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_KLr43KhV2A4GzUbQfbhD1iXz', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_KLr43KhV2A4GzUbQfbhD1iXz', 'name': 'unstable_service', 'content': [{'type': 'text', 'text': \"{ 'status': 'ok', 'data': [1, 2, 3] }\"}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CkjX5GhYtff6cMPBmDxTmDp8PVvEr', created=1765254151, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data: \\\\([1, 2, 3]\\\\).', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=18, prompt_tokens=178, total_tokens=196, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "Result: I successfully fetched the data: \\([1, 2, 3]\\). Retries: 3\n" + ] + } + ], + "source": [ + "call_count = 0\n", + "REQUIRED_RETRIES = 3\n", + "\n", + "@defop\n", + "def unstable_service() -> str:\n", + " \"\"\"Fetch data from an unstable external service. May require retries.\"\"\"\n", + " global call_count\n", + " call_count += 1\n", + " if call_count < REQUIRED_RETRIES:\n", + " raise ConnectionError(\n", + " f\"Service unavailable! Attempt {call_count}/{REQUIRED_RETRIES}. 
Please retry.\"\n", + " )\n", + " return \"{ 'status': 'ok', 'data': [1, 2, 3] }\"\n", + "\n", + "\n", + "@Template.define(tools=[unstable_service])\n", + "def fetch_data() -> str:\n", + " \"\"\"Use the unstable_service tool to fetch data.\"\"\"\n", + " raise NotImplementedError\n", + "\n", + "\n", + "retry_handler = RetryLLMHandler(max_retries=5, add_error_feedback=True)\n", + "\n", + "with handler(provider), handler(retry_handler), handler(llm_logger):\n", + " result = fetch_data()\n", + " print(f\"Result: {result}\", \"Retries:\", call_count)" + ] + }, + { + "cell_type": "markdown", + "id": "4ac00e01", + "metadata": {}, + "source": [ + "### Retrying with Validation Errors\n", + "As noted above, the `RetryHandler` can also be used to retry on runtime/validation error:" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "39b2b225", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Give a rating for Die Hard. The explanation MUST include the numeric score.'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-CkjbhOTBz1G18GHnmdx0IcPaqOIiB', created=1765254437, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":9,\"explanation\":\"Die Hard is often regarded as a quintessential action film, praised for its innovative story, memorable characters, and thrilling sequences. The film\\'s protagonist, John McClane, played by Bruce Willis, is celebrated for his relatable and everyman qualities, which set a new standard for action heroes. Additionally, Alan Rickman\\'s portrayal of the villain Hans Gruber is highly acclaimed for adding depth and sophistication to the antagonist role. 
The movie\\'s pace, witty dialogues, and suspenseful action have made it a beloved classic in the action genre. For these reasons, it deserves a high score of 9 out of 10.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=138, prompt_tokens=108, total_tokens=246, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Retry generating the following prompt: Give a rating for Die Hard. The explanation MUST include the numeric score.\\n\\nError from previous generation:\\n```\\nTraceback (most recent call last):\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 321, in _retry_completion\\n return fwd()\\n ^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 433, in __call__\\n return self_handler(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/opt/homebrew/Cellar/python@3.12/3.12.9/Frameworks/Python.framework/Versions/3.12/lib/python3.12/contextlib.py\", line 81, in inner\\n return func(*args, **kwds)\\n ^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 45, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 70, in bound_body\\n return body(*a, **k)\\n ^^^^^^^^^^^^^\\n File 
\"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 471, in _call\\n return decode_response(template, resp)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 449, in __call__\\n return class_apply(self, *args, **kwargs) # type: ignore[return-value]\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 474, in apply\\n return op.__default_rule__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 334, in __default_rule__\\n return self.__default__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 426, in decode_response\\n result = Result.model_validate_json(result_str)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/.venv/lib/python3.12/site-packages/pydantic/main.py\", line 766, in model_validate_json\\n return cls.__pydantic_validator__.validate_json(\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\npydantic_core._pydantic_core.ValidationError: 1 validation error for Response\\nvalue.score\\n score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]\\n```'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-CkjbkDlrwCHGvzQNh2wdT28L7j19N', created=1765254440, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":5,\"explanation\":\"Die Hard is a highly acclaimed action film widely regarded as a classic in its genre. 
It combines thrilling action sequences with a charismatic performance by Bruce Willis as the lead character. The film\\'s clever plot, high stakes, and memorable antagonist make it a favorite among action movie enthusiasts. Critics and audiences alike often rate it at the top end of action cinema, earning it a score of 5 out of 5.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=97, prompt_tokens=843, total_tokens=940, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "Score: 5/5\n", + "Explanation: Die Hard is a highly acclaimed action film widely regarded as a classic in its genre. It combines thrilling action sequences with a charismatic performance by Bruce Willis as the lead character. The film's clever plot, high stakes, and memorable antagonist make it a favorite among action movie enthusiasts. 
Critics and audiences alike often rate it at the top end of action cinema, earning it a score of 5 out of 5.\n" + ] + } + ], + "source": [ + "import pydantic\n", + "from pydantic import ValidationError, field_validator\n", + "from pydantic_core import PydanticCustomError\n", + "\n", + "\n", + "@pydantic.dataclasses.dataclass\n", + "class Rating:\n", + " score: int\n", + " explanation: str\n", + "\n", + " @field_validator(\"score\")\n", + " @classmethod\n", + " def check_score(cls, v):\n", + " if v < 1 or v > 5:\n", + " raise PydanticCustomError(\n", + " \"invalid_score\",\n", + " \"score must be 1–5, got {v}\",\n", + " {\"v\": v},\n", + " )\n", + " return v\n", + "\n", + " @field_validator(\"explanation\")\n", + " @classmethod\n", + " def check_explanation_contains_score(cls, v, info):\n", + " score = info.data.get(\"score\", None)\n", + " if score is not None and str(score) not in v:\n", + " raise PydanticCustomError(\n", + " \"invalid_explanation\",\n", + " \"explanation must mention the score {score}, got '{explanation}'\",\n", + " {\"score\": score, \"explanation\": v},\n", + " )\n", + " return v\n", + "\n", + "\n", + "@Template.define\n", + "def give_rating_for_movie(movie_name: str) -> Rating:\n", + " \"\"\"Give a rating for {movie_name}. 
class RetryLLMHandler(ObjectInterpretation):
    """Retry failed LLM template calls.

    Each failed attempt decrements the retry budget. Optionally, the prompt is
    extended with the previous attempt's traceback so the model can correct
    itself on the next try.

    Args:
        max_retries: Maximum number of attempts (not re-attempts): the
            template call is made at most ``max_retries`` times.
        add_error_feedback: Whether to append the previous failure's traceback
            to the prompt before retrying.
        exception_cls: The exception class that is *caught* and triggers a
            retry. Exceptions of other types propagate immediately. When the
            retry budget is exhausted, the last caught exception is re-raised
            unchanged.
    """

    def __init__(
        self,
        max_retries: int = 3,
        add_error_feedback: bool = False,
        exception_cls: type[BaseException] = Exception,
    ):
        self.max_retries = max_retries
        self.add_error_feedback = add_error_feedback
        self.exception_cls = exception_cls

    @implements(Template.__call__)
    def _retry_completion(self, template: Template, *args, **kwargs) -> Any:
        attempts_left = self.max_retries
        current_template = template
        while attempts_left > 0:
            try:
                return fwd(current_template, *args, **kwargs)
            except self.exception_cls as exn:
                attempts_left -= 1
                if attempts_left == 0:
                    # Budget exhausted: surface the last underlying error
                    # rather than wrapping it.
                    raise exn
                if self.add_error_feedback:
                    # Capture the full traceback so the model sees exactly
                    # what went wrong on the previous generation.
                    tb = traceback.format_exc()
                    prompt_ext = (
                        f"Retry generating the following prompt: {template.__prompt_template__}\n\n"
                        f"Error from previous generation:\n```\n{tb}```"
                    )
                    # Template is a dataclass; replace() gives us a copy with
                    # the extended prompt without mutating the original.
                    current_template = dataclasses.replace(
                        template, __prompt_template__=prompt_ext
                    )
                # Continue the loop to retry
        # Only reachable when max_retries <= 0: no attempt was ever made.
        raise Exception("Max retries reached")
def test_retry_handler_succeeds_after_failures():
    """Test that RetryLLMHandler retries and eventually succeeds."""
    # Provider fails twice, then returns; with max_retries=3 the third
    # attempt succeeds and its value is returned unchanged to the caller.
    provider = FailingThenSucceedingProvider(
        fail_count=2,
        success_response="Success after retries!",
        exception_factory=lambda: ValueError("Temporary failure"),
    )
    retry_handler = RetryLLMHandler(max_retries=3, exception_cls=ValueError)

    # Provider is installed first so the retry handler forwards to it.
    with handler(provider), handler(retry_handler):
        result = limerick("test")
        assert result == "Success after retries!"

    assert provider.call_count == 3  # 2 failures + 1 success


def test_retry_handler_exhausts_retries():
    """Test that RetryLLMHandler raises after max retries exhausted."""
    provider = FailingThenSucceedingProvider(
        fail_count=5,  # More failures than retries
        success_response="Never reached",
        exception_factory=lambda: ValueError("Persistent failure"),
    )
    retry_handler = RetryLLMHandler(max_retries=3, exception_cls=ValueError)

    # The handler re-raises the last underlying error once the budget is
    # spent, so the original ValueError (not a wrapper) is observed here.
    with pytest.raises(ValueError, match="Persistent failure"):
        with handler(provider), handler(retry_handler):
            limerick("test")

    assert provider.call_count == 3  # Should have tried 3 times


def test_retry_handler_only_catches_specified_exception():
    """Test that RetryLLMHandler only catches the specified exception class."""
    provider = FailingThenSucceedingProvider(
        fail_count=1,
        success_response="Success",
        exception_factory=lambda: TypeError("Wrong type"),  # Different exception type
    )
    retry_handler = RetryLLMHandler(max_retries=3, exception_cls=ValueError)

    # TypeError should not be caught, should propagate immediately
    with pytest.raises(TypeError, match="Wrong type"):
        with handler(provider), handler(retry_handler):
            limerick("test")

    assert provider.call_count == 1  # Should have only tried once


def test_retry_handler_with_error_feedback():
    """Test that RetryLLMHandler includes error feedback when enabled."""
    # Collected prompt templates, one entry per Template.__call__ attempt.
    call_prompts: list[str] = []

    class PromptCapturingProvider(ObjectInterpretation):
        """Provider that captures prompts and fails once."""

        def __init__(self):
            self.call_count = 0

        @implements(Template.__call__)
        def _call(self, template: Template, *args, **kwargs):
            self.call_count += 1
            # Record the prompt the retry handler actually presented.
            call_prompts.append(template.__prompt_template__)
            if self.call_count == 1:
                raise ValueError("First attempt failed")
            return "Success on retry"

    provider = PromptCapturingProvider()
    retry_handler = RetryLLMHandler(
        max_retries=2, add_error_feedback=True, exception_cls=ValueError
    )

    with handler(provider), handler(retry_handler):
        result = limerick("test")
        assert result == "Success on retry"

    assert len(call_prompts) == 2
    # First call has original prompt
    assert "Write a limerick on the theme of {theme}." in call_prompts[0]
    # Second call should include error feedback with traceback
    assert "Retry generating" in call_prompts[1]
    assert "First attempt failed" in call_prompts[1]
* wip * lint * Rework numpyro distribution handling to enable symbolic distributions and handling of distribution methods (#311) * refactor distribution operations * add a test for typeof of distributions * add tests for symbolic dists/arguments * introduce operations for distribution methods * comment * fix tests * work around https://github.com/BasisResearch/effectful/issues/310 * replace hack with new hack * tweak repr for _BaseOperation * lint * work around https://github.com/BasisResearch/effectful/issues/312 * clean up access to dist ops * wip * wip * add type annotations to get correct term conversion * lint * include distribution arguments as properties * fix distribution calls * try again * fixes * format * Box the output of `__type_rule__` (#387) * box the output of __type_rule__ * fix tests * fix tests * require callers of __type_rule__ to box arguments * fix * move Box out of ops.types * lint * fix test * fix syntactic_eq implementation for jax arrays (#405) * Fix recursion error in sizesof (#406) * fix recursion error in sizesof * format * Allow `_BaseOperation` subclasses to have an overrideable `apply` method (#414) * stash * fixes * initial * wip * lint * ensure each subclass has a fresh operation * wip * wip * lint * wip * wip * lint * refactor class method support * move defops * fix test * remove singledispatch case and add test * move definition * cleanup * simplify * cleanup * lint * fix failing test * fix classmethod * __isabstractmethod__ * revert --------- Co-authored-by: Eli * Try pulling in pyproject.toml from staging-llm to master (#425) * Generate instance-level `Operation`s for bound methods (#351) * generalize __get__ * nits * coverage of methoddescriptor api * methodtype * simplify * simplify * simplify * format * revert * restore * simplify * simplify * retain instance op on term construction * Simplify apply inheritance * assign * put call next to init_subclass * add explanatory comment * Operation.apply -> Operation.__apply__ * add 
test based on issue description * fix doctest * Fix dataclass @defops and added dataclass metaclass (#439) * fixed dataclass ordering and added metaclass for simplifying construction of dataclass terms * ensure term fields are not being overriden * added decorator and dataclass * updated to make defdata registration automatic * simplified dataclass loop * updated to give property op an appropriate name * added failing tests * fixed failing test * fixed numpyro/pyro/torch interfaces * minor fix + test for deffn kwargs --------- Co-authored-by: Jack Feser Co-authored-by: Tim Cooijmans Co-authored-by: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> --- effectful/handlers/jax/_terms.py | 2 +- effectful/handlers/numpyro.py | 80 ++++++------- effectful/handlers/pyro.py | 4 +- effectful/handlers/torch.py | 2 +- effectful/ops/semantics.py | 47 ++------ effectful/ops/syntax.py | 88 +++++++++++++- effectful/ops/types.py | 135 +++++++++++++++------ tests/test_ops_syntax.py | 196 +++++++++++++++++++++++++++---- 8 files changed, 415 insertions(+), 139 deletions(-) diff --git a/effectful/handlers/jax/_terms.py b/effectful/handlers/jax/_terms.py index 5817e484..989ec3d9 100644 --- a/effectful/handlers/jax/_terms.py +++ b/effectful/handlers/jax/_terms.py @@ -58,7 +58,7 @@ def jax_at_set(arr, index_key, val): @defdata.register(jax.Array) -def _embed_array(op, *args, **kwargs): +def _embed_array(ty, op, *args, **kwargs): if ( op is jax_getitem and not isinstance(args[0], Term) diff --git a/effectful/handlers/numpyro.py b/effectful/handlers/numpyro.py index 85ebc0d7..74010de4 100644 --- a/effectful/handlers/numpyro.py +++ b/effectful/handlers/numpyro.py @@ -395,7 +395,7 @@ def Cauchy(loc=0.0, scale=1.0, **kwargs) -> dist.Cauchy: @defdata.register(dist.Cauchy) class CauchyTerm(_DistributionTerm): - def __init__(self, op, loc, scale, **kwargs): + def __init__(self, ty, op, loc, scale, **kwargs): super().__init__(dist.Cauchy, op, loc, scale, **kwargs) self.loc = loc 
self.scale = scale @@ -413,7 +413,7 @@ def Gumbel(loc=0.0, scale=1.0, **kwargs) -> dist.Gumbel: @defdata.register(dist.Gumbel) class GumbelTerm(_DistributionTerm): - def __init__(self, op, loc, scale, **kwargs): + def __init__(self, ty, op, loc, scale, **kwargs): super().__init__(dist.Gumbel, op, loc, scale, **kwargs) self.loc = loc self.scale = scale @@ -431,7 +431,7 @@ def Laplace(loc=0.0, scale=1.0, **kwargs) -> dist.Laplace: @defdata.register(dist.Laplace) class LaplaceTerm(_DistributionTerm): - def __init__(self, op, loc, scale, **kwargs): + def __init__(self, ty, op, loc, scale, **kwargs): super().__init__(dist.Laplace, op, loc, scale, **kwargs) self.loc = loc self.scale = scale @@ -449,7 +449,7 @@ def LogNormal(loc=0.0, scale=1.0, **kwargs) -> dist.LogNormal: @defdata.register(dist.LogNormal) class LogNormalTerm(_DistributionTerm): - def __init__(self, op, loc, scale, **kwargs): + def __init__(self, ty, op, loc, scale, **kwargs): super().__init__(dist.LogNormal, op, loc, scale, **kwargs) self.loc = loc self.scale = scale @@ -467,7 +467,7 @@ def Logistic(loc=0.0, scale=1.0, **kwargs) -> dist.Logistic: @defdata.register(dist.Logistic) class LogisticTerm(_DistributionTerm): - def __init__(self, op, loc, scale, **kwargs): + def __init__(self, ty, op, loc, scale, **kwargs): super().__init__(dist.Logistic, op, loc, scale, **kwargs) self.loc = loc self.scale = scale @@ -485,7 +485,7 @@ def Normal(loc=0.0, scale=1.0, **kwargs) -> dist.Normal: @defdata.register(dist.Normal) class NormalTerm(_DistributionTerm): - def __init__(self, op, loc, scale, **kwargs): + def __init__(self, ty, op, loc, scale, **kwargs): super().__init__(dist.Normal, op, loc, scale, **kwargs) self.loc = loc self.scale = scale @@ -503,7 +503,7 @@ def StudentT(df, loc=0.0, scale=1.0, **kwargs) -> dist.StudentT: @defdata.register(dist.StudentT) class StudentTTerm(_DistributionTerm): - def __init__(self, op, df, loc, scale, **kwargs): + def __init__(self, ty, op, df, loc, scale, **kwargs): 
super().__init__(dist.StudentT, op, df, loc, scale, **kwargs) self.df = df self.loc = loc @@ -522,7 +522,7 @@ def BernoulliProbs(probs, **kwargs) -> dist.BernoulliProbs: @defdata.register(dist.BernoulliProbs) class BernoulliProbsTerm(_DistributionTerm): - def __init__(self, op, probs, **kwargs): + def __init__(self, ty, op, probs, **kwargs): super().__init__(dist.BernoulliProbs, op, probs, **kwargs) self.probs = probs @@ -539,7 +539,7 @@ def CategoricalProbs(probs, **kwargs) -> dist.CategoricalProbs: @defdata.register(dist.CategoricalProbs) class CategoricalProbsTerm(_DistributionTerm): - def __init__(self, op, probs, **kwargs): + def __init__(self, ty, op, probs, **kwargs): super().__init__(dist.CategoricalProbs, op, probs, **kwargs) self.probs = probs @@ -556,7 +556,7 @@ def GeometricProbs(probs, **kwargs) -> dist.GeometricProbs: @defdata.register(dist.GeometricProbs) class GeometricProbsTerm(_DistributionTerm): - def __init__(self, op, probs, **kwargs): + def __init__(self, ty, op, probs, **kwargs): super().__init__(dist.GeometricProbs, op, probs, **kwargs) self.probs = probs @@ -573,7 +573,7 @@ def BernoulliLogits(logits, **kwargs) -> dist.BernoulliLogits: @defdata.register(dist.BernoulliLogits) class BernoulliLogitsTerm(_DistributionTerm): - def __init__(self, op, logits, **kwargs): + def __init__(self, ty, op, logits, **kwargs): super().__init__(dist.BernoulliLogits, op, logits, **kwargs) self.logits = logits @@ -590,7 +590,7 @@ def CategoricalLogits(logits, **kwargs) -> dist.CategoricalLogits: @defdata.register(dist.CategoricalLogits) class CategoricalLogitsTerm(_DistributionTerm): - def __init__(self, op, logits, **kwargs): + def __init__(self, ty, op, logits, **kwargs): super().__init__(dist.CategoricalLogits, op, logits, **kwargs) self.logits = logits @@ -607,7 +607,7 @@ def GeometricLogits(logits, **kwargs) -> dist.GeometricLogits: @defdata.register(dist.GeometricLogits) class GeometricLogitsTerm(_DistributionTerm): - def __init__(self, op, logits, 
**kwargs): + def __init__(self, ty, op, logits, **kwargs): super().__init__(dist.GeometricLogits, op, logits, **kwargs) self.logits = logits @@ -624,7 +624,7 @@ def Beta(concentration1, concentration0, **kwargs) -> dist.Beta: @defdata.register(dist.Beta) class BetaTerm(_DistributionTerm): - def __init__(self, op, concentration1, concentration0, **kwargs): + def __init__(self, ty, op, concentration1, concentration0, **kwargs): super().__init__(dist.Beta, op, concentration1, concentration0, **kwargs) self.concentration1 = concentration1 self.concentration0 = concentration0 @@ -642,7 +642,7 @@ def Kumaraswamy(concentration1, concentration0, **kwargs) -> dist.Kumaraswamy: @defdata.register(dist.Kumaraswamy) class KumaraswamyTerm(_DistributionTerm): - def __init__(self, op, concentration1, concentration0, **kwargs): + def __init__(self, ty, op, concentration1, concentration0, **kwargs): super().__init__(dist.Kumaraswamy, op, concentration1, concentration0, **kwargs) self.concentration1 = concentration1 self.concentration0 = concentration0 @@ -660,7 +660,7 @@ def BinomialProbs(probs, total_count=1, **kwargs) -> dist.BinomialProbs: @defdata.register(dist.BinomialProbs) class BinomialProbsTerm(_DistributionTerm): - def __init__(self, op, probs, total_count, **kwargs): + def __init__(self, ty, op, probs, total_count, **kwargs): super().__init__(dist.BinomialProbs, op, probs, total_count, **kwargs) self.probs = probs self.total_count = total_count @@ -678,7 +678,7 @@ def NegativeBinomialProbs(total_count, probs, **kwargs) -> dist.NegativeBinomial @defdata.register(dist.NegativeBinomialProbs) class NegativeBinomialProbsTerm(_DistributionTerm): - def __init__(self, op, total_count, probs, **kwargs): + def __init__(self, ty, op, total_count, probs, **kwargs): super().__init__(dist.NegativeBinomialProbs, op, total_count, probs, **kwargs) self.total_count = total_count self.probs = probs @@ -698,7 +698,7 @@ def MultinomialProbs(probs, total_count=1, **kwargs) -> 
dist.MultinomialProbs: @defdata.register(dist.MultinomialProbs) class MultinomialProbsTerm(_DistributionTerm): - def __init__(self, op, probs, total_count, **kwargs): + def __init__(self, ty, op, probs, total_count, **kwargs): super().__init__(dist.MultinomialProbs, op, probs, total_count, **kwargs) self.probs = probs self.total_count = total_count @@ -716,7 +716,7 @@ def BinomialLogits(logits, total_count=1, **kwargs) -> dist.BinomialLogits: @defdata.register(dist.BinomialLogits) class BinomialLogitsTerm(_DistributionTerm): - def __init__(self, op, logits, total_count, **kwargs): + def __init__(self, ty, op, logits, total_count, **kwargs): super().__init__(dist.BinomialLogits, op, logits, total_count, **kwargs) self.logits = logits self.total_count = total_count @@ -736,7 +736,7 @@ def NegativeBinomialLogits( @defdata.register(dist.NegativeBinomialLogits) class NegativeBinomialLogitsTerm(_DistributionTerm): - def __init__(self, op, total_count, logits, **kwargs): + def __init__(self, ty, op, total_count, logits, **kwargs): super().__init__(dist.NegativeBinomialLogits, op, total_count, logits, **kwargs) self.total_count = total_count self.logits = logits @@ -756,7 +756,7 @@ def MultinomialLogits(logits, total_count=1, **kwargs) -> dist.MultinomialLogits @defdata.register(dist.MultinomialLogits) class MultinomialLogitsTerm(_DistributionTerm): - def __init__(self, op, logits, total_count, **kwargs): + def __init__(self, ty, op, logits, total_count, **kwargs): super().__init__(dist.MultinomialLogits, op, logits, total_count, **kwargs) self.logits = logits self.total_count = total_count @@ -774,7 +774,7 @@ def Chi2(df, **kwargs) -> dist.Chi2: @defdata.register(dist.Chi2) class Chi2Term(_DistributionTerm): - def __init__(self, op, df, **kwargs): + def __init__(self, ty, op, df, **kwargs): super().__init__(dist.Chi2, op, df, **kwargs) self.df = df @@ -791,7 +791,7 @@ def Dirichlet(concentration, **kwargs) -> dist.Dirichlet: @defdata.register(dist.Dirichlet) class 
DirichletTerm(_DistributionTerm): - def __init__(self, op, concentration, **kwargs): + def __init__(self, ty, op, concentration, **kwargs): super().__init__(dist.Dirichlet, op, concentration, **kwargs) self.concentration = concentration @@ -810,7 +810,7 @@ def DirichletMultinomial( @defdata.register(dist.DirichletMultinomial) class DirichletMultinomialTerm(_DistributionTerm): - def __init__(self, op, concentration, total_count, **kwargs): + def __init__(self, ty, op, concentration, total_count, **kwargs): super().__init__( dist.DirichletMultinomial, op, concentration, total_count, **kwargs ) @@ -832,7 +832,7 @@ def Exponential(rate=1.0, **kwargs) -> dist.Exponential: @defdata.register(dist.Exponential) class ExponentialTerm(_DistributionTerm): - def __init__(self, op, rate, **kwargs): + def __init__(self, ty, op, rate, **kwargs): super().__init__(dist.Exponential, op, rate, **kwargs) self.rate = rate @@ -849,7 +849,7 @@ def Poisson(rate, **kwargs) -> dist.Poisson: @defdata.register(dist.Poisson) class PoissonTerm(_DistributionTerm): - def __init__(self, op, rate, **kwargs): + def __init__(self, ty, op, rate, **kwargs): super().__init__(dist.Poisson, op, rate, **kwargs) self.rate = rate @@ -866,7 +866,7 @@ def Gamma(concentration, rate=1.0, **kwargs) -> dist.Gamma: @defdata.register(dist.Gamma) class GammaTerm(_DistributionTerm): - def __init__(self, op, concentration, rate, **kwargs): + def __init__(self, ty, op, concentration, rate, **kwargs): super().__init__(dist.Gamma, op, concentration, rate, **kwargs) self.concentration = concentration self.rate = rate @@ -884,7 +884,7 @@ def HalfCauchy(scale=1.0, **kwargs) -> dist.HalfCauchy: @defdata.register(dist.HalfCauchy) class HalfCauchyTerm(_DistributionTerm): - def __init__(self, op, scale, **kwargs): + def __init__(self, ty, op, scale, **kwargs): super().__init__(dist.HalfCauchy, op, scale, **kwargs) self.scale = scale @@ -901,7 +901,7 @@ def HalfNormal(scale=1.0, **kwargs) -> dist.HalfNormal: 
@defdata.register(dist.HalfNormal) class HalfNormalTerm(_DistributionTerm): - def __init__(self, op, scale, **kwargs): + def __init__(self, ty, op, scale, **kwargs): super().__init__(dist.HalfNormal, op, scale, **kwargs) self.scale = scale @@ -918,7 +918,7 @@ def LKJCholesky(dim, concentration=1.0, **kwargs) -> dist.LKJCholesky: @defdata.register(dist.LKJCholesky) class LKJCholeskyTerm(_DistributionTerm): - def __init__(self, op, dim, concentration, **kwargs): + def __init__(self, ty, op, dim, concentration, **kwargs): super().__init__(dist.LKJCholesky, op, dim, concentration, **kwargs) self.dim = dim self.concentration = concentration @@ -939,7 +939,7 @@ def MultivariateNormal( @defdata.register(dist.MultivariateNormal) class MultivariateNormalTerm(_DistributionTerm): def __init__( - self, op, loc, covariance_matrix, precision_matrix, scale_tril, **kwargs + self, ty, op, loc, covariance_matrix, precision_matrix, scale_tril, **kwargs ): super().__init__( dist.MultivariateNormal, @@ -972,7 +972,7 @@ def Pareto(scale, alpha, **kwargs) -> dist.Pareto: @defdata.register(dist.Pareto) class ParetoTerm(_DistributionTerm): - def __init__(self, op, scale, alpha, **kwargs): + def __init__(self, ty, op, scale, alpha, **kwargs): super().__init__(dist.Pareto, op, scale, alpha, **kwargs) self.scale = scale self.alpha = alpha @@ -990,7 +990,7 @@ def Uniform(low=0.0, high=1.0, **kwargs) -> dist.Uniform: @defdata.register(dist.Uniform) class UniformTerm(_DistributionTerm): - def __init__(self, op, low, high, **kwargs): + def __init__(self, ty, op, low, high, **kwargs): super().__init__(dist.Uniform, op, low, high, **kwargs) self.low = low self.high = high @@ -1008,7 +1008,7 @@ def VonMises(loc, concentration, **kwargs) -> dist.VonMises: @defdata.register(dist.VonMises) class VonMisesTerm(_DistributionTerm): - def __init__(self, op, loc, concentration, **kwargs): + def __init__(self, ty, op, loc, concentration, **kwargs): super().__init__(dist.VonMises, op, loc, concentration, 
**kwargs) self.loc = loc self.concentration = concentration @@ -1026,7 +1026,7 @@ def Weibull(scale, concentration, **kwargs) -> dist.Weibull: @defdata.register(dist.Weibull) class WeibullTerm(_DistributionTerm): - def __init__(self, op, scale, concentration, **kwargs): + def __init__(self, ty, op, scale, concentration, **kwargs): super().__init__(dist.Weibull, op, scale, concentration, **kwargs) self.scale = scale self.concentration = concentration @@ -1044,7 +1044,7 @@ def Wishart(df, scale_tril, **kwargs) -> dist.Wishart: @defdata.register(dist.Wishart) class WishartTerm(_DistributionTerm): - def __init__(self, op, df, scale_tril, **kwargs): + def __init__(self, ty, op, df, scale_tril, **kwargs): super().__init__(dist.Wishart, op, df, scale_tril, **kwargs) self.df = df self.scale_tril = scale_tril @@ -1062,7 +1062,7 @@ def Delta(v=0.0, log_density=0.0, event_dim=0, **kwargs) -> dist.Delta: @defdata.register(dist.Delta) class DeltaTerm(_DistributionTerm): - def __init__(self, op, v, log_density, event_dim, **kwargs): + def __init__(self, ty, op, v, log_density, event_dim, **kwargs): super().__init__(dist.Delta, op, v, log_density, event_dim, **kwargs) self.v = v self.log_density = log_density @@ -1082,7 +1082,7 @@ def LowRankMultivariateNormal( @defdata.register(dist.LowRankMultivariateNormal) class LowRankMultivariateNormalTerm(_DistributionTerm): - def __init__(self, op, loc, cov_factor, cov_diag, **kwargs): + def __init__(self, ty, op, loc, cov_factor, cov_diag, **kwargs): super().__init__( dist.LowRankMultivariateNormal, op, loc, cov_factor, cov_diag, **kwargs ) @@ -1107,7 +1107,7 @@ def RelaxedBernoulliLogits( @defdata.register(dist.RelaxedBernoulliLogits) class RelaxedBernoulliLogitsTerm(_DistributionTerm): - def __init__(self, op, temperature, logits, **kwargs): + def __init__(self, ty, op, temperature, logits, **kwargs): super().__init__(dist.RelaxedBernoulliLogits, op, temperature, logits, **kwargs) self.temperature = temperature self.logits = logits @@ 
-1127,7 +1127,7 @@ def Independent(base_dist, reinterpreted_batch_ndims, **kwargs) -> dist.Independ @defdata.register(dist.Independent) class IndependentTerm(_DistributionTerm): - def __init__(self, op, base_dist, reinterpreted_batch_ndims, **kwargs): + def __init__(self, ty, op, base_dist, reinterpreted_batch_ndims, **kwargs): super().__init__( dist.Independent, op, base_dist, reinterpreted_batch_ndims, **kwargs ) diff --git a/effectful/handlers/pyro.py b/effectful/handlers/pyro.py index 13399d1d..2e792421 100644 --- a/effectful/handlers/pyro.py +++ b/effectful/handlers/pyro.py @@ -449,7 +449,9 @@ class _DistributionTerm(Term[TorchDistribution], TorchDistribution): _args: tuple _kwargs: dict - def __init__(self, op: Operation[Any, TorchDistribution], *args, **kwargs): + def __init__( + self, ty: type, op: Operation[Any, TorchDistribution], *args, **kwargs + ): self._op = op self._args = args self._kwargs = kwargs diff --git a/effectful/handlers/torch.py b/effectful/handlers/torch.py index 57f91b9f..24dcbe84 100644 --- a/effectful/handlers/torch.py +++ b/effectful/handlers/torch.py @@ -323,7 +323,7 @@ def torch_getitem(x: torch.Tensor, key: tuple[IndexElement, ...]) -> torch.Tenso @defdata.register(torch.Tensor) -def _embed_tensor(op, *args, **kwargs): +def _embed_tensor(ty, op, *args, **kwargs): if ( op is torch_getitem and not isinstance(args[0], Term) diff --git a/effectful/ops/semantics.py b/effectful/ops/semantics.py index 879e56ad..57d8de64 100644 --- a/effectful/ops/semantics.py +++ b/effectful/ops/semantics.py @@ -15,37 +15,7 @@ Term, ) - -@defop -def apply[**P, T](op: Operation[P, T], *args: P.args, **kwargs: P.kwargs) -> T: - """Apply ``op`` to ``args``, ``kwargs`` in interpretation ``intp``. - - Handling :func:`apply` changes the evaluation strategy of terms. - - **Example usage**: - - >>> @defop - ... def add(x: int, y: int) -> int: - ... return x + y - >>> @defop - ... def mul(x: int, y: int) -> int: - ... 
return x * y - - ``add`` and ``mul`` have default rules, so this term evaluates: - - >>> mul(add(1, 2), 3) - 9 - - By installing an :func:`apply` handler, we capture the term instead: - - >>> from effectful.ops.syntax import defdata - >>> with handler({apply: defdata}): - ... term = mul(add(1, 2), 3) - >>> print(str(term)) - mul(add(1, 2), 3) - - """ - return op.__default_rule__(*args, **kwargs) # type: ignore +apply = Operation.__apply__ @defop @@ -231,6 +201,13 @@ def evaluate[T]( if intp is not None: return interpreter(intp)(evaluate)(expr) + return __dispatch(type(expr))(expr) + + +@evaluate.register(object) +@evaluate.register(str) +@evaluate.register(bytes) +def _evaluate_object[T](expr: T, **kwargs) -> T: if dataclasses.is_dataclass(expr) and not isinstance(expr, type): return typing.cast( T, @@ -242,14 +219,6 @@ def evaluate[T]( }, ), ) - - return __dispatch(type(expr))(expr) - - -@evaluate.register(object) -@evaluate.register(str) -@evaluate.register(bytes) -def _evaluate_object[T](expr: T, **kwargs) -> T: return expr diff --git a/effectful/ops/syntax.py b/effectful/ops/syntax.py index 2e43d6c7..eee5c4a5 100644 --- a/effectful/ops/syntax.py +++ b/effectful/ops/syntax.py @@ -471,6 +471,7 @@ def defdata[T]( class _CallableTerm[**P, T](Term[collections.abc.Callable[P, T]]): def __init__( self, + ty: type, op: Operation[..., T], *args: Expr, **kwargs: Expr, @@ -526,7 +527,7 @@ def apply_cast(op, *args, **kwargs): assert isinstance(op, Operation) full_type = typ() dispatch_type = _simple_type(full_type.value) - return __dispatch(dispatch_type)(op, *args, **kwargs) + return __dispatch(dispatch_type)(dispatch_type, op, *args, **kwargs) analysis = productN({typ: {apply: apply_type}, cast: {apply: apply_cast}}) @@ -563,7 +564,35 @@ def evaluate_with_renaming(expr, ctx): return _unpack(result, cast) +def _construct_dataclass_term[T]( + cls: type[T], op: Operation[..., T], *args: Expr, **kwargs: Expr +) -> Term[T]: + """ + Constructs a term wrapping an operation that 
class _DataclassTermMeta(type(_BaseTerm)):  # type: ignore
    """Metaclass that synthesizes Term subclasses for dataclass types.

    Given bases ``(Term, SomeDataclass)``, it rewrites the class namespace so
    that each dataclass field becomes a handleable property operation and the
    standard term accessors (``op``/``args``/``kwargs``) are provided.
    """

    def __new__(mcls, name, bases, ns):
        # The base layout is fixed: exactly (Term, <dataclass>); the field
        # loop and the op/args/kwargs checks below rely on it.
        assert len(bases) == 2, (
            "_DataclassTermMeta subclasses must inherit from two classes exactly"
        )
        assert bases[0] == Term, (
            "expected _DataclassTermMeta subclass to inherit from Term"
        )
        assert dataclasses.is_dataclass(bases[1]), (
            "_DataclassTermMeta must inherit from a dataclass"
        )

        base_dt = bases[1]

        # Replace each dataclass field with a property backed by a defop, so
        # field access on a term is itself a handleable operation.
        for f in dataclasses.fields(base_dt):
            attr = f.name
            field_type = f.type

            # Factory closes over (a, return_type) to avoid the classic
            # late-binding-in-a-loop bug.
            def make_getter(a, return_type: type):
                def getter(self) -> return_type:  # type: ignore
                    # On a Term the value is symbolic: defer to the
                    # operation's handlers instead of reading state.
                    if isinstance(self, Term):
                        raise NotHandled
                    return self.__dict__[a]

                return getter

            g = make_getter(attr, field_type)
            g.__name__ = attr
            ns[attr] = property(defop(g, name=f"{name}.{attr}"))

        # Term-style constructor: store the operation and its arguments;
        # ``ty`` is accepted (for the defdata calling convention) but unused.
        def __init__(self, ty, op, *args, **kwargs):
            self._op = op
            self._args = args
            self._kwargs = kwargs

        ns["__init__"] = __init__

        # The accessor names below would collide with dataclass fields of the
        # same name, so reject such dataclasses up front.
        field_names = {f.name for f in dataclasses.fields(base_dt)}
        for op in ["op", "args", "kwargs"]:
            assert op not in field_names, f"Dataclass can not contain field {op}"

        ns["op"] = property(lambda self: self._op)
        ns["args"] = property(lambda self: self._args)
        ns["kwargs"] = property(lambda self: self._kwargs)

        return super().__new__(mcls, name, bases, ns)
str = f"__instanceop_{name}" + + def __get__[T](self, instance: T | None, owner: type[T] | None = None): + if hasattr(instance, "__dict__") and hasattr(self, "_name_on_instance"): + from effectful.ops.semantics import fvsof + + if self._name_on_instance in instance.__dict__: + return instance.__dict__[self._name_on_instance] + elif isinstance(instance, Term) or fvsof(instance): + return types.MethodType(self, instance) + else: + + @functools.wraps(self) + def _instance_op(instance, *args, **kwargs): + from effectful.ops.syntax import defdata + + default_result = self(instance, *args, **kwargs) + if ( + isinstance(default_result, Term) + and default_result.op is self + and isinstance(self.__get__(default_result.args[0]), Operation) + ): + # Given a term cls_op(instance, *args, **kwargs), + # such that instance_op = cls_op.__get__(instance), + # rewrite to a new term instance_op(*args, **kwargs) + # so that the instance-specific operation reappears + # in the final term and is therefore visible to evaluate() + return defdata( + self.__get__(default_result.args[0]), + *default_result.args[1:], + **default_result.kwargs, + ) + else: + return default_result + + instance_op = self.define(types.MethodType(_instance_op, instance)) + instance.__dict__[self._name_on_instance] = instance_op + return instance_op + elif instance is not None: + return types.MethodType(self, instance) + else: + return self + def __call__(self, *args: Q.args, **kwargs: Q.kwargs) -> V: from effectful.internals.runtime import get_interpretation - from effectful.ops.semantics import apply intp = get_interpretation() self_handler = intp.get(self) if self_handler is not None: return self_handler(*args, **kwargs) + elif args and isinstance(args[0], Operation) and self is args[0].__apply__: + # Prevent infinite recursion when calling self.apply directly + return self.__default__(*args, **kwargs) + else: + return self.__apply__(self, *args, **kwargs) - class_apply_handler = intp.get(type(self).apply) - if 
class_apply_handler is not None: - return class_apply_handler(self, *args, **kwargs) + def __init_subclass__(cls, **kwargs) -> None: + assert "__apply__" not in cls.__dict__ or cls is Operation, ( + "Cannot manually override apply" + ) + assert isinstance(cls.__apply__, Operation) + + cls.__apply__ = cls.__apply__.define( + staticmethod( + functools.wraps(cls.__apply__)( + functools.partial( + lambda app, op, *args, **kwargs: app(op, *args, **kwargs), + cls.__apply__, + ) + ) + ) + ) - global_apply_handler = intp.get(apply) - if global_apply_handler is not None: - return global_apply_handler(self, *args, **kwargs) - # Use type(self) instead of self because we do not want a bound method - class_apply = type(self).apply +def __apply__[**A, B](op: Operation[A, B], *args: A.args, **kwargs: A.kwargs) -> B: + """Apply ``op`` to ``args``, ``kwargs`` in interpretation ``intp``. - # In Operation, cls.apply is a classmethod. In subclasses, it is an operation. - if isinstance(class_apply, Operation): - return class_apply.__default_rule__(self, *args, **kwargs) # type: ignore[return-value] - return class_apply(self, *args, **kwargs) # type: ignore[return-value] + Handling :func:`Operation.__apply__` changes the evaluation strategy of terms. - def __repr__(self): - return f"{self.__class__.__name__}({self.__name__}, {self.__signature__})" + **Example usage**: - def __str__(self): - return self.__name__ + >>> @Operation.define + ... def add(x: int, y: int) -> int: + ... return x + y + >>> @Operation.define + ... def mul(x: int, y: int) -> int: + ... 
return x * y - def __get__(self, instance, owner): - if instance is not None: - # This is an instance-level operation, so we need to bind the instance - return types.MethodType(self, instance) - else: - # This is a static operation, so we return the operation itself - return self + ``add`` and ``mul`` have default rules, so this term evaluates: - @classmethod - def apply[**A, B]( - cls, op: "Operation[A, B]", *args: A.args, **kwargs: A.kwargs - ) -> "Expr[B]": - """Apply an operation to arguments. + >>> mul(add(1, 2), 3) + 9 - In subclasses of Operation, `apply` is an operation that may be handled. + By installing an :func:`Operation.__apply__` handler, we capture the term instead: + + >>> from effectful.ops.syntax import defdata + >>> from effectful.ops.semantics import handler + >>> with handler({Operation.__apply__: defdata}): + ... term = mul(add(1, 2), 3) + >>> print(str(term)) + mul(add(1, 2), 3) + + """ + return op.__default_rule__(*args, **kwargs) # type: ignore[return-value] - """ - return op.__default_rule__(*args, **kwargs) - def __init_subclass__(cls, **kwargs): - super().__init_subclass__(**kwargs) - cls.apply = cls.define(cls.apply, name=f"{cls.__name__}_apply") +Operation.__apply__ = Operation.define(staticmethod(__apply__)) +del __apply__ if typing.TYPE_CHECKING: diff --git a/tests/test_ops_syntax.py b/tests/test_ops_syntax.py index b2261319..fb52a392 100644 --- a/tests/test_ops_syntax.py +++ b/tests/test_ops_syntax.py @@ -217,6 +217,20 @@ def test_term_str(): assert str(deffn(x1() + x2(), x1)) == "deffn(__add__(x(), x!1()), x)" +def test_deffn_keyword_args(): + x, y = defop(int, name="x"), defop(int, name="y") + term = deffn(2 * x() + y(), x, y=y) + + assert isinstance(term, Term) + assert term.op is deffn + + result = term(3, y=4) + assert result == 10 + + result2 = term(5) + assert isinstance(result2, Term) + + def test_defdata_renaming(): @defop def Let[S, T, A, B]( @@ -306,11 +320,7 @@ def my_method(self, x: int) -> int: assert isinstance(term, 
Term) assert isinstance(term.op, Operation) - assert term.op.__name__ == "my_method" - assert term.args == ( - instance, - 5, - ) + assert term.args == (5,) assert term.kwargs == {} # Ensure the operation is unique @@ -363,7 +373,8 @@ def test_defop_setattr_class() -> None: class MyClass: my_op: ClassVar[Operation] - @defop + @defop # type: ignore + @staticmethod def my_op(x: int) -> int: raise NotHandled @@ -372,11 +383,10 @@ def my_op(x: int) -> int: tm = MyClass.my_op(5) assert isinstance(tm, Term) assert isinstance(tm.op, Operation) - assert tm.op is my_op + assert tm.op is MyClass.my_op assert tm.args == (5,) - with pytest.raises(TypeError): - MyClass().my_op(5) + MyClass().my_op(5) def test_defop_classmethod(): @@ -480,10 +490,7 @@ def _(self, x: bool) -> bool: assert isinstance(term_float, Term) assert term_float.op.__name__ == "my_singledispatch" - assert term_float.args == ( - instance, - 1.5, - ) + assert term_float.args == (1.5,) assert term_float.kwargs == {} # Test that the method can be called with a handler @@ -909,9 +916,9 @@ class TestOperation(Operation): class OtherOperation(Operation): pass - assert isinstance(TestOperation.apply, Operation) - assert isinstance(OtherOperation.apply, Operation) - assert TestOperation.apply != OtherOperation.apply + assert isinstance(TestOperation.__apply__, Operation) + assert isinstance(OtherOperation.__apply__, Operation) + assert TestOperation.__apply__ != OtherOperation.__apply__ @TestOperation.define def my_func(a, b): @@ -938,10 +945,10 @@ def _other_operation_apply(op, a, b): assert my_func(3, 4) == "" # Handling the class apply works - with handler({TestOperation.apply: _test_operation_apply}): + with handler({TestOperation.__apply__: _test_operation_apply}): assert my_func(3, 4) == "" - with handler({OtherOperation.apply: _other_operation_apply}): + with handler({OtherOperation.__apply__: _other_operation_apply}): assert my_func(3, 4) == "" # Handling global apply works @@ -949,11 +956,11 @@ def 
_other_operation_apply(op, a, b): assert my_func(3, 4) == "" # Handling the operation takes precedence over the class apply - with handler({TestOperation.apply: _test_operation_apply, my_func: _my_func}): + with handler({TestOperation.__apply__: _test_operation_apply, my_func: _my_func}): assert my_func(3, 4) == "" # Handling the class apply takes precedence over the global apply - with handler({apply: _apply, TestOperation.apply: _test_operation_apply}): + with handler({apply: _apply, TestOperation.__apply__: _test_operation_apply}): assert my_func(3, 4) == "" # Handling the operation takes precedence over the global apply @@ -962,6 +969,153 @@ def _other_operation_apply(op, a, b): # Handling the operation takes precedence over the class apply and the global apply with handler( - {apply: _apply, my_func: _my_func, TestOperation.apply: _test_operation_apply} + { + apply: _apply, + my_func: _my_func, + TestOperation.__apply__: _test_operation_apply, + } ): assert my_func(3, 4) == "" + + +def test_operation_subclass_inheritance(): + class BaseOperation(Operation): + pass + + class SubOperation(BaseOperation): + pass + + @BaseOperation.define + def base_op(x): + return f"base_op: {x}" + + @SubOperation.define + def sub_op(x): + return f"sub_op: {x}" + + assert base_op(1) == "base_op: 1" + + with handler({base_op: lambda x: f"handled base_op: {x}"}): + assert base_op(2) == "handled base_op: 2" + + with handler( + { + SubOperation.__apply__: lambda op, + x, + **kwargs: f"handled SubOperation: {op} {x}" + } + ): + assert sub_op(3) == f"handled SubOperation: {sub_op} 3" + assert base_op(4) == "base_op: 4" + + with handler( + { + BaseOperation.__apply__: lambda op, + x, + **kwargs: f"handled BaseOperation: {op} {x}" + } + ): + assert sub_op(4) == f"handled BaseOperation: {sub_op} 4" + assert base_op(5) == f"handled BaseOperation: {base_op} 5" + + with handler( + { + SubOperation.__apply__: lambda op, + x, + **kwargs: f"handled SubOperation: {op} {x}", + 
BaseOperation.__apply__: lambda op, + x, + **kwargs: f"handled BaseOperation: {op} {x}", + } + ): + assert sub_op(6) == f"handled SubOperation: {sub_op} 6" + assert base_op(7) == f"handled BaseOperation: {base_op} 7" + + +def test_operation_instances(): + """Test that defop on methods creates instance-level Operations. + + When defop is used on a method, accessing it on an instance should + dynamically create a new instance-level Operation that is bound to + that instance. The default behavior of an unhandled instance-level + Operation should be to call the class-level Operation. + """ + + class Foo[T]: + @defop + def bar(self, x: T) -> T: + raise NotHandled + + foo1, foo2 = Foo(), Foo() + + # All of Foo.bar, foo1.bar, foo2.bar should be Operations + assert isinstance(Foo.bar, Operation) + assert isinstance(foo1.bar, Operation) + assert isinstance(foo2.bar, Operation) + + # Instance-level operations are created once per instance (cached) + assert foo1.bar is foo1.bar + assert foo2.bar is foo2.bar + + # Class-level and instance-level operations are distinct + assert Foo.bar is not foo1.bar + assert Foo.bar is not foo2.bar + assert foo1.bar is not foo2.bar + + # Default behavior: unhandled instance-level operation calls class-level operation + def Foo_bar_impl(self, x): + return f"Foo.bar({self}, {x})" + + def foo1_bar_impl(x): + return f"foo1.bar({x})" + + with handler({Foo.bar: Foo_bar_impl}): + # foo1.bar is handled separately, does not call Foo.bar + with handler({foo1.bar: foo1_bar_impl}): + assert foo1.bar(42) == "foo1.bar(42)" + # foo2.bar is unhandled, so it should call Foo.bar + assert foo2.bar(42) == f"Foo.bar({foo2}, 42)" + + # Without the inner handler, foo1.bar should also call Foo.bar + assert foo1.bar(42) == f"Foo.bar({foo1}, 42)" + + +def test_operation_dataclass(): + @dataclasses.dataclass + class Point: + x: int + y: int + + @defop + def random_point() -> Point: + raise NotHandled + + @defop + def id[T](base: T) -> T: + raise NotHandled + + def 
client(): + p1 = random_point() + p2 = random_point() + return p1.x + p2.x + + p = random_point() + assert isinstance(p, Term) + assert isinstance(p, Point) + + t = client() + assert isinstance(t, Term) + + assert isinstance(id(Point(0, 0)).x, Term) + + +def test_operation_dataclass_generic(): + @dataclasses.dataclass + class A: + x: int + + @defop + def id[T](base: T) -> T: + raise NotHandled + + assert isinstance(id(A(0)).x, Term) From 44d7d1208583487e810838fae7155f36bf0c5e50 Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Mon, 15 Dec 2025 11:08:33 -0500 Subject: [PATCH 22/39] Implements a unified `encode`ing/`decode`ing pipeline for `llm` (#442) * implemented unified encoding type * implemented decoding * unified __init__ * added tests for basemodels * s/@property/@functools.cached_property/ * type for encode and decode * removed handling for numbers.Number and explicit tests for complex * fixed is_dataclass checks * updated to check parameter annotations in Tool.of_operation constructor * updated serializer to be more selective in what is an image * reducing number of #type: ignores, and switching to typing.Any * removed comment * dropped dataclass support * dropped tests for dataclass with image * updated dataclass tests to stop assuming pydantic models * test for tool that returns list of images * made serialization a parameter of encodable and thus type-directed * dropped test for tool that returns list of images * dropped registration of encodable types * dropped unused typevar * s/_Encodable/EncodableAs/ --- effectful/handlers/llm/encoding.py | 239 ++++++++++ effectful/handlers/llm/providers.py | 218 ++++----- tests/test_handlers_llm_encoding.py | 709 ++++++++++++++++++++++++++++ tests/test_handlers_llm_provider.py | 133 +++++- 4 files changed, 1190 insertions(+), 109 deletions(-) create mode 100644 effectful/handlers/llm/encoding.py create mode 100644 tests/test_handlers_llm_encoding.py diff --git 
a/effectful/handlers/llm/encoding.py b/effectful/handlers/llm/encoding.py new file mode 100644 index 00000000..fe8b7e21 --- /dev/null +++ b/effectful/handlers/llm/encoding.py @@ -0,0 +1,239 @@ +import base64 +import io +import typing +from abc import ABC, abstractmethod +from collections.abc import Callable + +import pydantic +from litellm import ( + ChatCompletionImageUrlObject, + OpenAIMessageContentListBlock, +) +from PIL import Image + +from effectful.ops.syntax import _CustomSingleDispatchCallable + + +def _pil_image_to_base64_data(pil_image: Image.Image) -> str: + buf = io.BytesIO() + pil_image.save(buf, format="PNG") + return base64.b64encode(buf.getvalue()).decode("utf-8") + + +def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: + return f"data:image/png;base64,{_pil_image_to_base64_data(pil_image)}" + + +class EncodableAs[T, U](ABC): + t: type[U] + + def __init__(self, *args, **kwargs): + pass + + @classmethod + @abstractmethod + def encode(cls, vl: T) -> U: + pass + + @classmethod + @abstractmethod + def decode(cls, vl: U) -> T: + pass + + @classmethod + def serialize(cls, value: U) -> list[OpenAIMessageContentListBlock]: + return [{"type": "text", "text": str(value)}] + + +class Encodable[T](EncodableAs[T, type]): + t = type + + +@_CustomSingleDispatchCallable +def type_to_encodable_type[T]( + __dispatch: Callable[[type[T]], Callable[..., Encodable[T]]], ty: type[T] +) -> Encodable[T]: + origin_ty = typing.get_origin(ty) or ty + return __dispatch(origin_ty)(ty) + + +@type_to_encodable_type.register(object) +def _type_encodable_type_base[T](ty: type[T]) -> Encodable[T]: + class BaseEncodable(EncodableAs[T, T]): + t: type[T] = ty + + @classmethod + def encode(cls, vl: T) -> T: + return vl + + @classmethod + def decode(cls, vl: T) -> T: + return vl + + return typing.cast(Encodable[T], BaseEncodable()) + + +@type_to_encodable_type.register(pydantic.BaseModel) +def _type_encodable_type_pydantic_base_model[T: pydantic.BaseModel]( + ty: type[T], 
+) -> Encodable[T]: + class EncodablePydanticBaseModel(EncodableAs[T, T]): + t: type[T] = ty + + @classmethod + def decode(cls, vl: T) -> T: + return vl + + @classmethod + def encode(cls, vl: T) -> T: + return vl + + @classmethod + def serialize(cls, vl: T) -> list[OpenAIMessageContentListBlock]: + return [{"type": "text", "text": vl.model_dump_json()}] + + return typing.cast(Encodable[T], EncodablePydanticBaseModel()) + + +@type_to_encodable_type.register(Image.Image) +class EncodableImage(EncodableAs[Image.Image, ChatCompletionImageUrlObject]): + t = ChatCompletionImageUrlObject + + @classmethod + def encode(cls, image: Image.Image) -> ChatCompletionImageUrlObject: + return { + "detail": "auto", + "url": _pil_image_to_base64_data_uri(image), + } + + @classmethod + def decode(cls, image: ChatCompletionImageUrlObject) -> Image.Image: + image_url = image["url"] + if not image_url.startswith("data:image/"): + raise RuntimeError( + f"expected base64 encoded image as data uri, received {image_url}" + ) + data = image_url.split(",")[1] + return Image.open(fp=io.BytesIO(base64.b64decode(data))) + + @classmethod + def serialize( + cls, value: ChatCompletionImageUrlObject + ) -> list[OpenAIMessageContentListBlock]: + return [{"type": "image_url", "image_url": value}] + + +@type_to_encodable_type.register(tuple) +def _type_encodable_type_tuple[T](ty: type[T]) -> Encodable[T]: + args = typing.get_args(ty) + + # Handle empty tuple, or tuple with no args + if not args or args == ((),): + return _type_encodable_type_base(ty) + + # Create encoders for each element type + element_encoders = [type_to_encodable_type(arg) for arg in args] + + # Check if any element type is Image.Image + has_image = any(arg is Image.Image for arg in args) + + encoded_ty: type[typing.Any] = typing.cast( + type[typing.Any], + tuple[*(enc.t for enc in element_encoders)], # type: ignore + ) + + class TupleEncodable(EncodableAs[T, typing.Any]): + t: type[typing.Any] = encoded_ty + + @classmethod + def 
encode(cls, t: T) -> typing.Any: + if not isinstance(t, tuple): + raise TypeError(f"Expected tuple, got {type(t)}") + if len(t) != len(element_encoders): + raise ValueError( + f"Tuple length {len(t)} does not match expected length {len(element_encoders)}" + ) + return tuple([enc.encode(elem) for enc, elem in zip(element_encoders, t)]) + + @classmethod + def decode(cls, t: typing.Any) -> T: + if len(t) != len(element_encoders): + raise ValueError( + f"tuple length {len(t)} does not match expected length {len(element_encoders)}" + ) + decoded_elements: list[typing.Any] = [ + enc.decode(elem) for enc, elem in zip(element_encoders, t) + ] + return typing.cast(T, tuple(decoded_elements)) + + @classmethod + def serialize(cls, value: typing.Any) -> list[OpenAIMessageContentListBlock]: + if has_image: + # If tuple contains images, serialize each element and flatten the results + result: list[OpenAIMessageContentListBlock] = [] + if not isinstance(value, tuple): + raise TypeError(f"Expected tuple, got {type(value)}") + if len(value) != len(element_encoders): + raise ValueError( + f"Tuple length {len(value)} does not match expected length {len(element_encoders)}" + ) + for enc, elem in zip(element_encoders, value): + result.extend(enc.serialize(elem)) + return result + else: + return super().serialize(value) + + return typing.cast(Encodable[T], TupleEncodable()) + + +@type_to_encodable_type.register(list) +def _type_encodable_type_list[T](ty: type[T]) -> Encodable[T]: + args = typing.get_args(ty) + + # Handle unparameterized list (list without type args) + if not args: + return _type_encodable_type_base(ty) + + # Get the element type (first type argument) + element_ty = args[0] + element_encoder = type_to_encodable_type(element_ty) + + # Check if element type is Image.Image + has_image = element_ty is Image.Image + + # Build the encoded type (list of encoded element type) - runtime-created, use Any + encoded_ty: type[typing.Any] = typing.cast( + type[typing.Any], + 
list[element_encoder.t], # type: ignore + ) + + class ListEncodable(EncodableAs[T, typing.Any]): + t: type[typing.Any] = encoded_ty + + @classmethod + def encode(cls, t: T) -> typing.Any: + if not isinstance(t, list): + raise TypeError(f"Expected list, got {type(t)}") + return [element_encoder.encode(elem) for elem in t] + + @classmethod + def decode(cls, t: typing.Any) -> T: + decoded_elements: list[typing.Any] = [ + element_encoder.decode(elem) for elem in t + ] + return typing.cast(T, decoded_elements) + + @classmethod + def serialize(cls, value: typing.Any) -> list[OpenAIMessageContentListBlock]: + if has_image: + # If list contains images, serialize each element and flatten the results + result: list[OpenAIMessageContentListBlock] = [] + if not isinstance(value, list): + raise TypeError(f"Expected list, got {type(value)}") + for elem in value: + result.extend(element_encoder.serialize(elem)) + return result + else: + return super().serialize(value) + + return typing.cast(Encodable[T], ListEncodable()) diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index d0e9c1aa..27097f2d 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -7,19 +7,20 @@ import string import traceback import typing -from collections.abc import Callable, Hashable, Iterable, Mapping, Sequence +from collections.abc import Callable, Hashable, Iterable, Mapping from typing import Any, get_type_hints import litellm import pydantic +from effectful.handlers.llm.encoding import type_to_encodable_type + try: from PIL import Image except ImportError: raise ImportError("'pillow' is required to use effectful.handlers.providers") from litellm import ( - ChatCompletionImageObject, Choices, Message, OpenAIChatCompletionToolParam, @@ -44,84 +45,89 @@ def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: return f"data:image/png;base64,{_pil_image_to_base64_data(pil_image)}" -def _pil_image_to_openai_image_param( - 
pil_image: Image.Image, -) -> ChatCompletionImageObject: - return { - "type": "image_url", - "image_url": { - "detail": "auto", - "url": _pil_image_to_base64_data_uri(pil_image), - }, - } - - -@defop -@functools.singledispatch -def format_value(value: Any) -> OpenAIMessageContent: - """Convert a Python value to internal message part representation. - - This function can be extended by registering handlers for - different types using @format_value.register. - - Returns a OpenAIMessageContent - either a string or a list of OpenAIMessageContentListBlock. - """ - return [{"type": "text", "text": str(value)}] - - -@format_value.register(Image.Image) # type: ignore -def _(value: Image.Image) -> OpenAIMessageContent: - return [_pil_image_to_openai_image_param(value)] - - -@format_value.register(str) # type: ignore -def _(value: str) -> OpenAIMessageContent: - return [{"type": "text", "text": value}] - - -@format_value.register(bytes) # type: ignore -def _(value: bytes) -> OpenAIMessageContent: - return [{"type": "text", "text": str(value)}] - - -@format_value.register(Sequence) # type: ignore -def _(values: Sequence) -> OpenAIMessageContent: - if all(isinstance(value, Image.Image) for value in values): - return [_pil_image_to_openai_image_param(value) for value in values] - else: - return [{"type": "text", "text": str(values)}] - - @dataclasses.dataclass class Tool[**P, T]: - parameter_model: type[pydantic.BaseModel] operation: Operation[P, T] name: str + parameter_annotations: dict[str, type] def serialise_return_value(self, value) -> OpenAIMessageContent: """Serializes a value returned by the function into a json format suitable for the OpenAI API.""" sig = inspect.signature(self.operation) - ret_ty = sig.return_annotation - ret_ty_origin = typing.get_origin(ret_ty) or ret_ty + encoded_ty = type_to_encodable_type(sig.return_annotation) + encoded_value = encoded_ty.encode(value) + return encoded_ty.serialize(encoded_value) + + @functools.cached_property + def 
parameter_model(self) -> type[pydantic.BaseModel]: + fields = { + param_name: type_to_encodable_type(param_type).t + for param_name, param_type in self.parameter_annotations.items() + } + parameter_model = pydantic.create_model( + "Params", + __config__={"extra": "forbid"}, + **fields, # type: ignore + ) + return parameter_model - return format_value.dispatch(ret_ty_origin)(value) # type: ignore + def call_with_json_args( + self, template: Template, json_str: str + ) -> OpenAIMessageContent: + """Implements a roundtrip call to a python function. Input is a json string representing an LLM tool call request parameters. The output is the serialised response to the model.""" + try: + op = self.operation + # build dict of raw encodable types U + raw_args = self.parameter_model.model_validate_json(json_str) + + # use encoders to decode Us to python types T + params: dict[str, Any] = { + param_name: type_to_encodable_type( + self.parameter_annotations[param_name] + ).decode(getattr(raw_args, param_name)) + for param_name in raw_args.model_fields_set + } + + # call tool with python types + result = tool_call( + template, + self.operation, + **params, + ) + # serialize back to U using encoder for return type + sig = inspect.signature(op) + encoded_ty = type_to_encodable_type(sig.return_annotation) + encoded_value = encoded_ty.encode(result) + # serialise back to Json + return encoded_ty.serialize(encoded_value) + except Exception as exn: + return str({"status": "failure", "exception": str(exn)}) @classmethod def of_operation(cls, op: Operation[P, T], name: str): sig = inspect.signature(op) hints = get_type_hints(op) - fields = { - param_name: hints.get(param_name, str) for param_name in sig.parameters - } - - parameter_model = pydantic.create_model( - "Params", __config__={"extra": "forbid"}, **fields - ) + parameter_annotations: dict[str, type] = {} + + for param_name, param in sig.parameters.items(): + # Check if parameter annotation is missing (inspect.Parameter.empty) + 
if param.annotation is inspect.Parameter.empty: + raise TypeError( + f"Parameter '{param_name}' in operation '{op.__name__}' " + "does not have a type annotation" + ) + # get_type_hints might not include the parameter if annotation is invalid + if param_name not in hints: + raise TypeError( + f"Parameter '{param_name}' in operation '{op.__name__}' " + "does not have a valid type annotation" + ) + parameter_annotations[param_name] = hints[param_name] return cls( - parameter_model=parameter_model, operation=op, name=name, + parameter_annotations=parameter_annotations, ) @property @@ -177,23 +183,21 @@ def push_current_text(): if field_name is not None: obj, _ = self.get_field(field_name, args, kwargs) - obj = self.convert_field(obj, conversion) - - if isinstance(obj, Image.Image): - assert not format_spec, ( - "image template parameters cannot have format specifiers" + part = self.convert_field(obj, conversion) + # special casing for text + if ( + isinstance(part, list) + and len(part) == 1 + and part[0]["type"] == "text" + ): + current_text += self.format_field( + part[0]["text"], format_spec if format_spec else "" ) + elif isinstance(part, list): push_current_text() - prompt_parts.append( - { - "type": "image_url", - "image_url": _pil_image_to_base64_data_uri(obj), - } - ) + prompt_parts.extend(part) else: - current_text += self.format_field( - obj, format_spec if format_spec else "" - ) + prompt_parts.append(part) push_current_text() return prompt_parts @@ -343,24 +347,6 @@ def _retry_completion(self, template: Template, *args, **kwargs) -> Any: raise Exception("Max retries reached") -def _call_tool_with_json_args( - template: Template, tool: Tool, json_str_args: str -) -> OpenAIMessageContent: - try: - args = tool.parameter_model.model_validate_json(json_str_args) - result = tool_call( - template, - tool.operation, - **{ - field: getattr(args, field) - for field in tool.parameter_model.model_fields - }, - ) - return tool.serialise_return_value(result) - except 
Exception as exn: - return str({"status": "failure", "exception": str(exn)}) - - def _pydantic_model_from_type(typ: type): return pydantic.create_model("Response", value=typ, __config__={"extra": "forbid"}) @@ -375,13 +361,19 @@ def compute_response(template: Template, model_input: list[Any]) -> ModelRespons tools = _tools_of_operations(template.tools) tool_schemas = [t.function_definition for t in tools.values()] - response_format = _pydantic_model_from_type(ret_type) if ret_type != str else None + response_encoding_type: type | None = type_to_encodable_type(ret_type).t + if response_encoding_type == str: + response_encoding_type = None # loop based on: https://cookbook.openai.com/examples/reasoning_function_calls while True: response: ModelResponse = completion( messages=model_input, - response_format=response_format, + response_format=pydantic.create_model( + "Response", value=response_encoding_type, __config__={"extra": "forbid"} + ) + if response_encoding_type + else None, tools=tool_schemas, ) @@ -395,7 +387,7 @@ def compute_response(template: Template, model_input: list[Any]) -> ModelRespons function = tool_call.function function_name = typing.cast(str, function.name) tool = tools[function_name] - tool_result = _call_tool_with_json_args(template, tool, function.arguments) + tool_result = tool.call_with_json_args(template, function.arguments) model_input.append( { "role": "tool", @@ -406,13 +398,9 @@ def compute_response(template: Template, model_input: list[Any]) -> ModelRespons ) -# Note: typing template as Template[P, T] causes term conversion to fail due to -# unification limitations. -@defop def decode_response[**P, T](template: Callable[P, T], response: ModelResponse) -> T: """Decode an LLM response into an instance of the template return type. This operation should raise if the output cannot be decoded. 
- """ assert isinstance(template, Template) choice: Choices = typing.cast(Choices, response.choices[0]) @@ -422,13 +410,18 @@ def decode_response[**P, T](template: Callable[P, T], response: ModelResponse) - assert result_str ret_type = template.__signature__.return_annotation - if ret_type == str: - return result_str # type: ignore[return-value] + encodable_ty = type_to_encodable_type(ret_type) + + if encodable_ty.t == str: + # if encoding as a type, value is just directly what the llm returned + value = result_str + else: + Result = pydantic.create_model("Result", value=encodable_ty.t) + result = Result.model_validate_json(result_str) + assert isinstance(result, Result) + value = result.value # type: ignore - Result = _pydantic_model_from_type(ret_type) - result = Result.model_validate_json(result_str) - assert isinstance(result, Result) - return result.value + return encodable_ty.decode(value) # type: ignore @defop @@ -441,8 +434,17 @@ def format_model_input[**P, T]( """ bound_args = template.__signature__.bind(*args, **kwargs) bound_args.apply_defaults() + # encode arguments + arguments = {} + for param in bound_args.arguments: + encoder = type_to_encodable_type( + template.__signature__.parameters[param].annotation + ) + encoded = encoder.encode(bound_args.arguments[param]) + arguments[param] = encoder.serialize(encoded) + prompt = _OpenAIPromptFormatter().format_as_messages( - template.__prompt_template__, **bound_args.arguments + template.__prompt_template__, **arguments ) # Note: The OpenAI api only seems to accept images in the 'user' role. 
The diff --git a/tests/test_handlers_llm_encoding.py b/tests/test_handlers_llm_encoding.py new file mode 100644 index 00000000..ce50979e --- /dev/null +++ b/tests/test_handlers_llm_encoding.py @@ -0,0 +1,709 @@ +from dataclasses import asdict, dataclass +from typing import NamedTuple, TypedDict + +import pydantic +import pytest +from PIL import Image + +from effectful.handlers.llm.encoding import type_to_encodable_type + + +def test_type_to_encodable_type_str(): + encodable = type_to_encodable_type(str) + encoded = encodable.encode("hello") + decoded = encodable.decode(encoded) + assert decoded == "hello" + Model = pydantic.create_model("Model", value=encodable.t) + decoded = Model.model_validate({"value": "hello"}) + assert decoded.value == "hello" + + +def test_type_to_encodable_type_int(): + encodable = type_to_encodable_type(int) + encoded = encodable.encode(42) + decoded = encodable.decode(encoded) + assert decoded == 42 + assert isinstance(decoded, int) + Model = pydantic.create_model("Model", value=encodable.t) + decoded = Model.model_validate({"value": 42}) + assert decoded.value == 42 + assert isinstance(decoded.value, int) + + +def test_type_to_encodable_type_bool(): + encodable = type_to_encodable_type(bool) + encoded = encodable.encode(True) + decoded = encodable.decode(encoded) + assert decoded is True + assert isinstance(decoded, bool) + encoded_false = encodable.encode(False) + decoded_false = encodable.decode(encoded_false) + assert decoded_false is False + Model = pydantic.create_model("Model", value=encodable.t) + decoded = Model.model_validate({"value": True}) + assert decoded.value is True + assert isinstance(decoded.value, bool) + + +def test_type_to_encodable_type_float(): + encodable = type_to_encodable_type(float) + encoded = encodable.encode(3.14) + decoded = encodable.decode(encoded) + assert decoded == 3.14 + assert isinstance(decoded, float) + Model = pydantic.create_model("Model", value=encodable.t) + decoded = 
Model.model_validate({"value": 3.14}) + assert decoded.value == 3.14 + assert isinstance(decoded.value, float) + + +def test_type_to_encodable_type_image(): + encodable = type_to_encodable_type(Image.Image) + image = Image.new("RGB", (10, 10), color="red") + encoded = encodable.encode(image) + assert isinstance(encoded, dict) + assert "url" in encoded + assert "detail" in encoded + assert encoded["detail"] == "auto" + assert encoded["url"].startswith("data:image/png;base64,") + decoded = encodable.decode(encoded) + assert isinstance(decoded, Image.Image) + assert decoded.size == (10, 10) + Model = pydantic.create_model("Model", value=encodable.t) + decoded = Model.model_validate({"value": encoded}) + assert decoded.value["url"] == encoded["url"] + assert decoded.value["detail"] == "auto" + + +def test_type_to_encodable_type_image_roundtrip(): + encodable = type_to_encodable_type(Image.Image) + original = Image.new("RGB", (20, 20), color="green") + encoded = encodable.encode(original) + decoded = encodable.decode(encoded) + assert isinstance(decoded, Image.Image) + assert decoded.size == original.size + assert decoded.mode == original.mode + + +def test_type_to_encodable_type_image_decode_invalid_url(): + encodable = type_to_encodable_type(Image.Image) + encoded = {"url": "http://example.com/image.png", "detail": "auto"} + with pytest.raises(RuntimeError, match="expected base64 encoded image as data uri"): + encodable.decode(encoded) + + +def test_type_to_encodable_type_tuple(): + encodable = type_to_encodable_type(tuple[int, str]) + value = (1, "test") + encoded = encodable.encode(value) + decoded = encodable.decode(encoded) + assert decoded == value + assert isinstance(decoded, tuple) + assert decoded[0] == 1 + assert decoded[1] == "test" + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded}) + assert model_instance.value == encoded + assert 
isinstance(model_instance.value, tuple) + assert model_instance.value[0] == 1 + assert model_instance.value[1] == "test" + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == value + assert isinstance(decoded_from_model, tuple) + + +def test_type_to_encodable_type_tuple_empty(): + encodable = type_to_encodable_type(tuple[()]) + value = () + encoded = encodable.encode(value) + decoded = encodable.decode(encoded) + assert decoded == value + assert isinstance(decoded, tuple) + assert len(decoded) == 0 + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded}) + assert model_instance.value == encoded + assert isinstance(model_instance.value, tuple) + assert len(model_instance.value) == 0 + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == value + assert isinstance(decoded_from_model, tuple) + + +def test_type_to_encodable_type_tuple_three_elements(): + encodable = type_to_encodable_type(tuple[int, str, bool]) + value = (42, "hello", True) + encoded = encodable.encode(value) + decoded = encodable.decode(encoded) + assert decoded == value + assert isinstance(decoded, tuple) + assert decoded[0] == 42 + assert decoded[1] == "hello" + assert decoded[2] is True + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded}) + assert model_instance.value == encoded + assert isinstance(model_instance.value, tuple) + assert model_instance.value[0] == 42 + assert model_instance.value[1] == "hello" + assert model_instance.value[2] is True + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == value + assert isinstance(decoded_from_model, tuple) + + +def test_type_to_encodable_type_list(): + encodable = 
type_to_encodable_type(list[int]) + value = [1, 2, 3, 4, 5] + encoded = encodable.encode(value) + decoded = encodable.decode(encoded) + assert decoded == value + assert isinstance(decoded, list) + assert all(isinstance(elem, int) for elem in decoded) + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded}) + assert model_instance.value == encoded + assert isinstance(model_instance.value, list) + assert model_instance.value == [1, 2, 3, 4, 5] + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == value + assert isinstance(decoded_from_model, list) + assert all(isinstance(elem, int) for elem in decoded_from_model) + + +def test_type_to_encodable_type_list_str(): + encodable = type_to_encodable_type(list[str]) + value = ["hello", "world", "test"] + encoded = encodable.encode(value) + decoded = encodable.decode(encoded) + assert decoded == value + assert isinstance(decoded, list) + assert all(isinstance(elem, str) for elem in decoded) + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded}) + assert model_instance.value == encoded + assert isinstance(model_instance.value, list) + assert model_instance.value == ["hello", "world", "test"] + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == value + assert isinstance(decoded_from_model, list) + assert all(isinstance(elem, str) for elem in decoded_from_model) + + +def test_type_to_encodable_type_namedtuple(): + class Point(NamedTuple): + x: int + y: int + + encodable = type_to_encodable_type(Point) + point = Point(10, 20) + encoded = encodable.encode(point) + decoded = encodable.decode(encoded) + assert decoded == point + assert isinstance(decoded, Point) + assert decoded.x == 10 + assert decoded.y == 
20 + Model = pydantic.create_model("Model", value=encodable.t) + decoded = Model.model_validate({"value": {"x": 10, "y": 20}}) + assert decoded.value == point + assert isinstance(decoded.value, Point) + + +def test_type_to_encodable_type_namedtuple_with_str(): + class Person(NamedTuple): + name: str + age: int + + encodable = type_to_encodable_type(Person) + person = Person("Alice", 30) + encoded = encodable.encode(person) + decoded = encodable.decode(encoded) + assert decoded == person + assert isinstance(decoded, Person) + assert decoded.name == "Alice" + assert decoded.age == 30 + Model = pydantic.create_model("Model", value=encodable.t) + decoded = Model.model_validate({"value": {"name": "Alice", "age": 30}}) + assert decoded.value == person + assert isinstance(decoded.value, Person) + + +def test_type_to_encodable_type_typeddict(): + class User(TypedDict): + name: str + age: int + + encodable = type_to_encodable_type(User) + user = User(name="Bob", age=25) + encoded = encodable.encode(user) + decoded = encodable.decode(encoded) + assert decoded == user + assert isinstance(decoded, dict) + assert decoded["name"] == "Bob" + assert decoded["age"] == 25 + Model = pydantic.create_model("Model", value=encodable.t) + decoded = Model.model_validate({"value": {"name": "Bob", "age": 25}}) + assert decoded.value == user + assert isinstance(decoded.value, dict) + + +def test_type_to_encodable_type_typeddict_optional(): + class Config(TypedDict, total=False): + host: str + port: int + + encodable = type_to_encodable_type(Config) + config = Config(host="localhost", port=8080) + encoded = encodable.encode(config) + decoded = encodable.decode(encoded) + assert decoded == config + assert decoded["host"] == "localhost" + assert decoded["port"] == 8080 + Model = pydantic.create_model("Model", value=encodable.t) + decoded = Model.model_validate({"value": {"host": "localhost", "port": 8080}}) + assert decoded.value == config + assert isinstance(decoded.value, dict) + + +def 
test_type_to_encodable_type_complex(): + encodable = type_to_encodable_type(complex) + value = 3 + 4j + encoded = encodable.encode(value) + decoded = encodable.decode(encoded) + assert decoded == value + assert isinstance(decoded, complex) + assert decoded.real == 3.0 + assert decoded.imag == 4.0 + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded}) + assert model_instance.value == encoded + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == value + assert isinstance(decoded_from_model, complex) + + +def test_type_to_encodable_type_tuple_of_images(): + encodable = type_to_encodable_type(tuple[Image.Image, Image.Image]) + image1 = Image.new("RGB", (10, 10), color="red") + image2 = Image.new("RGB", (20, 20), color="blue") + value = (image1, image2) + + encoded = encodable.encode(value) + assert isinstance(encoded, tuple) + assert len(encoded) == 2 + assert isinstance(encoded[0], dict) + assert isinstance(encoded[1], dict) + assert "url" in encoded[0] + assert "url" in encoded[1] + assert encoded[0]["url"].startswith("data:image/png;base64,") + assert encoded[1]["url"].startswith("data:image/png;base64,") + + decoded = encodable.decode(encoded) + assert isinstance(decoded, tuple) + assert len(decoded) == 2 + assert isinstance(decoded[0], Image.Image) + assert isinstance(decoded[1], Image.Image) + assert decoded[0].size == (10, 10) + assert decoded[1].size == (20, 20) + + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded}) + assert model_instance.value == encoded + assert isinstance(model_instance.value, tuple) + assert len(model_instance.value) == 2 + assert isinstance(model_instance.value[0], dict) + assert isinstance(model_instance.value[1], dict) + assert model_instance.value[0]["url"] == 
encoded[0]["url"] + assert model_instance.value[1]["url"] == encoded[1]["url"] + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert isinstance(decoded_from_model, tuple) + assert len(decoded_from_model) == 2 + assert isinstance(decoded_from_model[0], Image.Image) + assert isinstance(decoded_from_model[1], Image.Image) + assert decoded_from_model[0].size == (10, 10) + assert decoded_from_model[1].size == (20, 20) + + # Roundtrip test + original = ( + Image.new("RGB", (15, 15), color="green"), + Image.new("RGB", (25, 25), color="yellow"), + ) + encoded_roundtrip = encodable.encode(original) + decoded_roundtrip = encodable.decode(encoded_roundtrip) + assert isinstance(decoded_roundtrip, tuple) + assert len(decoded_roundtrip) == 2 + assert decoded_roundtrip[0].size == original[0].size + assert decoded_roundtrip[1].size == original[1].size + assert decoded_roundtrip[0].mode == original[0].mode + assert decoded_roundtrip[1].mode == original[1].mode + + +def test_type_to_encodable_type_list_of_images(): + encodable = type_to_encodable_type(list[Image.Image]) + images = [ + Image.new("RGB", (10, 10), color="red"), + Image.new("RGB", (20, 20), color="blue"), + Image.new("RGB", (30, 30), color="green"), + ] + + encoded = encodable.encode(images) + assert isinstance(encoded, list) + assert len(encoded) == 3 + assert all(isinstance(elem, dict) for elem in encoded) + assert all("url" in elem for elem in encoded) + assert all(elem["url"].startswith("data:image/png;base64,") for elem in encoded) + + decoded = encodable.decode(encoded) + assert isinstance(decoded, list) + assert len(decoded) == 3 + assert all(isinstance(elem, Image.Image) for elem in decoded) + assert decoded[0].size == (10, 10) + assert decoded[1].size == (20, 20) + assert decoded[2].size == (30, 30) + + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded}) + assert 
model_instance.value == encoded + assert isinstance(model_instance.value, list) + assert len(model_instance.value) == 3 + assert all(isinstance(elem, dict) for elem in model_instance.value) + assert all("url" in elem for elem in model_instance.value) + assert model_instance.value[0]["url"] == encoded[0]["url"] + assert model_instance.value[1]["url"] == encoded[1]["url"] + assert model_instance.value[2]["url"] == encoded[2]["url"] + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert isinstance(decoded_from_model, list) + assert len(decoded_from_model) == 3 + assert all(isinstance(elem, Image.Image) for elem in decoded_from_model) + assert decoded_from_model[0].size == (10, 10) + assert decoded_from_model[1].size == (20, 20) + assert decoded_from_model[2].size == (30, 30) + + # Roundtrip test + original = [ + Image.new("RGB", (15, 15), color="yellow"), + Image.new("RGB", (25, 25), color="purple"), + ] + encoded_roundtrip = encodable.encode(original) + decoded_roundtrip = encodable.decode(encoded_roundtrip) + assert isinstance(decoded_roundtrip, list) + assert len(decoded_roundtrip) == 2 + assert decoded_roundtrip[0].size == original[0].size + assert decoded_roundtrip[1].size == original[1].size + assert decoded_roundtrip[0].mode == original[0].mode + assert decoded_roundtrip[1].mode == original[1].mode + + +def test_type_to_encodable_type_dataclass(): + @dataclass + class Point: + x: int + y: int + + encodable = type_to_encodable_type(Point) + point = Point(10, 20) + encoded = encodable.encode(point) + decoded = encodable.decode(encoded) + assert decoded == point + assert isinstance(decoded, Point) + assert decoded.x == 10 + assert decoded.y == 20 + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": asdict(encoded)}) + assert model_instance.value.x == 10 + assert model_instance.value.y == 20 + # Decode from model + 
decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == point + assert isinstance(decoded_from_model, Point) + + +def test_type_to_encodable_type_dataclass_with_str(): + @dataclass + class Person: + name: str + age: int + + encodable = type_to_encodable_type(Person) + person = Person("Alice", 30) + encoded = encodable.encode(person) + decoded = encodable.decode(encoded) + assert decoded == person + assert isinstance(decoded, Person) + assert decoded.name == "Alice" + assert decoded.age == 30 + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": asdict(encoded)}) + assert model_instance.value.name == "Alice" + assert model_instance.value.age == 30 + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == person + assert isinstance(decoded_from_model, Person) + + +def test_type_to_encodable_type_dataclass_with_list(): + @dataclass + class Container: + items: list[int] + name: str + + encodable = type_to_encodable_type(Container) + container = Container(items=[1, 2, 3], name="test") + encoded = encodable.encode(container) + decoded = encodable.decode(encoded) + assert decoded == container + assert isinstance(decoded, Container) + assert decoded.items == [1, 2, 3] + assert decoded.name == "test" + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": asdict(encoded)}) + assert model_instance.value.items == [1, 2, 3] + assert model_instance.value.name == "test" + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == container + assert isinstance(decoded_from_model, Container) + + +def test_type_to_encodable_type_dataclass_with_tuple(): + @dataclass + class Pair: + values: tuple[int, str] + count: int + + encodable = 
type_to_encodable_type(Pair) + pair = Pair(values=(42, "hello"), count=2) + encoded = encodable.encode(pair) + decoded = encodable.decode(encoded) + assert decoded == pair + assert isinstance(decoded, Pair) + assert decoded.values == (42, "hello") + assert decoded.count == 2 + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": asdict(encoded)}) + assert model_instance.value.values == (42, "hello") + assert model_instance.value.count == 2 + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == pair + assert isinstance(decoded_from_model, Pair) + + +def test_type_to_encodable_type_dataclass_with_optional(): + @dataclass + class Config: + host: str + port: int + timeout: float | None = None + + encodable = type_to_encodable_type(Config) + config = Config(host="localhost", port=8080, timeout=5.0) + encoded = encodable.encode(config) + decoded = encodable.decode(encoded) + assert decoded == config + assert isinstance(decoded, Config) + assert decoded.host == "localhost" + assert decoded.port == 8080 + assert decoded.timeout == 5.0 + + # Test with None value + config_none = Config(host="localhost", port=8080, timeout=None) + encoded_none = encodable.encode(config_none) + decoded_none = encodable.decode(encoded_none) + assert decoded_none == config_none + assert decoded_none.timeout is None + + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": asdict(encoded)}) + assert model_instance.value.host == "localhost" + assert model_instance.value.port == 8080 + assert model_instance.value.timeout == 5.0 + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == config + + +def test_type_to_encodable_type_nested_dataclass(): + @dataclass + class Address: + street: str + city: 
str + + @dataclass + class Person: + name: str + age: int + address: Address + + encodable = type_to_encodable_type(Person) + address = Address(street="123 Main St", city="New York") + person = Person(name="Bob", age=25, address=address) + + encoded = encodable.encode(person) + assert isinstance(encoded, Person) + assert hasattr(encoded, "name") + assert hasattr(encoded, "age") + assert hasattr(encoded, "address") + assert isinstance(encoded.address, Address) + assert encoded.address.street == "123 Main St" + assert encoded.address.city == "New York" + + decoded = encodable.decode(encoded) + assert isinstance(decoded, Person) + assert isinstance(decoded.address, Address) + assert decoded.name == "Bob" + assert decoded.age == 25 + assert decoded.address.street == "123 Main St" + assert decoded.address.city == "New York" + + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": asdict(encoded)}) + assert model_instance.value.name == "Bob" + assert model_instance.value.age == 25 + assert model_instance.value.address.street == "123 Main St" + assert model_instance.value.address.city == "New York" + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == person + assert isinstance(decoded_from_model, Person) + assert isinstance(decoded_from_model.address, Address) + + +def test_type_to_encodable_type_pydantic_model(): + class Point(pydantic.BaseModel): + x: int + y: int + + encodable = type_to_encodable_type(Point) + point = Point(x=10, y=20) + encoded = encodable.encode(point) + decoded = encodable.decode(encoded) + assert decoded == point + assert isinstance(decoded, Point) + assert decoded.x == 10 + assert decoded.y == 20 + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded.model_dump()}) + assert 
model_instance.value.x == 10 + assert model_instance.value.y == 20 + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == point + assert isinstance(decoded_from_model, Point) + + +def test_type_to_encodable_type_pydantic_model_with_str(): + class Person(pydantic.BaseModel): + name: str + age: int + + encodable = type_to_encodable_type(Person) + person = Person(name="Alice", age=30) + encoded = encodable.encode(person) + decoded = encodable.decode(encoded) + assert decoded == person + assert isinstance(decoded, Person) + assert decoded.name == "Alice" + assert decoded.age == 30 + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded.model_dump()}) + assert model_instance.value.name == "Alice" + assert model_instance.value.age == 30 + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == person + assert isinstance(decoded_from_model, Person) + + +def test_type_to_encodable_type_pydantic_model_with_list(): + class Container(pydantic.BaseModel): + items: list[int] + name: str + + encodable = type_to_encodable_type(Container) + container = Container(items=[1, 2, 3], name="test") + encoded = encodable.encode(container) + decoded = encodable.decode(encoded) + assert decoded == container + assert isinstance(decoded, Container) + assert decoded.items == [1, 2, 3] + assert decoded.name == "test" + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded.model_dump()}) + assert model_instance.value.items == [1, 2, 3] + assert model_instance.value.name == "test" + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == container + assert isinstance(decoded_from_model, Container) + + +def 
test_type_to_encodable_type_nested_pydantic_model(): + class Address(pydantic.BaseModel): + street: str + city: str + + class Person(pydantic.BaseModel): + name: str + age: int + address: Address + + encodable = type_to_encodable_type(Person) + address = Address(street="123 Main St", city="New York") + person = Person(name="Bob", age=25, address=address) + + encoded = encodable.encode(person) + assert isinstance(encoded, pydantic.BaseModel) + assert hasattr(encoded, "name") + assert hasattr(encoded, "age") + assert hasattr(encoded, "address") + assert isinstance(encoded.address, pydantic.BaseModel) + assert encoded.address.street == "123 Main St" + assert encoded.address.city == "New York" + + decoded = encodable.decode(encoded) + assert isinstance(decoded, Person) + assert isinstance(decoded.address, Address) + assert decoded.name == "Bob" + assert decoded.age == 25 + assert decoded.address.street == "123 Main St" + assert decoded.address.city == "New York" + + # Test with pydantic model validation + Model = pydantic.create_model("Model", value=encodable.t) + model_instance = Model.model_validate({"value": encoded.model_dump()}) + assert model_instance.value.name == "Bob" + assert model_instance.value.age == 25 + assert model_instance.value.address.street == "123 Main St" + assert model_instance.value.address.city == "New York" + # Decode from model + decoded_from_model = encodable.decode(model_instance.value) + assert decoded_from_model == person + assert isinstance(decoded_from_model, Person) + assert isinstance(decoded_from_model.address, Address) diff --git a/tests/test_handlers_llm_provider.py b/tests/test_handlers_llm_provider.py index 3fbd307d..9a0bcc5c 100644 --- a/tests/test_handlers_llm_provider.py +++ b/tests/test_handlers_llm_provider.py @@ -13,7 +13,7 @@ import pytest from PIL import Image -from pydantic import Field +from pydantic import BaseModel, Field from pydantic.dataclasses import dataclass from effectful.handlers.llm import Template @@ -377,3 
+377,134 @@ def test_image_input(): handler(LimitLLMCallsHandler(max_calls=3)), ): assert any("smile" in categorise_image(smiley_face()) for _ in range(3)) + + +class BookReview(BaseModel): + """A book review with rating and summary.""" + + title: str = Field(..., description="title of the book") + rating: int = Field(..., description="rating from 1 to 5", ge=1, le=5) + summary: str = Field(..., description="brief summary of the review") + + +@Template.define +def review_book(plot: str) -> BookReview: + """Review a book based on this plot: {plot}""" + raise NotImplementedError + + +class TestPydanticBaseModelReturn: + @requires_openai + def test_pydantic_basemodel_return(self): + plot = "A young wizard discovers he has magical powers and goes to a school for wizards." + + with ( + handler(LiteLLMProvider(model_name="gpt-5-nano")), + handler(LimitLLMCallsHandler(max_calls=1)), + ): + review = review_book(plot) + + assert isinstance(review, BookReview) + assert isinstance(review.title, str) + assert len(review.title) > 0 + assert isinstance(review.rating, int) + assert 1 <= review.rating <= 5 + assert isinstance(review.summary, str) + assert len(review.summary) > 0 + + +class BookRecommendation(BaseModel): + """A book recommendation with details.""" + + title: str = Field(..., description="title of the recommended book") + reason: str = Field(..., description="reason for the recommendation") + + +@defop +def recommend_book_tool(genre: str, explanation: str) -> BookRecommendation: + """Recommend a book based on genre preference. 
+ + Parameters: + - genre: The genre of book to recommend + - explanation: Natural language explanation of the recommendation + """ + raise NotHandled + + +class LoggingBookRecommendationInterpretation(ObjectInterpretation): + """Provides an interpretation for `recommend_book_tool` that tracks recommendations.""" + + recommendation_count: int = 0 + recommendation_results: list[dict] = [] + + @implements(recommend_book_tool) + def _recommend_book_tool(self, genre: str, explanation: str) -> BookRecommendation: + self.recommendation_count += 1 + + # Simple heuristic: recommend based on genre + recommendations = { + "fantasy": BookRecommendation( + title="The Lord of the Rings", reason="Classic fantasy epic" + ), + "sci-fi": BookRecommendation( + title="Dune", reason="Epic science fiction masterpiece" + ), + "mystery": BookRecommendation( + title="The Hound of the Baskervilles", + reason="Classic mystery novel", + ), + } + + recommendation = recommendations.get( + genre.lower(), + BookRecommendation( + title="1984", reason="Thought-provoking dystopian novel" + ), + ) + + self.recommendation_results.append( + { + "genre": genre, + "explanation": explanation, + "recommendation": recommendation, + } + ) + + return recommendation + + +@Template.define(tools=[recommend_book_tool]) +def get_book_recommendation(user_preference: str) -> BookRecommendation: + """Get a book recommendation based on user preference: {user_preference}. + Use the provided tools to make a recommendation. 
+ """ + raise NotHandled + + +class TestPydanticBaseModelToolCalls: + @pytest.mark.parametrize( + "model_name", + [ + pytest.param("gpt-5-nano", marks=requires_openai), + pytest.param("claude-sonnet-4-5-20250929", marks=requires_anthropic), + ], + ) + def test_pydantic_basemodel_tool_calling(self, model_name): + """Test that templates with tools work with Pydantic BaseModel.""" + book_rec_ctx = LoggingBookRecommendationInterpretation() + with ( + handler(LiteLLMProvider(model_name=model_name)), + handler(LimitLLMCallsHandler(max_calls=4)), + handler(book_rec_ctx), + ): + recommendation = get_book_recommendation("I love fantasy novels") + + assert isinstance(recommendation, BookRecommendation) + assert isinstance(recommendation.title, str) + assert len(recommendation.title) > 0 + assert isinstance(recommendation.reason, str) + assert len(recommendation.reason) > 0 + + # Verify the tool was called at least once + assert book_rec_ctx.recommendation_count >= 1 + assert len(book_rec_ctx.recommendation_results) >= 1 From 931d5071d3f386a224cf46c103ca1905fa3c12df Mon Sep 17 00:00:00 2001 From: "Dat Nguyen (Marc)" <15943389+datvo06@users.noreply.github.com> Date: Mon, 15 Dec 2025 16:32:37 -0500 Subject: [PATCH 23/39] Initial version of Lexical Context Collection - Collecting Tools and Template (#434) * Adding lexical context collection * Allow model input to refer to anything within the lexical context * Different handling between representable object and other types when referred to in Template * More edge case handling * More edge case handling * More edge case handling * Register more instead of _get_source_for_object * Tune down the scope to only collect relevant tools templates * Constructing Tool from Template, allowing higher-order Tempalte * Linting * Fix exception type * Fix exception type to NotHandled * Fix stringinified annotation * Fix tool.define signature * Trim Tool.define * Trip format_value.register * Add warning on no doc case * Removing default for 
__name__ * More specific lexical context type annotation * Additional tools for pydandic-compatible conversion and recursion depth limiting * Linting * Linting * Linting * Linting * Minor fixes * Merge encoding/decoding * Update a lot of prompt with "do not use any tools" * Update a lot of prompt with "do not use any tools" * More tool limiting prompts * Lint * Lint * Factoring out book and poem tools after multiple failed prompt modification attempts * Revert semantics * Lint * Update name of lexical_context to __context__ * Lint * Minor * More prompt modification * Make sure notebook run --- docs/source/llm.ipynb | 271 +++++++++++++------ effectful/handlers/llm/__init__.py | 89 +++++- effectful/handlers/llm/providers.py | 72 +++-- effectful/handlers/llm/synthesis.py | 2 +- tests/test_handlers_llm.py | 138 +++++++++- tests/test_handlers_llm_provider.py | 195 +------------ tests/test_handlers_llm_tool_calling_book.py | 128 +++++++++ tests/test_handlers_llm_tool_calling_poem.py | 137 ++++++++++ 8 files changed, 724 insertions(+), 308 deletions(-) create mode 100644 tests/test_handlers_llm_tool_calling_book.py create mode 100644 tests/test_handlers_llm_tool_calling_poem.py diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index aba3ecff..6f2a24b0 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -24,7 +24,7 @@ " tool_call,\n", ")\n", "from effectful.handlers.llm.synthesis import ProgramSynthesis\n", - "from effectful.ops.semantics import fwd, handler\n", + "from effectful.ops.semantics import NotHandled, fwd, handler\n", "from effectful.ops.syntax import defop\n", "\n", "provider = LiteLLMProvider()" @@ -56,15 +56,15 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 5, "id": "1e832675", "metadata": {}, "outputs": [], "source": [ "@Template.define\n", "def limerick(theme: str) -> str:\n", - " \"\"\"Write a limerick on the theme of {theme}.\"\"\"\n", - " raise NotImplementedError" + " \"\"\"Write a limerick on the theme 
of {theme}. Do not use any tools.\"\"\"\n", + " raise NotHandled" ] }, { @@ -79,7 +79,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 6, "id": "634f6533", "metadata": {}, "outputs": [ @@ -87,17 +87,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "In the ocean where fast fish dash, \n", - "Swims a mackerel that makes quite a splash. \n", - "With each flip of its fin, \n", - "It wears a cheeky grin, \n", - "And escapes from each net's eager clasp! \n", - "----------------------------------------\n", - "In the depths of the ocean so blue, \n", - "Swam a fish with a curious view. \n", + "In the ocean so deep and so swish, \n", + "Swam a curious gold-colored fish. \n", "With a flick of its tail, \n", - "It set off to unveil, \n", - "The mysteries of waters anew. \n" + "It set off to sail, \n", + "In search of a dream and a wish.\n", + "----------------------------------------\n", + "In the ocean where fish like to play, \n", + "They swim and they glide all the day. \n", + "With scales shining bright, \n", + "They bring such delight, \n", + "In the waters, they dance and display.\n" ] } ], @@ -118,7 +118,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 7, "id": "706ce53b", "metadata": {}, "outputs": [ @@ -127,29 +127,37 @@ "output_type": "stream", "text": [ "\n", - "Silver scales shimmer, \n", - "Dancing through the ocean's depths— \n", - "Whispers of the sea.\n", + "Silent waters dance, \n", + "Silver flash beneath the waves— \n", + "Fish in quiet grace.\n", "----------------------------------------\n", - "Silver scales shimmer, \n", - "Dancing through the ocean's depths— \n", - "Whispers of the sea.\n", + "Silent waters dance, \n", + "Silver flash beneath the waves— \n", + "Fish in quiet grace.\n", "\n", - "Silver scales shimmer, \n", - "Beneath the gentle waves' dance, \n", - "In the ocean's hush. 
\n", + "Fish swim silently, \n", + "In the embrace of water, \n", + "Nature's dance alive.\n", "----------------------------------------\n", - "Silver scales shimmer, \n", - "Beneath the gentle waves' dance, \n", - "In the ocean's hush. \n", + "Fish swim silently, \n", + "In the embrace of water, \n", + "Nature's dance alive.\n", + "\n", + "\n", + "\u001b[1;31mGive Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new\u001b[0m\n", + "LiteLLM.Info: If you need to debug this error, use `litellm._turn_on_debug()'.\n", "\n", - "In deep waters blue, \n", - "Silent dances shift and gleam, \n", - "Fish weave dreams anew. \n", + "Here's a haiku on the theme of fish3:\n", + "\n", + "In the deep blue sea, \n", + "Silent swimmers glide with grace, \n", + "Whispers of the tide.\n", "----------------------------------------\n", - "In deep waters blue, \n", - "Silent dances shift and gleam, \n", - "Fish weave dreams anew. \n" + "Here is a haiku on the theme of fish3:\n", + "\n", + "In the clear water, \n", + "Graceful fins weave through the sea, \n", + "Whispers of the deep.\n" ] } ], @@ -157,14 +165,14 @@ "@functools.cache\n", "@Template.define\n", "def haiku(theme: str) -> str:\n", - " \"\"\"Write a haiku on the theme of {theme}.\"\"\"\n", - " raise NotImplementedError\n", + " \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\n", + " raise NotHandled\n", "\n", "\n", "@Template.define\n", "def haiku_no_cache(theme: str) -> str:\n", - " \"\"\"Write a haiku on the theme of {theme}.\"\"\"\n", - " raise NotImplementedError\n", + " \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\n", + " raise NotHandled\n", "\n", "\n", "print()\n", @@ -200,15 +208,15 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 8, "id": "2c766859", "metadata": {}, "outputs": [], "source": [ "@Template.define\n", "def primes(first_digit: int) -> int:\n", - " \"\"\"Give a prime number with {first_digit} as the first digit.\"\"\"\n", - " raise NotImplementedError\n", + " \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\n", + " raise NotHandled\n", "\n", "\n", "with handler(provider):\n", @@ -225,7 +233,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 9, "id": "c83bbdc0", "metadata": {}, "outputs": [ @@ -234,15 +242,16 @@ "output_type": "stream", "text": [ "def count_a_occurrences(input_string: str) -> int:\n", - " return input_string.count('a')\n" + " return input_string.count('a')\n", + "\n" ] } ], "source": [ "@Template.define\n", "def count_char(char: str) -> Callable[[str], int]:\n", - " \"\"\"Write a function which takes a string and counts the occurrances of '{char}'.\"\"\"\n", - " raise NotImplementedError\n", + " \"\"\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\"\"\"\n", + " raise NotHandled\n", "\n", "\n", "with handler(provider), handler(ProgramSynthesis()):\n", @@ -261,14 +270,14 @@ "source": [ "## Tool Calling\n", "\n", - "Passing `Operation`s to `Template.define` makes them available for the LLM to call as tools. The description of these operations is inferred from their type annotations and docstrings.\n", + "`Operation`s defined in the lexical scope of a `Template` are automatically available for the LLM to call as tools. The description of these operations is inferred from their type annotations and docstrings.\n", "\n", "Tool calls are mediated by a helper operation `tool_call`. Handling this operation allows tool use to be tracked or logged." 
] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 11, "id": "66711301", "metadata": {}, "outputs": [ @@ -280,7 +289,7 @@ "Tool call: weather(*(), **{'city': 'Chicago'}) -> cold\n", "Tool call: weather(*(), **{'city': 'New York'}) -> wet\n", "Tool call: weather(*(), **{'city': 'Barcelona'}) -> sunny\n", - "Barcelona currently has good weather, as it is sunny.\n" + "Among the cities checked, Barcelona has good weather, as it is currently sunny.\n" ] } ], @@ -296,10 +305,10 @@ " return status.get(city, \"unknown\")\n", "\n", "\n", - "@Template.define(tools=[cities, weather])\n", + "@Template.define # cities and weather auto-captured from lexical scope\n", "def vacation() -> str:\n", - " \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\n", - " raise NotImplementedError\n", + " \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\n", + " raise NotHandled\n", "\n", "\n", "def log_tool_call(_, tool, *args, **kwargs):\n", @@ -324,7 +333,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 12, "id": "17668ac8", "metadata": {}, "outputs": [ @@ -335,9 +344,9 @@ "> You are onstage at a comedy club. You tell the following joke:\n", "Knock knock.\n", "Who's there?\n", - "Iguana.\n", - "Iguana who?\n", - "Iguana come inside your house and warm up, it's cold out here!\n", + "Liz.\n", + "Liz who?\n", + "Liz-ard you curious who's at the door?\n", "> The crowd laughs politely.\n" ] } @@ -351,14 +360,14 @@ "\n", "@Template.define\n", "def write_joke(theme: str) -> KnockKnockJoke:\n", - " \"\"\"Write a knock-knock joke on the theme of {theme}.\"\"\"\n", - " raise NotImplementedError\n", + " \"\"\"Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.\"\"\"\n", + " raise NotHandled\n", "\n", "\n", "@Template.define\n", "def rate_joke(joke: KnockKnockJoke) -> bool:\n", - " \"\"\"Decide if {joke} is funny or not\"\"\"\n", - " raise NotImplementedError\n", + " \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\n", + " raise NotHandled\n", "\n", "\n", "def do_comedy():\n", @@ -388,7 +397,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 13, "id": "cbf495a2", "metadata": {}, "outputs": [ @@ -396,8 +405,8 @@ "name": "stdout", "output_type": "stream", "text": [ - "Request fired: () {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish2.'}], 'role': 'user'}], 'response_format': None, 'tools': []} ModelResponse(id='chatcmpl-CkjWzRIrVVqCuOSeRSdCW1nYZ2SG7', created=1765254145, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Gently in the stream, \\nSilver scales in dappled light, \\nSilent swirls below. 
', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=23, prompt_tokens=34, total_tokens=57, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')\n", - "Request fired: () {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish.'}], 'role': 'user'}], 'response_format': None, 'tools': []} ModelResponse(id='chatcmpl-CkjX0o5CHnG7qL9LJT0PvvofD2OzU', created=1765254146, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the sea where the swift currents swish, \\nLived a cod with an unyielding wish. \\nHe dreamt of the sky, \\nWhere seagulls would fly, \\nBut alas, he remained just a fish. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=50, prompt_tokens=34, total_tokens=84, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')\n" + "Request fired: () {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish2. 
Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]} ModelResponse(id='chatcmpl-CnANzyeB958opw15SxIJ5GLG5eCI8', created=1765834031, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the gentle stream, \\nSilver scales shimmer and dance, \\nQuietly they glide. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=364, total_tokens=384, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')\n", + "Request fired: () {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]} ModelResponse(id='chatcmpl-CnAO05Uemhl4BA8dIUcQyqoKIyOvk', created=1765834032, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the sea where the waves gently swish, \\nLived a fish with a hopeful wish. \\nHe dreamed of the skies, \\nTo soar and to rise, \\nBut alas, he remained just a fish. 
', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=46, prompt_tokens=364, total_tokens=410, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')\n" ] } ], @@ -431,7 +440,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 14, "id": "81a15f00", "metadata": {}, "outputs": [ @@ -439,8 +448,11 @@ "name": "stdout", "output_type": "stream", "text": [ - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3.'}], 'role': 'user'}], 'response_format': None, 'tools': []}, 'response': ModelResponse(id='chatcmpl-CkjX1vTtAKxu7ldqTHNLf3Q5HJtEa', created=1765254147, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Beneath ripples glide, \\nWhispers of scales in moonlight, \\nSilent depths, fish dart. 
', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=25, prompt_tokens=34, total_tokens=59, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4.'}], 'role': 'user'}], 'response_format': None, 'tools': []}, 'response': ModelResponse(id='chatcmpl-CkjX2ED38u6C82SrNwcgpoBJ6rLtL', created=1765254148, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the ocean so deep and so swish, \\nSwam a cod with a dream-like wish. \\nIt leaped with a flop, \\nOver waves it would hop, \\nSaying, \"One day I\\'ll fly—oh, what bliss!\" ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=53, prompt_tokens=35, total_tokens=88, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n" + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3. 
Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOCov9x47s8Jj0K2oGyrB21h9dM', created=1765834044, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"theme\":\"fish\"}', name='haiku_no_cache'), id='call_8gQN3B78H2aZzIdOZPhEqPqy', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=17, prompt_tokens=364, total_tokens=381, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOD4UrpELPotqSt3s76CnJGu6FB', created=1765834045, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the quiet stream, \\nSilver scales shimmer with grace, \\nFish dance in moonlight.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=363, total_tokens=383, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), 
service_tier='default')}\n", + "INFO {'tool': 'haiku_no_cache', 'args': (), 'kwargs': {'theme': 'fish'}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3. Do not use any tools.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"theme\":\"fish\"}', 'name': 'haiku_no_cache'}, 'id': 'call_8gQN3B78H2aZzIdOZPhEqPqy', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_8gQN3B78H2aZzIdOZPhEqPqy', 'name': 'haiku_no_cache', 'content': [{'type': 'text', 'text': 'In the quiet stream, \\nSilver scales shimmer with grace, \\nFish dance in moonlight.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOEXNCJJEDIOBwwR8PaKYXoqOCs', created=1765834046, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the quiet stream, \\nSilver scales shimmer with grace, \\nFish dance in moonlight.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=410, total_tokens=430, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOEfHrN8jTkm6tIJ7EzgXP9bo2d', created=1765834046, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the ocean where fishies do play, \\nA big whale came swimming one day. 
\\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=45, prompt_tokens=365, total_tokens=410, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n" ] } ], @@ -466,6 +478,78 @@ " _ = limerick(\"fish4\")" ] }, + { + "cell_type": "markdown", + "id": "c0003944", + "metadata": {}, + "source": [ + "## Template Composition\n", + "\n", + "Templates defined in the lexical scope are also captured, enabling template composition. One template can use the result of another template in a pipeline:\n" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "78a4bf44", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Sub-templates available to write_story: [Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': ..., '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': ..., '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='limerick'), Template(__prompt_template__='Write a haiku on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': ..., 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': ..., 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='haiku_no_cache'), Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': ..., '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': ..., '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='primes'), Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': ..., 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': ..., 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='count_char'), Operation(cities, () -> list[str]), Operation(weather, (city: str) -> str), Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': ..., 
'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': ..., 
'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='vacation'), Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': ..., 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': ..., 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='write_joke'), Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': ..., 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': ..., 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='rate_joke'), Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': ..., 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': ..., 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='story_with_moral'), Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': ..., 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': ..., 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='story_funny'), Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> 
str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': ...}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': ...})), __name__='write_story')]\n", + "=== Story with moral ===\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': \"Write a story about a curious cat in the style: moral.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOJJzdrkeZmPdyh1h0cMSFZlFJE', created=1765834051, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"topic\":\"a curious cat\"}', name='story_with_moral'), id='call_nJMDv3AxDTvyxxoDKXAzH4aB', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=18, prompt_tokens=560, total_tokens=578, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a short story about a curious cat and end with a moral lesson. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOJnGjxZtPLcZ9ekNXtCUneEitd', created=1765834051, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In a quaint village nestled between rolling hills and whispering streams, there lived a cat named Whiskers. Whiskers was not an ordinary cat; his sleek, shiny coat gleamed under the sun, and his eyes sparkled with a clever curiosity that set him apart. His inquisitive nature drove him to explore every nook and cranny of the village, always seeking new adventures.\\n\\nOne day, while wandering near the woods, Whiskers stumbled upon a mysterious path he had never seen before. The path was lined with wildflowers and arched by towering trees that seemed to stretch on forever. Intrigued, Whiskers decided to follow it to see where it would lead.\\n\\nThe further he ventured, the stranger the path became. He encountered bubbling brooks, frogs that croaked like they were sharing secrets, and birds that sang unfamiliar melodies. Despite the eerie feeling curling around his paws, Whiskers pressed on.\\n\\nAfter what felt like hours, he arrived at a clearing with a peculiar sight: a large cage in the center with a small bird trapped inside. The bird chirped desperately, its tiny eyes pleading for help. Whiskers, though naturally inclined to chase birds, felt a tug of compassion watching the helpless creature.\\n\\nUsing his sharp claws, Whiskers carefully picked at the lock until it clicked open. The bird flapped its wings gratefully and soared into the sky, singing a joyful tune. 
Whiskers watched it disappear among the clouds, a warm feeling blossoming in his chest.\\n\\nContent with his good deed, Whiskers made his way back home, sticking to the original path. As he lay in his favorite sun-dappled spot on the porch, he reflected on his adventure.\\n\\nThe moral of the story is: Curiosity is a beautiful thing that leads to new discoveries, but it must be guided by kindness and the courage to act, for it is in helping others that we find our true purpose.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=395, prompt_tokens=528, total_tokens=923, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'story_with_moral', 'args': (), 'kwargs': {'topic': 'a curious cat'}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': \"Write a story about a curious cat in the style: moral.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"topic\":\"a curious cat\"}', 'name': 'story_with_moral'}, 'id': 'call_nJMDv3AxDTvyxxoDKXAzH4aB', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_nJMDv3AxDTvyxxoDKXAzH4aB', 'name': 'story_with_moral', 'content': [{'type': 'text', 'text': 'In a quaint village nestled between rolling hills and whispering streams, there lived a cat named Whiskers. 
Whiskers was not an ordinary cat; his sleek, shiny coat gleamed under the sun, and his eyes sparkled with a clever curiosity that set him apart. His inquisitive nature drove him to explore every nook and cranny of the village, always seeking new adventures.\\n\\nOne day, while wandering near the woods, Whiskers stumbled upon a mysterious path he had never seen before. The path was lined with wildflowers and arched by towering trees that seemed to stretch on forever. Intrigued, Whiskers decided to follow it to see where it would lead.\\n\\nThe further he ventured, the stranger the path became. He encountered bubbling brooks, frogs that croaked like they were sharing secrets, and birds that sang unfamiliar melodies. Despite the eerie feeling curling around his paws, Whiskers pressed on.\\n\\nAfter what felt like hours, he arrived at a clearing with a peculiar sight: a large cage in the center with a small bird trapped inside. The bird chirped desperately, its tiny eyes pleading for help. Whiskers, though naturally inclined to chase birds, felt a tug of compassion watching the helpless creature.\\n\\nUsing his sharp claws, Whiskers carefully picked at the lock until it clicked open. The bird flapped its wings gratefully and soared into the sky, singing a joyful tune. Whiskers watched it disappear among the clouds, a warm feeling blossoming in his chest.\\n\\nContent with his good deed, Whiskers made his way back home, sticking to the original path. As he lay in his favorite sun-dappled spot on the porch, he reflected on his adventure.\\n\\nThe moral of the story is: Curiosity is a beautiful thing that leads to new discoveries, but it must be guided by kindness and the courage to act, for it is in helping others that we find our true purpose.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOQIJsyohvBlsBGz9cztpetifUP', created=1765834058, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Here's a story about a curious cat named Whiskers who embarks on an adventure that teaches him an important lesson about kindness and courage. Whiskers' curiosity leads him to explore a mysterious path where he eventually discovers a trapped bird. Instead of succumbing to his natural instincts, Whiskers chooses to help the bird, freeing it from its cage. Through this act of compassion, Whiskers learns that while curiosity can lead to new discoveries, it is the courage to act with kindness that truly defines us.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=105, prompt_tokens=982, total_tokens=1087, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "Here's a story about a curious cat named Whiskers who embarks on an adventure that teaches him an important lesson about kindness and courage. Whiskers' curiosity leads him to explore a mysterious path where he eventually discovers a trapped bird. 
Instead of succumbing to his natural instincts, Whiskers chooses to help the bird, freeing it from its cage. Through this act of compassion, Whiskers learns that while curiosity can lead to new discoveries, it is the courage to act with kindness that truly defines us.\n", + "\n", + "=== Funny story ===\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': \"Write a story about a curious cat in the style: funny.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOTXQYpAjE6AhrSHrREbvtgiSzs', created=1765834061, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"topic\":\"a curious cat\"}', name='story_funny'), id='call_zMjPfWzaDFKuswF7HiHFFu4R', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=17, prompt_tokens=560, total_tokens=577, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a funny, humorous story about a curious cat. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOUficJNRttQKM2b9t8qJDzfXnQ', created=1765834062, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Once upon a time in the quaint little town of Whiskerfield, there lived a particularly curious cat named Whiskers. Now, Whiskers wasn't your average feline; he had a knack for getting himself into the most bizarre situations, much to the amusement of the townsfolk.\\n\\nOne sunny morning, as Whiskers ventured out of his cozy basket, he noticed a peculiar shiny object gleaming in the garden. It was unlike anything he had ever seen before – a mix between a large spoon and a tiny satellite dish. His curiosity piqued, Whiskers approached with his usual stealth, attempting to decipher this mysterious contraption.\\n\\nUnbeknownst to Whiskers, the shiny object was none other than the town's new state-of-the-art bird feeder, designed with reflective surfaces to keep the squirrels away. But to Whiskers, it was the most intriguing puzzle he'd ever encountered. With his tail twitching like a metronome, Whiskers pounced at the feeder, only to collide with its slippery surface and land unceremoniously on his back, paws in the air.\\n\\nUndeterred by his clumsy introduction, Whiskers began his investigation with fervor. He circled the feeder, pawing at it and meowing loudly, as if expecting a response. The neighborhood birds watched from a safe distance, chirping in a chorus that resembled laughter. 
Whiskers, paying no mind to his feathered audience, was determined to unlock the secrets of this shiny beacon.\\n\\nAs noon approached, Whiskers, now slightly exasperated and hungry, decided to enlist the help of his best friend, Rover the golden retriever. Rover, although quite good-natured, wasn't exactly the brains of their operation, but he was always up for an adventure. With wagging tails and determined purrs, the duo devised a plan. Rover would use his weight to tip the feeder, while Whiskers would keep an eye out for any unexpected critters.\\n\\nThe plan was in motion. Rover, in his typical bounding style, lunged at the feeder, causing it to wobble precariously. Just as it began to tip, a sudden gust of wind swung the contraption in a whirlwind of seeds and reflections. Whiskers and Rover, caught in the midst of this flying feast, found themselves covered in birdseed, with Whiskers' fur boasting a collection of tiny sunflower hats.\\n\\nAs they sat there, bewildered and giggling in their own peculiar way, the townsfolk couldn't help but chuckle at the antics of Whiskers and Rover. Even the birds stopped their fluttering to admire the spectacle. From that day on, the bird feeder was not just a source of food for the birds, but also a stage for Whiskerfield's most unexpected entertainment duo.\\n\\nAnd so, Whiskers the curious cat learned an important lesson: sometimes, curiosity might not uncover the mysteries you expect, but it certainly creates the most memorable adventures. 
And as for Rover, well, he just loved being part of the fun, birdseed hats and all.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=632, prompt_tokens=524, total_tokens=1156, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'story_funny', 'args': (), 'kwargs': {'topic': 'a curious cat'}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': \"Write a story about a curious cat in the style: funny.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"topic\":\"a curious cat\"}', 'name': 'story_funny'}, 'id': 'call_zMjPfWzaDFKuswF7HiHFFu4R', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_zMjPfWzaDFKuswF7HiHFFu4R', 'name': 'story_funny', 'content': [{'type': 'text', 'text': \"Once upon a time in the quaint little town of Whiskerfield, there lived a particularly curious cat named Whiskers. Now, Whiskers wasn't your average feline; he had a knack for getting himself into the most bizarre situations, much to the amusement of the townsfolk.\\n\\nOne sunny morning, as Whiskers ventured out of his cozy basket, he noticed a peculiar shiny object gleaming in the garden. It was unlike anything he had ever seen before – a mix between a large spoon and a tiny satellite dish. 
His curiosity piqued, Whiskers approached with his usual stealth, attempting to decipher this mysterious contraption.\\n\\nUnbeknownst to Whiskers, the shiny object was none other than the town's new state-of-the-art bird feeder, designed with reflective surfaces to keep the squirrels away. But to Whiskers, it was the most intriguing puzzle he'd ever encountered. With his tail twitching like a metronome, Whiskers pounced at the feeder, only to collide with its slippery surface and land unceremoniously on his back, paws in the air.\\n\\nUndeterred by his clumsy introduction, Whiskers began his investigation with fervor. He circled the feeder, pawing at it and meowing loudly, as if expecting a response. The neighborhood birds watched from a safe distance, chirping in a chorus that resembled laughter. Whiskers, paying no mind to his feathered audience, was determined to unlock the secrets of this shiny beacon.\\n\\nAs noon approached, Whiskers, now slightly exasperated and hungry, decided to enlist the help of his best friend, Rover the golden retriever. Rover, although quite good-natured, wasn't exactly the brains of their operation, but he was always up for an adventure. With wagging tails and determined purrs, the duo devised a plan. Rover would use his weight to tip the feeder, while Whiskers would keep an eye out for any unexpected critters.\\n\\nThe plan was in motion. Rover, in his typical bounding style, lunged at the feeder, causing it to wobble precariously. Just as it began to tip, a sudden gust of wind swung the contraption in a whirlwind of seeds and reflections. Whiskers and Rover, caught in the midst of this flying feast, found themselves covered in birdseed, with Whiskers' fur boasting a collection of tiny sunflower hats.\\n\\nAs they sat there, bewildered and giggling in their own peculiar way, the townsfolk couldn't help but chuckle at the antics of Whiskers and Rover. Even the birds stopped their fluttering to admire the spectacle. 
From that day on, the bird feeder was not just a source of food for the birds, but also a stage for Whiskerfield's most unexpected entertainment duo.\\n\\nAnd so, Whiskers the curious cat learned an important lesson: sometimes, curiosity might not uncover the mysteries you expect, but it certainly creates the most memorable adventures. And as for Rover, well, he just loved being part of the fun, birdseed hats and all.\"}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOfnzkwylzEgoQjoqCoqso1JcbO', created=1765834073, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Here's a funny story about a curious cat named Whiskers. 
In the quaint town of Whiskerfield, Whiskers is known for getting into bizarre situations. One day, he discovers a shiny bird feeder and mistakes it for a mysterious contraption. Despite several comedic mishaps, including enlisting the help of Rover the golden retriever, Whiskers ends up covered in birdseed, providing entertainment for the entire town. Through his antics, Whiskers realizes that while curiosity might not solve mysteries, it sure makes for adventurous tales.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=111, prompt_tokens=1217, total_tokens=1328, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "Here's a funny story about a curious cat named Whiskers. In the quaint town of Whiskerfield, Whiskers is known for getting into bizarre situations. One day, he discovers a shiny bird feeder and mistakes it for a mysterious contraption. Despite several comedic mishaps, including enlisting the help of Rover the golden retriever, Whiskers ends up covered in birdseed, providing entertainment for the entire town. Through his antics, Whiskers realizes that while curiosity might not solve mysteries, it sure makes for adventurous tales.\n" + ] + } + ], + "source": [ + "# Sub-templates for different story styles\n", + "@Template.define\n", + "def story_with_moral(topic: str) -> str:\n", + " \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\n", + " raise NotHandled\n", + "\n", + "\n", + "@Template.define\n", + "def story_funny(topic: str) -> str:\n", + " \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\n", + " raise NotHandled\n", + "\n", + "\n", + "# Main orchestrator template - has access to sub-templates\n", + "@Template.define\n", + "def write_story(topic: str, style: str) -> str:\n", + " \"\"\"Write a story about {topic} in the style: {style}.\n", + " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\n", + " raise NotHandled\n", + "\n", + "\n", + "# Verify sub-templates are captured in write_story's lexical context\n", + "assert story_with_moral in write_story.tools\n", + "assert story_funny in write_story.tools\n", + "print(\"Sub-templates available to write_story:\", list(write_story.tools))\n", + "\n", + "with handler(provider), handler(llm_logger):\n", + " print(\"=== Story with moral ===\")\n", + " print(write_story(\"a curious cat\", \"moral\"))\n", + " print()\n", + " print(\"=== Funny story ===\")\n", + " print(write_story(\"a curious cat\", \"funny\"))" + ] + }, { "cell_type": "markdown", "id": "bd25826d", @@ -489,7 +573,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 16, "id": "4334d07a", "metadata": {}, "outputs": [ @@ -497,12 +581,36 @@ "name": "stdout", "output_type": "stream", "text": [ - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CkjX4Tdccqd0ljEj2AUMlASFo31Tp', created=1765254150, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_6cYMZnIK0hrv3xTStyyWBLXR', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=52, total_tokens=63, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. Please retry.'}\"}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CkjX4gCmm8GEpTADaz6b3WJHXETYu', created=1765254150, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_7Cz1w1toF0CccR8e5XUp0dIP', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=95, total_tokens=106, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. 
Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_7Cz1w1toF0CccR8e5XUp0dIP', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_7Cz1w1toF0CccR8e5XUp0dIP', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 2/3. Please retry.'}\"}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CkjX57HW80BM2iCR3guWCc8e9etYS', created=1765254151, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_KLr43KhV2A4GzUbQfbhD1iXz', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=138, total_tokens=149, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 
'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOpUS8BAytmElgOX8fLDSSym8TP', created=1765834083, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_T6RgNagFWflpLfddbDdAhy7e', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOqUGCwcplfEVlflhfHFeoN0vmV', created=1765834084, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_1zL43TYBfRd82z76Ww3VtZ10', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOrjGy3cKfupoXxoUpyh5g3Rg2i', created=1765834085, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_QIhDdQlUVyQb4xL3bp9mA3Ln', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOrSd4OYuyZMTYJQEH8nAuKmbII', created=1765834085, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_ndnD0MTgx5kCh0RQloWTEMDO', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOs7Deh2JdFir6jSkT2tvgXUwAD', created=1765834086, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_zZZ2qOKtJOSK0NQR05Es8GCT', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOsEkupw6S2qV7dAVO6IXlv48fE', created=1765834086, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_SROFh6GD7MXpKjEjuCKfwsoR', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOtOfJnpaXYL7B3vdnHZKo7CfTQ', created=1765834087, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_OfaqegdBaNqhYXbmtMSfq8E3', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOu2KvgpurkvKS5js70BrYPZ6ei', created=1765834088, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_G1XWytSwDBOJXd5DbTnAgegd', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOuyGE1IdeXQAUdAihmAEuou0Nm', created=1765834088, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_yefiR7zjd3zhSaL4hO2IlVPz', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=553, total_tokens=564, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, 
{'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. Please retry.'}\"}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOv3DETvRpajf5oFobKAex8uI4c', created=1765834089, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_WKh4jx4XtF95mgUTv8uAXlLE', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=596, total_tokens=607, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. 
Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_WKh4jx4XtF95mgUTv8uAXlLE', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_WKh4jx4XtF95mgUTv8uAXlLE', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 2/3. Please retry.'}\"}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOwAICzWpytXVphXANfC6ix6Iv6', created=1765834090, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_g20DJy4DQvADfZGwqnNClZf7', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=639, total_tokens=650, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", "INFO {'tool': 'unstable_service', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_6cYMZnIK0hrv3xTStyyWBLXR', 'name': 'unstable_service', 'content': \"{'status': 
'failure', 'exception': 'Service unavailable! Attempt 1/3. Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_7Cz1w1toF0CccR8e5XUp0dIP', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_7Cz1w1toF0CccR8e5XUp0dIP', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 2/3. Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_KLr43KhV2A4GzUbQfbhD1iXz', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_KLr43KhV2A4GzUbQfbhD1iXz', 'name': 'unstable_service', 'content': [{'type': 'text', 'text': \"{ 'status': 'ok', 'data': [1, 2, 3] }\"}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CkjX5GhYtff6cMPBmDxTmDp8PVvEr', created=1765254151, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data: \\\\([1, 2, 3]\\\\).', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=18, prompt_tokens=178, total_tokens=196, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "Result: I successfully fetched the data: \\([1, 2, 3]\\). Retries: 3\n" + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. 
Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_WKh4jx4XtF95mgUTv8uAXlLE', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_WKh4jx4XtF95mgUTv8uAXlLE', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 2/3. Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_g20DJy4DQvADfZGwqnNClZf7', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_g20DJy4DQvADfZGwqnNClZf7', 'name': 'unstable_service', 'content': [{'type': 'text', 'text': \"{ 'status': 'ok', 'data': [1, 2, 3] }\"}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOwp4xrlzgidgfQPxc56WoCermB', created=1765834090, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='The data fetched from the unstable service is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=679, total_tokens=699, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_G1XWytSwDBOJXd5DbTnAgegd', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_G1XWytSwDBOJXd5DbTnAgegd', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'The data fetched from the unstable service is: `[1, 2, 3]`.'}]}], 'response_format': None, 
'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOxj5IZQ85aFjs4jCuNolWoY9OL', created=1765834091, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='The data fetched from the unstable service is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=590, total_tokens=610, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 
'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_OfaqegdBaNqhYXbmtMSfq8E3', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_OfaqegdBaNqhYXbmtMSfq8E3', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'The data fetched from the unstable service is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOx62IhQMgrL2JUgPzFMn7RImks', created=1765834091, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=21, prompt_tokens=590, total_tokens=611, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_SROFh6GD7MXpKjEjuCKfwsoR', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_SROFh6GD7MXpKjEjuCKfwsoR', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service: `[1, 2, 3]`.'}]}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOyXctxr4R0RMfvWHsU5AYqAIOY', created=1765834092, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service, and the data is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=26, prompt_tokens=591, total_tokens=617, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_zZZ2qOKtJOSK0NQR05Es8GCT', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_zZZ2qOKtJOSK0NQR05Es8GCT', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service, and 
the data is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP0wwqVQC9MyAl9zWVe7zzCHxAC', created=1765834094, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service, and the data is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=26, prompt_tokens=596, total_tokens=622, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_ndnD0MTgx5kCh0RQloWTEMDO', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_ndnD0MTgx5kCh0RQloWTEMDO', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service, and 
the data is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP0MVlZ9RfdBubfjSXaW6MoAdrL', created=1765834094, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service, and the data is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=26, prompt_tokens=596, total_tokens=622, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_QIhDdQlUVyQb4xL3bp9mA3Ln', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_QIhDdQlUVyQb4xL3bp9mA3Ln', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service, and 
the data is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP1erUfmc0ilFRVBJTR0bcGcrmx', created=1765834095, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service, and the data is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=26, prompt_tokens=596, total_tokens=622, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_1zL43TYBfRd82z76Ww3VtZ10', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_1zL43TYBfRd82z76Ww3VtZ10', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service, and 
the data is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP2E2oVkYct8MOs2A5fRhpclikB', created=1765834096, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully retrieved the data from the unstable service: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=21, prompt_tokens=596, total_tokens=617, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_T6RgNagFWflpLfddbDdAhy7e', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_T6RgNagFWflpLfddbDdAhy7e', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully retrieved the data from the unstable service: `[1, 2, 
3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP233bZtdUk1k1OCTmZHwHE9jN2', created=1765834096, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully retrieved the data from the unstable service: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=21, prompt_tokens=591, total_tokens=612, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "Result: I successfully retrieved the data from the unstable service: `[1, 2, 3]`. Retries: 3\n" ] } ], @@ -510,6 +618,7 @@ "call_count = 0\n", "REQUIRED_RETRIES = 3\n", "\n", + "\n", "@defop\n", "def unstable_service() -> str:\n", " \"\"\"Fetch data from an unstable external service. 
May require retries.\"\"\"\n", @@ -522,10 +631,10 @@ " return \"{ 'status': 'ok', 'data': [1, 2, 3] }\"\n", "\n", "\n", - "@Template.define(tools=[unstable_service])\n", + "@Template.define # unstable_service auto-captured from lexical scope\n", "def fetch_data() -> str:\n", " \"\"\"Use the unstable_service tool to fetch data.\"\"\"\n", - " raise NotImplementedError\n", + " raise NotHandled\n", "\n", "\n", "retry_handler = RetryLLMHandler(max_retries=5, add_error_feedback=True)\n", @@ -546,7 +655,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "id": "39b2b225", "metadata": {}, "outputs": [ @@ -554,10 +663,12 @@ "name": "stdout", "output_type": "stream", "text": [ - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Give a rating for Die Hard. The explanation MUST include the numeric score.'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-CkjbhOTBz1G18GHnmdx0IcPaqOIiB', created=1765254437, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":9,\"explanation\":\"Die Hard is often regarded as a quintessential action film, praised for its innovative story, memorable characters, and thrilling sequences. The film\\'s protagonist, John McClane, played by Bruce Willis, is celebrated for his relatable and everyman qualities, which set a new standard for action heroes. Additionally, Alan Rickman\\'s portrayal of the villain Hans Gruber is highly acclaimed for adding depth and sophistication to the antagonist role. The movie\\'s pace, witty dialogues, and suspenseful action have made it a beloved classic in the action genre. 
For these reasons, it deserves a high score of 9 out of 10.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=138, prompt_tokens=108, total_tokens=246, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Retry generating the following prompt: Give a rating for Die Hard. The explanation MUST include the numeric score.\\n\\nError from previous generation:\\n```\\nTraceback (most recent call last):\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 321, in _retry_completion\\n return fwd()\\n ^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 433, in __call__\\n return self_handler(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/opt/homebrew/Cellar/python@3.12/3.12.9/Frameworks/Python.framework/Versions/3.12/lib/python3.12/contextlib.py\", line 81, in inner\\n return func(*args, **kwds)\\n ^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 45, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 70, in bound_body\\n return body(*a, **k)\\n ^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n 
^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 471, in _call\\n return decode_response(template, resp)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 449, in __call__\\n return class_apply(self, *args, **kwargs) # type: ignore[return-value]\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 474, in apply\\n return op.__default_rule__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 334, in __default_rule__\\n return self.__default__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 426, in decode_response\\n result = Result.model_validate_json(result_str)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/.venv/lib/python3.12/site-packages/pydantic/main.py\", line 766, in model_validate_json\\n return cls.__pydantic_validator__.validate_json(\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\npydantic_core._pydantic_core.ValidationError: 1 validation error for Response\\nvalue.score\\n score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]\\n```'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-CkjbkDlrwCHGvzQNh2wdT28L7j19N', created=1765254440, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":5,\"explanation\":\"Die Hard is a highly acclaimed action film widely regarded as a classic in its genre. It combines thrilling action sequences with a charismatic performance by Bruce Willis as the lead character. 
The film\\'s clever plot, high stakes, and memorable antagonist make it a favorite among action movie enthusiasts. Critics and audiences alike often rate it at the top end of action cinema, earning it a score of 5 out of 5.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=97, prompt_tokens=843, total_tokens=940, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Give a rating for Die Hard. The explanation MUST include the numeric score.'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-ClKlfxy7G2JCHEWA97nDGMi5WQIfB', created=1765397283, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":9,\"explanation\":\"Die Hard is widely regarded as a classic in the action film genre, offering a perfect blend of intense action sequences, clever plot, and memorable performances, particularly by Bruce Willis as the iconic John McClane. The film\\'s strong pace, witty dialogue, and exceptional direction by John McTiernan make it a standout. 
It set a new standard for action movies and has a lasting impact that is still felt today, which merits a score of 9 out of 10.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=110, prompt_tokens=108, total_tokens=218, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Give a rating for Die Hard. The explanation MUST include the numeric score.'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-ClKlfxy7G2JCHEWA97nDGMi5WQIfB', created=1765397283, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":9,\"explanation\":\"Die Hard is widely regarded as a classic in the action film genre, offering a perfect blend of intense action sequences, clever plot, and memorable performances, particularly by Bruce Willis as the iconic John McClane. The film\\'s strong pace, witty dialogue, and exceptional direction by John McTiernan make it a standout. 
It set a new standard for action movies and has a lasting impact that is still felt today, which merits a score of 9 out of 10.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=110, prompt_tokens=108, total_tokens=218, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Retry generating the following prompt: Give a rating for Die Hard. The explanation MUST include the numeric score.\\n\\nError from previous generation:\\n```\\nTraceback (most recent call last):\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 464, in _retry_completion\\n return fwd(current_template, *args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 433, in __call__\\n return self_handler(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/opt/homebrew/Cellar/python@3.12/3.12.9/Frameworks/Python.framework/Versions/3.12/lib/python3.12/contextlib.py\", line 81, in inner\\n return func(*args, **kwds)\\n ^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 45, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 70, in bound_body\\n return body(*a, **k)\\n ^^^^^^^^^^^^^\\n 
File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 630, in _call\\n return decode_response(template, resp)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 449, in __call__\\n return class_apply(self, *args, **kwargs) # type: ignore[return-value]\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 474, in apply\\n return op.__default_rule__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 334, in __default_rule__\\n return self.__default__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 574, in decode_response\\n result = Result.model_validate_json(result_str)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/.venv/lib/python3.12/site-packages/pydantic/main.py\", line 766, in model_validate_json\\n return cls.__pydantic_validator__.validate_json(\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\npydantic_core._pydantic_core.ValidationError: 1 validation error for Response\\nvalue.score\\n score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]\\n```'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-ClKlikK4JqFvu8DRzy8JV2hEBOoAT', created=1765397286, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":5,\"explanation\":\"Die Hard is a quintessential action film that sets the standard for the genre, earning a score of 5 out of 5. 
Its gripping storyline, charismatic performance by Bruce Willis, and innovative action sequences make it a timeless classic.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=61, prompt_tokens=856, total_tokens=917, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Retry generating the following prompt: Give a rating for Die Hard. The explanation MUST include the numeric score.\\n\\nError from previous generation:\\n```\\nTraceback (most recent call last):\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 464, in _retry_completion\\n return fwd(current_template, *args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 433, in __call__\\n return self_handler(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/opt/homebrew/Cellar/python@3.12/3.12.9/Frameworks/Python.framework/Versions/3.12/lib/python3.12/contextlib.py\", line 81, in inner\\n return func(*args, **kwds)\\n ^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 45, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 70, in bound_body\\n return body(*a, **k)\\n ^^^^^^^^^^^^^\\n File 
\"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 630, in _call\\n return decode_response(template, resp)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 449, in __call__\\n return class_apply(self, *args, **kwargs) # type: ignore[return-value]\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 474, in apply\\n return op.__default_rule__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 334, in __default_rule__\\n return self.__default__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 574, in decode_response\\n result = Result.model_validate_json(result_str)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/.venv/lib/python3.12/site-packages/pydantic/main.py\", line 766, in model_validate_json\\n return cls.__pydantic_validator__.validate_json(\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\npydantic_core._pydantic_core.ValidationError: 1 validation error for Response\\nvalue.score\\n score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]\\n```'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-ClKlikK4JqFvu8DRzy8JV2hEBOoAT', created=1765397286, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":5,\"explanation\":\"Die Hard is a quintessential action film that sets the standard for the genre, earning a score of 5 out of 5. 
Its gripping storyline, charismatic performance by Bruce Willis, and innovative action sequences make it a timeless classic.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=61, prompt_tokens=856, total_tokens=917, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", "Score: 5/5\n", - "Explanation: Die Hard is a highly acclaimed action film widely regarded as a classic in its genre. It combines thrilling action sequences with a charismatic performance by Bruce Willis as the lead character. The film's clever plot, high stakes, and memorable antagonist make it a favorite among action movie enthusiasts. Critics and audiences alike often rate it at the top end of action cinema, earning it a score of 5 out of 5.\n" + "Explanation: Die Hard is a quintessential action film that sets the standard for the genre, earning a score of 5 out of 5. Its gripping storyline, charismatic performance by Bruce Willis, and innovative action sequences make it a timeless classic.\n" ] } ], @@ -598,8 +709,8 @@ "\n", "@Template.define\n", "def give_rating_for_movie(movie_name: str) -> Rating:\n", - " \"\"\"Give a rating for {movie_name}. The explanation MUST include the numeric score.\"\"\"\n", - " raise NotImplementedError\n", + " \"\"\"Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.\"\"\"\n", + " raise NotHandled\n", "\n", "\n", "# RetryLLMHandler with error feedback - the traceback helps LLM correct validation errors\n", @@ -613,7 +724,7 @@ "with handler(provider), handler(retry_handler), handler(llm_logger):\n", " rating = give_rating_for_movie(\"Die Hard\")\n", " print(f\"Score: {rating.score}/5\")\n", - " print(f\"Explanation: {rating.explanation}\")\n" + " print(f\"Explanation: {rating.explanation}\")" ] } ], diff --git a/effectful/handlers/llm/__init__.py b/effectful/handlers/llm/__init__.py index 0cdbdd0e..fbcb8593 100644 --- a/effectful/handlers/llm/__init__.py +++ b/effectful/handlers/llm/__init__.py @@ -1,17 +1,70 @@ +from __future__ import annotations + import dataclasses import functools import inspect -from collections.abc import Callable, Iterable +import types +from collections import ChainMap +from collections.abc import Callable, Iterable, Mapping +from typing import Any +from effectful.ops.semantics import evaluate from effectful.ops.syntax import defop from effectful.ops.types import NotHandled, Operation +class LexicalContext(ChainMap): + """ChainMap subclass for Template lexical scope. + + This avoids recursive evaluation of circular Template references. + """ + + pass + + +@evaluate.register(LexicalContext) +def _evaluate_lexical_context(expr: LexicalContext, **kwargs) -> LexicalContext: + return expr + + @dataclasses.dataclass(frozen=True) class Template[**P, T]: - __signature__: inspect.Signature __prompt_template__: str - tools: tuple[Operation, ...] 
+ __signature__: inspect.Signature + __context__: Mapping[str, Any] + __name__: str + + @staticmethod + def _get_excluded_operations() -> frozenset[Operation]: + """Get the set of internal operations to exclude from auto-capture.""" + from effectful.handlers.llm import providers + from effectful.ops import semantics + + excluded: set[Operation] = set() + for module in (providers, semantics): + for name in dir(module): + obj = getattr(module, name) + if isinstance(obj, Operation): + excluded.add(obj) + return frozenset(excluded) + + @property + def tools(self) -> tuple[Operation | Template, ...]: + """Operations and Templates available as tools. Auto-capture from lexical context.""" + excluded_ops = self._get_excluded_operations() + result: list[Operation | Template] = [] + # ChainMap.items() respects shadowing (locals shadow globals) + for name, obj in self.__context__.items(): + if name.startswith("_") or obj in result: + continue + if isinstance(obj, Operation): + # Exclude internal operations from providers and semantics modules + if obj in excluded_ops: + continue + result.append(obj) + elif isinstance(obj, Template): + result.append(obj) + return tuple(result) @defop def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: @@ -24,15 +77,39 @@ def __get__(self, instance, _owner): return self @classmethod - def define(cls, _func=None, *, tools: Iterable[Operation] = ()): + def define( + cls, + _func=None, + *, + tools: Iterable[Operation | Template] | str | None = None, + ): + """Define a prompt template. 
+ + Args: + tools: Tools to expose to the LLM: + - None (default): no tools + - "auto": auto-capture from lexical scope + - list: explicit list of Operations/Templates + """ + frame: types.FrameType = inspect.currentframe().f_back # type: ignore + globals_proxy: types.MappingProxyType[str, Any] = types.MappingProxyType( + frame.f_globals + ) + locals_proxy: types.MappingProxyType[str, Any] = types.MappingProxyType( + frame.f_locals + ) + # LexicalContext: locals first (shadow globals), then globals + context = LexicalContext(locals_proxy, globals_proxy) # type: ignore[arg-type] + def decorator(body: Callable[P, T]): if not body.__doc__: raise ValueError("Expected a docstring on body") return cls( - __signature__=inspect.signature(body), __prompt_template__=body.__doc__, - tools=tuple(tools), + __signature__=inspect.signature(body), + __name__=body.__name__, + __context__=context, ) if _func is None: diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index 27097f2d..aa091d67 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -47,13 +47,14 @@ def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: @dataclasses.dataclass class Tool[**P, T]: - operation: Operation[P, T] + callable: Operation[P, T] | Template[P, T] name: str parameter_annotations: dict[str, type] + description: str def serialise_return_value(self, value) -> OpenAIMessageContent: """Serializes a value returned by the function into a json format suitable for the OpenAI API.""" - sig = inspect.signature(self.operation) + sig = inspect.signature(self.callable) encoded_ty = type_to_encodable_type(sig.return_annotation) encoded_value = encoded_ty.encode(value) return encoded_ty.serialize(encoded_value) @@ -76,7 +77,6 @@ def call_with_json_args( ) -> OpenAIMessageContent: """Implements a roundtrip call to a python function. Input is a json string representing an LLM tool call request parameters. 
The output is the serialised response to the model.""" try: - op = self.operation # build dict of raw encodable types U raw_args = self.parameter_model.model_validate_json(json_str) @@ -91,11 +91,11 @@ def call_with_json_args( # call tool with python types result = tool_call( template, - self.operation, + self.callable, **params, ) # serialize back to U using encoder for return type - sig = inspect.signature(op) + sig = inspect.signature(self.callable) encoded_ty = type_to_encodable_type(sig.return_annotation) encoded_value = encoded_ty.encode(result) # serialise back to Json @@ -104,30 +104,49 @@ def call_with_json_args( return str({"status": "failure", "exception": str(exn)}) @classmethod - def of_operation(cls, op: Operation[P, T], name: str): - sig = inspect.signature(op) - hints = get_type_hints(op) - parameter_annotations: dict[str, type] = {} + def define(cls, obj: Operation[P, T] | Template[P, T]): + """Create a Tool from an Operation or Template. + + Returns None if the object cannot be converted to a tool (e.g., missing type annotations). 
+ """ + sig = inspect.signature(obj) + tool_name = obj.__name__ + description = ( + obj.__prompt_template__ if isinstance(obj, Template) else obj.__doc__ or "" + ) + + # Try to get type hints, fall back to signature annotations if that fails + try: + hints = get_type_hints(obj) + except Exception: + hints = { + p.name: p.annotation + for p in sig.parameters.values() + if p.annotation is not inspect.Parameter.empty + } + + parameter_annotations: dict[str, type] = {} for param_name, param in sig.parameters.items(): - # Check if parameter annotation is missing (inspect.Parameter.empty) + # Skip parameters without type annotations if param.annotation is inspect.Parameter.empty: raise TypeError( - f"Parameter '{param_name}' in operation '{op.__name__}' " + f"Parameter '{param_name}' in '{obj.__name__}' " "does not have a type annotation" ) # get_type_hints might not include the parameter if annotation is invalid if param_name not in hints: raise TypeError( - f"Parameter '{param_name}' in operation '{op.__name__}' " + f"Parameter '{param_name}' in '{obj.__name__}' " "does not have a valid type annotation" ) parameter_annotations[param_name] = hints[param_name] return cls( - operation=op, - name=name, + callable=obj, + name=tool_name, parameter_annotations=parameter_annotations, + description=description, ) @property @@ -140,7 +159,7 @@ def function_definition(self) -> OpenAIChatCompletionToolParam: "type": "function", "function": { "name": self.name, - "description": self.operation.__doc__ or "", + "description": self.description, "parameters": response_format["json_schema"][ "schema" ], # extract the schema @@ -149,19 +168,14 @@ def function_definition(self) -> OpenAIChatCompletionToolParam: } -def _tools_of_operations(ops: Iterable[Operation]) -> Mapping[str, Tool]: +def _tools_of_operations( + ops: Iterable[Operation | Template], +) -> Mapping[str, Tool]: tools = {} for op in ops: - name = op.__name__ - - # Ensure tool names are unique. Operation names may not be. 
- if name in tools: - suffix = 0 - while f"{name}_{suffix}" in tools: - suffix += 1 - name = f"{name}_{suffix}" - - tools[name] = Tool.of_operation(op, name) + tool = Tool.define(op) + # NOTE: Because lexical handling is already guaranteeing unique names, we can just use the tool's name directly. + tools[tool.name] = tool return tools @@ -213,8 +227,10 @@ def completion(*args, **kwargs) -> Any: # Note: attempting to type the tool arguments causes type-checker failures @defop -def tool_call[T](template: Template, tool: Operation[..., T], *args, **kwargs) -> T: - """Perform a model-initiated tool call.""" +def tool_call[T]( + template: Template, tool: Operation[..., T] | Template[..., T], *args, **kwargs +) -> T: + """Perform a model-initiated tool call (can be an Operation or another Template).""" return tool(*args, **kwargs) diff --git a/effectful/handlers/llm/synthesis.py b/effectful/handlers/llm/synthesis.py index 3a77441b..b9308776 100644 --- a/effectful/handlers/llm/synthesis.py +++ b/effectful/handlers/llm/synthesis.py @@ -51,7 +51,7 @@ def _parse_and_eval[T](self, t: type[T], content: str) -> T: # register into linecache linecache.cache[filename] = (len(source_code), None, lines, filename) - # TODO: assert callable type compatibility + # TODO: support injecting lexical context for synthesized code gs: dict = {} try: code_obj = compile(source_code, filename, "exec") diff --git a/tests/test_handlers_llm.py b/tests/test_handlers_llm.py index 20c8b969..9e0e510f 100644 --- a/tests/test_handlers_llm.py +++ b/tests/test_handlers_llm.py @@ -5,7 +5,7 @@ from effectful.handlers.llm import Template from effectful.handlers.llm.providers import RetryLLMHandler from effectful.handlers.llm.synthesis import ProgramSynthesis -from effectful.ops.semantics import handler +from effectful.ops.semantics import NotHandled, handler from effectful.ops.syntax import ObjectInterpretation, implements @@ -60,25 +60,38 @@ def _call[**P]( @Template.define def limerick(theme: str) -> str: 
"""Write a limerick on the theme of {theme}.""" - raise NotImplementedError + raise NotHandled @Template.define def haiku(theme: str) -> str: """Write a haiku on the theme of {theme}.""" - raise NotImplementedError + raise NotHandled @Template.define() def primes(first_digit: int) -> int: """Give exactly one prime number with {first_digit} as the first digit. Respond with only the number.""" - raise NotImplementedError + raise NotHandled @Template.define def count_char(char: str) -> Callable[[str], int]: """Write a function which takes a string and counts the occurrances of '{char}'.""" - raise NotImplementedError + raise NotHandled + + +# Mutually recursive templates (module-level for live globals) +@Template.define +def mutual_a() -> str: + """Use mutual_a and mutual_b as tools to do task A.""" + raise NotHandled + + +@Template.define +def mutual_b() -> str: + """Use mutual_a and mutual_b as tools to do task B.""" + raise NotHandled # Unit tests @@ -232,3 +245,118 @@ def _call(self, template: Template, *args, **kwargs): # Second call should include error feedback with traceback assert "Retry generating" in call_prompts[1] assert "First attempt failed" in call_prompts[1] + + +def test_template_captures_other_templates_in_lexical_context(): + """Test that Templates defined in lexical scope are captured (orchestrator pattern).""" + + # Define sub-templates first + @Template.define + def story_with_moral(topic: str) -> str: + """Write a story about {topic} with a moral lesson. Do not use any tools at all for this.""" + raise NotHandled + + @Template.define + def story_funny(topic: str) -> str: + """Write a funny story about {topic}. 
Do not use any tools at all for this.""" + raise NotHandled + + # Main orchestrator template has access to sub-templates + @Template.define + def write_story(topic: str, style: str) -> str: + """Write a story about {topic} in style {style}.""" + raise NotHandled + + # __context__ is a ChainMap(locals, globals) - locals shadow globals + # Sub-templates should be visible in lexical context + assert "story_with_moral" in write_story.__context__ + assert "story_funny" in write_story.__context__ + assert write_story.__context__["story_with_moral"] is story_with_moral + assert write_story.__context__["story_funny"] is story_funny + + # Templates in lexical context are exposed as callable tools + assert story_with_moral in write_story.tools + assert story_funny in write_story.tools + + +def test_template_composition_with_chained_calls(): + """Test calling one template and passing result to another.""" + + @Template.define + def generate_topic() -> str: + """Generate an interesting topic for a story. Do not try to use any tools for this beside from write_story.""" + raise NotHandled + + @Template.define + def write_story(topic: str) -> str: + """Write a short story about {topic}.""" + raise NotHandled + + # Verify generate_topic is in write_story's lexical context + assert "generate_topic" in write_story.__context__ + + # Test chained template calls + mock_provider = SingleResponseLLMProvider("A magical forest") + + with handler(mock_provider): + topic = generate_topic() + assert topic == "A magical forest" + + # Now use that topic in the next template + mock_provider2 = SingleResponseLLMProvider( + "Once upon a time in a magical forest..." + ) + + with handler(mock_provider2): + story = write_story(topic) + assert story == "Once upon a time in a magical forest..." 
+ + +def test_mutually_recursive_templates(): + """Test that module-level templates can see each other (mutual recursion).""" + # Both mutual_a and mutual_b should see each other via ChainMap (globals visible) + assert "mutual_a" in mutual_a.__context__ + assert "mutual_b" in mutual_a.__context__ + assert "mutual_a" in mutual_b.__context__ + assert "mutual_b" in mutual_b.__context__ + + # They should also be in each other's tools + assert mutual_a in mutual_b.tools + assert mutual_b in mutual_a.tools + # And themselves (self-recursion) + assert mutual_a in mutual_a.tools + assert mutual_b in mutual_b.tools + + +# Module-level variable for shadowing test +shadow_test_value = "global" + + +def test_lexical_context_shadowing(): + """Test that local variables shadow global variables in lexical context.""" + # Local shadows global + shadow_test_value = "local" # noqa: F841 - intentional shadowing + + @Template.define + def template_with_shadowed_var() -> str: + """Test template.""" + raise NotHandled + + # The lexical context should see the LOCAL value, not global + assert "shadow_test_value" in template_with_shadowed_var.__context__ + assert ( + template_with_shadowed_var.__context__["shadow_test_value"] == shadow_test_value + ) + + +def test_lexical_context_sees_globals_when_no_local(): + """Test that globals are visible when there's no local shadow.""" + + @Template.define + def template_sees_global() -> str: + """Test template.""" + raise NotHandled + + # Should see the global value (no local shadow in this scope) + assert "shadow_test_value" in template_sees_global.__context__ + assert template_sees_global.__context__["shadow_test_value"] == "global" diff --git a/tests/test_handlers_llm_provider.py b/tests/test_handlers_llm_provider.py index 9a0bcc5c..ac51bebd 100644 --- a/tests/test_handlers_llm_provider.py +++ b/tests/test_handlers_llm_provider.py @@ -1,5 +1,4 @@ """Tests for LLM handlers and providers. 
- This module tests the functionality from build/main.py and build/llm.py, breaking down individual components like LiteLLMProvider, LLMLoggingHandler, ProgramSynthesis, and sampling strategies. @@ -24,7 +23,7 @@ ) from effectful.handlers.llm.synthesis import ProgramSynthesis, SynthesisError from effectful.ops.semantics import fwd, handler -from effectful.ops.syntax import ObjectInterpretation, defop, implements +from effectful.ops.syntax import ObjectInterpretation, implements from effectful.ops.types import NotHandled # Check for API keys @@ -102,25 +101,26 @@ class MovieClassification: @Template.define def classify_genre(plot: str) -> MovieClassification: - """Classify the movie genre based on this plot: {plot}""" + """Classify the movie genre based on this plot: {plot}. Do not use any tools.""" raise NotImplementedError @Template.define def simple_prompt(topic: str) -> str: - """Write a short sentence about {topic}.""" + """Write a short sentence about {topic}. You MUST respond directly without using any tools.""" raise NotImplementedError @Template.define def generate_number(max_value: int) -> int: - """Generate a random number between 1 and {max_value}. Return only the number.""" + """Generate a random number between 1 and {max_value}. Return only the number. Do not use any tools.""" raise NotImplementedError @Template.define def create_function(char: str) -> Callable[[str], int]: """Create a function that counts occurrences of the character '{char}' in a string. + Do not use any tools. Return as a code block with the last definition being the function. 
""" @@ -260,90 +260,6 @@ def test_generates_callable(self): assert count_func("aardvark") == 3 -@dataclass -class Poem: - """A poem with content and form.""" - - content: str = Field(..., description="content of the poem") - form: str = Field(..., description="name of the type of the poem") - - -class PoemQuality(str, Enum): - """Quality rating for a poem.""" - - GOOD = "GOOD" - OKAY = "OKAY" - BAD = "BAD" - - -@defop -def evaluate_poem_tool(poem: Poem, explanation: str) -> PoemQuality: - """Evaluate the quality of a poem. - - Parameters: - - poem: Poem object representing the poem - - explanation: natural language explanation of the thought process - """ - raise NotHandled - - -class LoggingPoemEvaluationInterpretation(ObjectInterpretation): - """Provides an interpretation for `evaluate_poem_tool` that tracks evaluation counts.""" - - evaluation_count: int = 0 - evaluation_results: list[dict] = [] - - @implements(evaluate_poem_tool) - def _evaluate_poem_tool(self, poem: Poem, explanation: str) -> PoemQuality: - self.evaluation_count += 1 - - # Simple heuristic: require at least 2 evaluations, then approve - quality = PoemQuality.BAD if self.evaluation_count < 2 else PoemQuality.GOOD - - self.evaluation_results.append( - {"poem": poem, "explanation": explanation, "quality": quality} - ) - - return quality - - -@Template.define(tools=[evaluate_poem_tool]) -def generate_good_poem(topic: str) -> Poem: - """Generate a good poem about {topic} returning your result following - the provided json schema. Use the provided tools to evaluate the quality - and you MUST make sure it is a good poem. 
- """ - raise NotHandled - - -class TestToolCalling: - """Tests for templates with tool calling functionality.""" - - @pytest.mark.parametrize( - "model_name", - [ - pytest.param("gpt-5-nano", marks=requires_openai), - pytest.param("claude-sonnet-4-5-20250929", marks=requires_anthropic), - ], - ) - def test_tool_calling(self, model_name): - """Test that templates with tools work with openai.""" - poem_eval_ctx = LoggingPoemEvaluationInterpretation() - with ( - handler(LiteLLMProvider(model_name=model_name)), - handler(LimitLLMCallsHandler(max_calls=4)), - handler(poem_eval_ctx), - ): - poem = generate_good_poem("Python") - assert isinstance(poem, Poem) - assert isinstance(poem.content, str) - assert isinstance(poem.form, str) - - # Verify the tool was called at least once - assert poem_eval_ctx.evaluation_count >= 1 - assert len(poem_eval_ctx.evaluation_results) >= 1 - - def smiley_face() -> Image.Image: bmp = [ "00000000", @@ -365,7 +281,7 @@ def smiley_face() -> Image.Image: @Template.define def categorise_image(image: Image.Image) -> str: - """Return a description of the following image: + """Return a description of the following image. Do not use any tools. {image}""" raise NotHandled @@ -389,7 +305,7 @@ class BookReview(BaseModel): @Template.define def review_book(plot: str) -> BookReview: - """Review a book based on this plot: {plot}""" + """Review a book based on this plot: {plot}. Do not use any tools.""" raise NotImplementedError @@ -411,100 +327,3 @@ def test_pydantic_basemodel_return(self): assert 1 <= review.rating <= 5 assert isinstance(review.summary, str) assert len(review.summary) > 0 - - -class BookRecommendation(BaseModel): - """A book recommendation with details.""" - - title: str = Field(..., description="title of the recommended book") - reason: str = Field(..., description="reason for the recommendation") - - -@defop -def recommend_book_tool(genre: str, explanation: str) -> BookRecommendation: - """Recommend a book based on genre preference. 
- - Parameters: - - genre: The genre of book to recommend - - explanation: Natural language explanation of the recommendation - """ - raise NotHandled - - -class LoggingBookRecommendationInterpretation(ObjectInterpretation): - """Provides an interpretation for `recommend_book_tool` that tracks recommendations.""" - - recommendation_count: int = 0 - recommendation_results: list[dict] = [] - - @implements(recommend_book_tool) - def _recommend_book_tool(self, genre: str, explanation: str) -> BookRecommendation: - self.recommendation_count += 1 - - # Simple heuristic: recommend based on genre - recommendations = { - "fantasy": BookRecommendation( - title="The Lord of the Rings", reason="Classic fantasy epic" - ), - "sci-fi": BookRecommendation( - title="Dune", reason="Epic science fiction masterpiece" - ), - "mystery": BookRecommendation( - title="The Hound of the Baskervilles", - reason="Classic mystery novel", - ), - } - - recommendation = recommendations.get( - genre.lower(), - BookRecommendation( - title="1984", reason="Thought-provoking dystopian novel" - ), - ) - - self.recommendation_results.append( - { - "genre": genre, - "explanation": explanation, - "recommendation": recommendation, - } - ) - - return recommendation - - -@Template.define(tools=[recommend_book_tool]) -def get_book_recommendation(user_preference: str) -> BookRecommendation: - """Get a book recommendation based on user preference: {user_preference}. - Use the provided tools to make a recommendation. 
- """ - raise NotHandled - - -class TestPydanticBaseModelToolCalls: - @pytest.mark.parametrize( - "model_name", - [ - pytest.param("gpt-5-nano", marks=requires_openai), - pytest.param("claude-sonnet-4-5-20250929", marks=requires_anthropic), - ], - ) - def test_pydantic_basemodel_tool_calling(self, model_name): - """Test that templates with tools work with Pydantic BaseModel.""" - book_rec_ctx = LoggingBookRecommendationInterpretation() - with ( - handler(LiteLLMProvider(model_name=model_name)), - handler(LimitLLMCallsHandler(max_calls=4)), - handler(book_rec_ctx), - ): - recommendation = get_book_recommendation("I love fantasy novels") - - assert isinstance(recommendation, BookRecommendation) - assert isinstance(recommendation.title, str) - assert len(recommendation.title) > 0 - assert isinstance(recommendation.reason, str) - assert len(recommendation.reason) > 0 - - # Verify the tool was called at least once - assert book_rec_ctx.recommendation_count >= 1 - assert len(book_rec_ctx.recommendation_results) >= 1 diff --git a/tests/test_handlers_llm_tool_calling_book.py b/tests/test_handlers_llm_tool_calling_book.py new file mode 100644 index 00000000..bcdb1468 --- /dev/null +++ b/tests/test_handlers_llm_tool_calling_book.py @@ -0,0 +1,128 @@ +"""Tests for LLM tool calling functionality - Book recommendation. + +This module is separate to avoid lexical context pollution from other templates. 
+""" + +import os +from dataclasses import dataclass + +import pytest +from pydantic import BaseModel, Field + +from effectful.handlers.llm import Template +from effectful.handlers.llm.providers import ( + LiteLLMProvider, + completion, +) +from effectful.ops.semantics import fwd, handler +from effectful.ops.syntax import ObjectInterpretation, defop, implements +from effectful.ops.types import NotHandled + +# Check for API keys +HAS_OPENAI_KEY = "OPENAI_API_KEY" in os.environ and os.environ["OPENAI_API_KEY"] +HAS_ANTHROPIC_KEY = ( + "ANTHROPIC_API_KEY" in os.environ and os.environ["ANTHROPIC_API_KEY"] +) + +requires_openai = pytest.mark.skipif( + not HAS_OPENAI_KEY, reason="OPENAI_API_KEY environment variable not set" +) +requires_anthropic = pytest.mark.skipif( + not HAS_ANTHROPIC_KEY, reason="ANTHROPIC_API_KEY environment variable not set" +) + + +@dataclass +class LimitLLMCallsHandler(ObjectInterpretation): + """Handler that limits the number of LLM calls.""" + + max_calls: int = 10 + call_count: int = 0 + + @implements(completion) + def _completion(self, *args, **kwargs): + self.call_count += 1 + if self.call_count > self.max_calls: + raise RuntimeError( + f"Test used too many requests (max_calls = {self.max_calls})" + ) + return fwd() + + +class BookRecommendation(BaseModel): + """A book recommendation.""" + + title: str = Field(..., description="The title of the book") + reason: str = Field(..., description="Why this book is recommended") + + +@defop +def recommend_book_tool(genre: str, mood: str) -> BookRecommendation: + """Recommend a book based on genre and mood. 
+ + Parameters: + - genre: The genre of book to recommend + - mood: The mood or feeling the reader is looking for + """ + raise NotHandled + + +class LoggingBookRecommendationInterpretation(ObjectInterpretation): + """Provides an interpretation for `recommend_book_tool` that tracks calls.""" + + recommendation_count: int = 0 + recommendation_results: list[dict] = [] + + @implements(recommend_book_tool) + def _recommend_book_tool(self, genre: str, mood: str) -> BookRecommendation: + self.recommendation_count += 1 + + recommendation = BookRecommendation( + title=f"The {mood.title()} {genre.title()} Adventure", + reason=f"A perfect {genre} book for when you're feeling {mood}", + ) + + self.recommendation_results.append( + {"genre": genre, "mood": mood, "recommendation": recommendation} + ) + + return recommendation + + +@Template.define +def get_book_recommendation(user_preference: str) -> BookRecommendation: + """Get a book recommendation based on user preference: {user_preference}. + + You MUST use recommend_book_tool to get the recommendation. + Return the recommendation as JSON with 'title' and 'reason' fields. 
+ """ + raise NotHandled + + +class TestPydanticBaseModelToolCalls: + @pytest.mark.parametrize( + "model_name", + [ + pytest.param("gpt-5-nano", marks=requires_openai), + pytest.param("claude-sonnet-4-5-20250929", marks=requires_anthropic), + ], + ) + def test_pydantic_basemodel_tool_calling(self, model_name): + """Test that templates with tools work with Pydantic BaseModel.""" + book_rec_ctx = LoggingBookRecommendationInterpretation() + with ( + handler(LiteLLMProvider(model_name=model_name)), + handler(LimitLLMCallsHandler(max_calls=4)), + handler(book_rec_ctx), + ): + recommendation = get_book_recommendation("I love fantasy novels") + + assert isinstance(recommendation, BookRecommendation) + assert isinstance(recommendation.title, str) + assert len(recommendation.title) > 0 + assert isinstance(recommendation.reason, str) + assert len(recommendation.reason) > 0 + + # Verify the tool was called at least once + assert book_rec_ctx.recommendation_count >= 1 + assert len(book_rec_ctx.recommendation_results) >= 1 diff --git a/tests/test_handlers_llm_tool_calling_poem.py b/tests/test_handlers_llm_tool_calling_poem.py new file mode 100644 index 00000000..32acb637 --- /dev/null +++ b/tests/test_handlers_llm_tool_calling_poem.py @@ -0,0 +1,137 @@ +"""Tests for LLM tool calling functionality - Poem evaluation. + +This module is separate to avoid lexical context pollution from other templates. 
+""" + +import os +from dataclasses import dataclass +from enum import Enum + +import pytest +from pydantic import Field +from pydantic.dataclasses import dataclass as pydantic_dataclass + +from effectful.handlers.llm import Template +from effectful.handlers.llm.providers import ( + LiteLLMProvider, + completion, +) +from effectful.ops.semantics import fwd, handler +from effectful.ops.syntax import ObjectInterpretation, defop, implements +from effectful.ops.types import NotHandled + +# Check for API keys +HAS_OPENAI_KEY = "OPENAI_API_KEY" in os.environ and os.environ["OPENAI_API_KEY"] +HAS_ANTHROPIC_KEY = ( + "ANTHROPIC_API_KEY" in os.environ and os.environ["ANTHROPIC_API_KEY"] +) + +requires_openai = pytest.mark.skipif( + not HAS_OPENAI_KEY, reason="OPENAI_API_KEY environment variable not set" +) +requires_anthropic = pytest.mark.skipif( + not HAS_ANTHROPIC_KEY, reason="ANTHROPIC_API_KEY environment variable not set" +) + + +@dataclass +class LimitLLMCallsHandler(ObjectInterpretation): + """Handler that limits the number of LLM calls.""" + + max_calls: int = 10 + call_count: int = 0 + + @implements(completion) + def _completion(self, *args, **kwargs): + self.call_count += 1 + if self.call_count > self.max_calls: + raise RuntimeError( + f"Test used too many requests (max_calls = {self.max_calls})" + ) + return fwd() + + +@pydantic_dataclass +class Poem: + """A poem with content and form.""" + + content: str = Field(..., description="content of the poem") + form: str = Field(..., description="name of the type of the poem") + + +class PoemQuality(str, Enum): + """Quality rating for a poem.""" + + GOOD = "GOOD" + OKAY = "OKAY" + BAD = "BAD" + + +@defop +def evaluate_poem_tool(poem: Poem, explanation: str) -> PoemQuality: + """Evaluate the quality of a poem. 
+ + Parameters: + - poem: Poem object representing the poem + - explanation: natural language explanation of the thought process + """ + raise NotHandled + + +class LoggingPoemEvaluationInterpretation(ObjectInterpretation): + """Provides an interpretation for `evaluate_poem_tool` that tracks evaluation counts.""" + + evaluation_count: int = 0 + evaluation_results: list[dict] = [] + + @implements(evaluate_poem_tool) + def _evaluate_poem_tool(self, poem: Poem, explanation: str) -> PoemQuality: + self.evaluation_count += 1 + + # Simple heuristic: require at least 2 evaluations, then approve + quality = PoemQuality.BAD if self.evaluation_count < 2 else PoemQuality.GOOD + + self.evaluation_results.append( + {"poem": poem, "explanation": explanation, "quality": quality} + ) + + return quality + + +@Template.define +def generate_good_poem(topic: str) -> Poem: + """Generate a good poem about {topic}. + + You MUST use the evaluate_poem_tool to check poem quality. + Keep iterating until evaluate_poem_tool returns GOOD. + Return your final poem as JSON with 'content' and 'form' fields. 
+ """ + raise NotHandled + + +class TestToolCalling: + """Tests for templates with tool calling functionality.""" + + @pytest.mark.parametrize( + "model_name", + [ + pytest.param("gpt-5-nano", marks=requires_openai), + pytest.param("claude-sonnet-4-5-20250929", marks=requires_anthropic), + ], + ) + def test_tool_calling(self, model_name): + """Test that templates with tools work with openai.""" + poem_eval_ctx = LoggingPoemEvaluationInterpretation() + with ( + handler(LiteLLMProvider(model_name=model_name)), + handler(LimitLLMCallsHandler(max_calls=4)), + handler(poem_eval_ctx), + ): + poem = generate_good_poem("Python") + assert isinstance(poem, Poem) + assert isinstance(poem.content, str) + assert isinstance(poem.form, str) + + # Verify the tool was called at least once + assert poem_eval_ctx.evaluation_count >= 1 + assert len(poem_eval_ctx.evaluation_results) >= 1 From 8530fd08b34b6e124174ee431f701f99b5d908ff Mon Sep 17 00:00:00 2001 From: eb8680 Date: Mon, 22 Dec 2025 16:47:37 -0500 Subject: [PATCH 24/39] Update `staging-llm` from `master` (#457) * Release v0.2.3 (#374) * Install prettyprinter for term when library is available (#386) * install prettyprinter for term when library is available * lint * move code into types.py * fix pypandoc issue (#397) * Convert evaluate to a singledispatch (#398) * convert evaluate to a singledispatch * lint * add jnp.pi and ArrayTerm.T (#394) * Deprecate defterm (#399) * deprecate defterm * remove defterm case * remove defterm * lint * evaluate distribution arguments * lint * remove interpreter * Revert "remove interpreter" This reverts commit 30442779689da862aa6f8e0224330ffaf8f556ae. 
* wip * lint * Rework numpyro distribution handling to enable symbolic distributions and handling of distribution methods (#311) * refactor distribution operations * add a test for typeof of distributions * add tests for symbolic dists/arguments * introduce operations for distribution methods * comment * fix tests * work around https://github.com/BasisResearch/effectful/issues/310 * replace hack with new hack * tweak repr for _BaseOperation * lint * work around https://github.com/BasisResearch/effectful/issues/312 * clean up access to dist ops * wip * wip * add type annotations to get correct term conversion * lint * include distribution arguments as properties * fix distribution calls * try again * fixes * format * Box the output of `__type_rule__` (#387) * box the output of __type_rule__ * fix tests * fix tests * require callers of __type_rule__ to box arguments * fix * move Box out of ops.types * lint * fix test * fix syntactic_eq implementation for jax arrays (#405) * Fix recursion error in sizesof (#406) * fix recursion error in sizesof * format * Allow `_BaseOperation` subclasses to have an overrideable `apply` method (#414) * stash * fixes * initial * wip * lint * ensure each subclass has a fresh operation * wip * wip * lint * wip * wip * lint * refactor class method support * move defops * fix test * remove singledispatch case and add test * move definition * cleanup * simplify * cleanup * lint * fix failing test * fix classmethod * __isabstractmethod__ * revert --------- Co-authored-by: Eli * Try pulling in pyproject.toml from staging-llm to master (#425) * Generate instance-level `Operation`s for bound methods (#351) * generalize __get__ * nits * coverage of methoddescriptor api * methodtype * simplify * simplify * simplify * format * revert * restore * simplify * simplify * retain instance op on term construction * Simplify apply inheritance * assign * put call next to init_subclass * add explanatory comment * Operation.apply -> Operation.__apply__ * add 
test based on issue description * fix doctest * Fix dataclass @defops and added dataclass metaclass (#439) * fixed dataclass ordering and added metaclass for simplifying construction of dataclass terms * ensure term fields are not being overriden * added decorator and dataclass * updated to make defdata registration automatic * simplified dataclass loop * updated to give property op an appropriate name * added failing tests * fixed failing test * fixed numpyro/pyro/torch interfaces * minor fix + test for deffn kwargs * Type check and lint example code (#449) * format example code * type check examples * Add beam search example using thermometer continuations (#431) * add beam search example using thermometer continuations * address comments * add docstring * lint * Fix for jax 0.8.2 (#455) * fix for jax 0.8.2 * add more register * format --------- Co-authored-by: Jack Feser Co-authored-by: Tim Cooijmans Co-authored-by: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> --- docs/source/beam.py | 144 ++++++++++++++++++++++++++++ docs/source/beam_search_example.rst | 26 +++++ docs/source/conf.py | 4 +- docs/source/index.rst | 1 + docs/source/lambda_.py | 3 +- docs/source/minipyro.py | 39 ++++---- docs/source/readme_example.py | 2 +- docs/source/semi_ring.py | 2 +- effectful/handlers/jax/_terms.py | 6 +- scripts/clean.sh | 2 +- scripts/lint.sh | 5 + 11 files changed, 206 insertions(+), 28 deletions(-) create mode 100644 docs/source/beam.py create mode 100644 docs/source/beam_search_example.rst diff --git a/docs/source/beam.py b/docs/source/beam.py new file mode 100644 index 00000000..843572b0 --- /dev/null +++ b/docs/source/beam.py @@ -0,0 +1,144 @@ +"""This example demonstrates a beam search over a program that uses a `choose` +effect for nondeterminism and `score` effect to weigh its choices. 
+ +""" + +import functools +import heapq +import random +from collections.abc import Callable +from dataclasses import dataclass +from pprint import pprint + +from effectful.ops.semantics import fwd, handler +from effectful.ops.syntax import ObjectInterpretation, defop, implements + + +@defop +def choose[T](choices: list[T]) -> T: + result = random.choice(choices) + print(f"choose({choices}) = {result}") + return result + + +@defop +def score(value: float) -> None: + pass + + +class Suspend(Exception): ... + + +class ReplayIntp(ObjectInterpretation): + def __init__(self, trace): + self.trace = trace + self.step = 0 + + @implements(choose) + def _(self, *args, **kwargs): + if self.step < len(self.trace): + result = self.trace[self.step][1] + self.step += 1 + return result + return fwd() + + +class TraceIntp(ObjectInterpretation): + def __init__(self): + self.trace = [] + + @implements(choose) + def _(self, *args, **kwargs): + result = fwd() + self.trace.append(((args, kwargs), result)) + return result + + +class ScoreIntp(ObjectInterpretation): + def __init__(self): + self.score = 0.0 + + @implements(score) + def _(self, value): + self.score += value + + +class ChooseOnceIntp(ObjectInterpretation): + def __init__(self): + self.is_first_call = True + + @implements(choose) + def _(self, *args, **kwargs): + if not self.is_first_call: + raise Suspend + + self.is_first_call = False + return fwd() + + +@dataclass +class BeamCandidate[S, T]: + """Represents a candidate execution path in beam search.""" + + trace: list[S] + score: float + in_progress: bool + result: T | None + + def __lt__(self, other: "BeamCandidate[S, T]") -> bool: + return self.score < other.score + + def expand[**P](self, model_fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs): + in_progress = False + result = None + score_intp = ScoreIntp() + trace_intp = TraceIntp() + with ( + handler(score_intp), + handler(ChooseOnceIntp()), + handler(ReplayIntp(self.trace)), + handler(trace_intp), + ): + try: + 
result = model_fn(*args, **kwargs) + except Suspend: + in_progress = True + + return BeamCandidate(trace_intp.trace, score_intp.score, in_progress, result) + + +def beam_search[**P, S, T]( + model_fn: Callable[P, T], beam_width=3 +) -> Callable[P, BeamCandidate[S, T]]: + @functools.wraps(model_fn) + def wrapper(*args, **kwargs): + beam = [BeamCandidate([], 0.0, True, None)] + + while True: + expandable = [c for c in beam if c.in_progress] * beam_width + if not expandable: + return beam + + new_candidates = [c.expand(model_fn, *args, **kwargs) for c in expandable] + + for c in new_candidates: + heapq.heappushpop(beam, c) if len( + beam + ) >= beam_width else heapq.heappush(beam, c) + + return wrapper + + +if __name__ == "__main__": + + def model(): + s1 = choose(range(100)) + score(s1) + s2 = choose(range(-100, 100)) + score(s2) + s3 = choose(range(-100, 100)) + score(s3) + return s3 + + result: BeamCandidate = beam_search(model)() + pprint(result) diff --git a/docs/source/beam_search_example.rst b/docs/source/beam_search_example.rst new file mode 100644 index 00000000..595354ec --- /dev/null +++ b/docs/source/beam_search_example.rst @@ -0,0 +1,26 @@ +Angelic Nondeterminism +====================== + +Here we give an example of *angelic nondeterminism* in effectful [#f1]_. +Our model is a nondeterministic program that makes choices using a ``choose`` effect and uses a ``score`` effect to sum up a final score. +We implement a beam search that optimizes this final score as a handler for the ``choose`` and ``score`` effects. + +The beam search works by running the model until it reaches a ``choose``, at which point the continuation is captured. +This continuation is resumed multiple times with different values from ``choose`` to expand the beam. +The intermediate score is used to rank the beam candidates. + +Because Python does not have support for first-class continuations, we use *thermometer continuations* [#f2]_. 
+A thermometer continuation works by tracking any nondeterminism +(essentially, the model is rerun from the start replaying the ``choose`` effects). +If ``choose`` is the only source of nondeterminism, then rerunning the model and +replaying the recorded choices faithfully reconstructs the suspended computation. + +.. literalinclude:: ./beam.py + :language: python + +References +---------- + +.. [#f1] Li, Z., Solar-Lezama, A., Yue, Y., and Zheng, S., "EnCompass: Enhancing Agent Programming with Search Over Program Execution Paths", 2025. https://arxiv.org/abs/2512.03571 + +.. [#f2] James Koppel, Gabriel Scherer, and Armando Solar-Lezama. 2018. Capturing the future by replaying the past (functional pearl). Proc. ACM Program. Lang. 2, ICFP, Article 76 (September 2018), 29 pages. https://doi.org/10.1145/3236771 diff --git a/docs/source/conf.py b/docs/source/conf.py index e0f0854b..7a33f605 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -12,10 +12,8 @@ import os import sys -from typing import List sys.path.insert(0, os.path.abspath("../../")) -import sphinx_rtd_theme  # noqa: E402 # -- Project information ----------------------------------------------------- @@ -69,7 +67,7 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns: List[str] = [] +exclude_patterns: list[str] = [] # -- Options for HTML output ------------------------------------------------- diff --git a/docs/source/index.rst b/docs/source/index.rst index aa973489..92aa0207 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -16,6 +16,7 @@ Table of Contents minipyro_example lambda_example semi_ring_example + beam_search_example ..
toctree:: :maxdepth: 2 diff --git a/docs/source/lambda_.py b/docs/source/lambda_.py index 2ec34ac7..a58d552b 100644 --- a/docs/source/lambda_.py +++ b/docs/source/lambda_.py @@ -1,5 +1,6 @@ import functools -from typing import Annotated, Callable +from collections.abc import Callable +from typing import Annotated from effectful.ops.semantics import coproduct, evaluate, fvsof, fwd, handler from effectful.ops.syntax import Scoped, defdata, defop, syntactic_eq diff --git a/docs/source/minipyro.py b/docs/source/minipyro.py index 6084fb7a..86b7de7a 100644 --- a/docs/source/minipyro.py +++ b/docs/source/minipyro.py @@ -14,10 +14,12 @@ """ import random +from collections import OrderedDict +from collections.abc import Callable from contextlib import contextmanager from dataclasses import dataclass from functools import partial -from typing import Callable, Concatenate, NamedTuple, Optional, OrderedDict, Union +from typing import Concatenate, NamedTuple from weakref import ref import numpy as np @@ -40,7 +42,6 @@ from effectful.ops.syntax import ObjectInterpretation, defop, implements from effectful.ops.types import Operation - # Poutine has a notion of 'messages', which are dictionaries # that are passed between handlers (or 'Messengers') in order # to facilitate coordination and composition using "magic" slots. 
@@ -56,7 +57,7 @@ class SampleMsg: name: str val: Tensor dist: Distribution - obs: Optional[Tensor] + obs: Tensor | None @dataclass @@ -65,7 +66,7 @@ class ParamMsg: val: Tensor -Message = Union[ParamMsg, SampleMsg] +Message = ParamMsg | SampleMsg Trace = OrderedDict[str, Message] @@ -87,16 +88,16 @@ class Seed(NamedTuple): @defop -def sample(name: str, dist: Distribution, obs: Optional[Tensor] = None) -> Tensor: +def sample(name: str, dist: Distribution, obs: Tensor | None = None) -> Tensor: raise RuntimeError("No default implementation of sample") @defop def param( var_name: str, - initial_value: Optional[Union[Tensor, Callable[[], Tensor]]] = None, - constraint: Optional[Constraint] = None, - event_dim: Optional[int] = None, + initial_value: Tensor | Callable[[], Tensor] | None = None, + constraint: Constraint | None = None, + event_dim: int | None = None, ) -> Tensor: raise RuntimeError("No default implementation of param") @@ -123,7 +124,7 @@ def get_rng_seed() -> Seed: @defop -def set_rng_seed(seed: Union[int, Seed]): +def set_rng_seed(seed: int | Seed): raise RuntimeError("No default implementation of get_rng_seed") @@ -165,9 +166,9 @@ def sample(self, var_name: str, dist: Distribution, **kwargs): def param( self, var_name: str, - initial_value: Optional[Union[Tensor, Callable[[], Tensor]]] = None, - constraint: Optional[Constraint] = None, - event_dim: Optional[int] = None, + initial_value: Tensor | Callable[[], Tensor] | None = None, + constraint: Constraint | None = None, + event_dim: int | None = None, ) -> Tensor: # Similar to `Tracer.sample` @@ -227,7 +228,7 @@ def get_rng_seed(self): ) @implements(set_rng_seed) - def set_rng_seed(self, seed: Union[int, Seed]): + def set_rng_seed(self, seed: int | Seed): if isinstance(seed, int): manual_seed(seed) random.seed(seed) @@ -276,9 +277,9 @@ def __init__(self, initial_store=None): def param( self, name: str, - initial_value: Union[Tensor, None, Callable[[], Tensor]] = None, + initial_value: Tensor | None | 
Callable[[], Tensor] = None, constraint: Constraint = distributions.constraints.real, - event_dim: Optional[int] = None, + event_dim: int | None = None, ) -> Tensor: if event_dim is not None: raise RuntimeError("minipyro.plate does not support the event_dim arg") @@ -321,7 +322,7 @@ class Plate(ObjectInterpretation): An `Interpretation` which automatically broadcasts the `sample` `Operation` """ - def __init__(self, name: str, size: int, dim: Optional[int]): + def __init__(self, name: str, size: int, dim: int | None): if dim is None: raise ValueError("mini-pyro requires the `dim` argument to `plate`") @@ -342,7 +343,7 @@ def do_sample(self, sampled_name: str, dist: Distribution, **kwargs) -> Tensor: # Helper for using `Plate` as a `handler` -def plate(name: str, size: int, dim: Optional[int] = None): +def plate(name: str, size: int, dim: int | None = None): return handler(Plate(name, size, dim)) @@ -354,7 +355,7 @@ def plate(name: str, size: int, dim: Optional[int] = None): def block[**P]( - hide_fn: Callable[Concatenate[Operation, object, P], bool] = lambda *_, **__: True + hide_fn: Callable[Concatenate[Operation, object, P], bool] = lambda *_, **__: True, ): """ Block is a helper for masking out a subset of calls to either @@ -372,7 +373,7 @@ def blocking(op: Operation, *args, **kwargs): return op(*args, **kwargs) return fwd() - return handler({sample: partial(blocking, sample), param: partial(blocking, param)}) # type: ignore + return handler({sample: partial(blocking, sample), param: partial(blocking, param)}) # This is a thin wrapper around the `torch.optim.Adam` class that diff --git a/docs/source/readme_example.py b/docs/source/readme_example.py index aa9e04fb..e97337ce 100644 --- a/docs/source/readme_example.py +++ b/docs/source/readme_example.py @@ -35,7 +35,7 @@ def assoc_add(x, y): commute_rules = {add: commute_add} assoc_rules = {add: assoc_add} -eager_mixed = functools.reduce(coproduct, (beta_rules, commute_rules, assoc_rules)) +eager_mixed = 
functools.reduce(coproduct, (beta_rules, commute_rules, assoc_rules)) # type: ignore x = defop(int, name="x") y = defop(int, name="y") diff --git a/docs/source/semi_ring.py b/docs/source/semi_ring.py index 685dcb13..0d5a65d0 100644 --- a/docs/source/semi_ring.py +++ b/docs/source/semi_ring.py @@ -1,7 +1,7 @@ import collections.abc import operator import types -from typing import Annotated, Tuple, Union, cast, overload +from typing import Annotated, cast, overload from effectful.ops.semantics import coproduct, evaluate, fwd, handler from effectful.ops.syntax import Scoped, defop diff --git a/effectful/handlers/jax/_terms.py b/effectful/handlers/jax/_terms.py index 989ec3d9..e3d60809 100644 --- a/effectful/handlers/jax/_terms.py +++ b/effectful/handlers/jax/_terms.py @@ -432,7 +432,8 @@ def ndim(self) -> int: return len(self.shape) -@bind_dims.register # type: ignore +@bind_dims.register(jax.Array) # type: ignore +@bind_dims.register(jax._src.core.Tracer) # type: ignore def _bind_dims_array(t: jax.Array, *args: Operation[[], jax.Array]) -> jax.Array: """Convert named dimensions to positional dimensions. 
@@ -501,6 +502,7 @@ def _evaluate(expr): return reindexed -@unbind_dims.register # type: ignore +@unbind_dims.register(jax.Array) # type: ignore +@unbind_dims.register(jax._src.core.Tracer) # type: ignore def _unbind_dims_array(t: jax.Array, *args: Operation[[], jax.Array]) -> jax.Array: return jax_getitem(t, tuple(n() for n in args)) diff --git a/scripts/clean.sh b/scripts/clean.sh index 0669b664..17fc1d65 100755 --- a/scripts/clean.sh +++ b/scripts/clean.sh @@ -1,7 +1,7 @@ #!/bin/bash set -euxo pipefail -SRC="effectful tests" +SRC="effectful tests docs/source" ruff check --fix $SRC ruff format $SRC diff --git a/scripts/lint.sh b/scripts/lint.sh index f186be70..c0e7e029 100755 --- a/scripts/lint.sh +++ b/scripts/lint.sh @@ -4,6 +4,11 @@ set -euxo pipefail SRC="tests/ effectful/" mypy $SRC +for f in docs/source/*.py +do + mypy $f +done + ruff check $SRC ruff format --diff $SRC From bae8d022dde0e2c80119b0ca5f8accc0de094fed Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Mon, 29 Dec 2025 09:20:25 -0500 Subject: [PATCH 25/39] Convert `Template` into an operation (#424) * Release v0.2.3 (#374) * stash * Install prettyprinter for term when library is available (#386) * install prettyprinter for term when library is available * lint * move code into types.py * fixes * fix pypandoc issue (#397) * Convert evaluate to a singledispatch (#398) * convert evaluate to a singledispatch * lint * add jnp.pi and ArrayTerm.T (#394) * Deprecate defterm (#399) * deprecate defterm * remove defterm case * remove defterm * lint * evaluate distribution arguments * lint * remove interpreter * Revert "remove interpreter" This reverts commit 30442779689da862aa6f8e0224330ffaf8f556ae. 
* wip * lint * Rework numpyro distribution handling to enable symbolic distributions and handling of distribution methods (#311) * refactor distribution operations * add a test for typeof of distributions * add tests for symbolic dists/arguments * introduce operations for distribution methods * comment * fix tests * work around https://github.com/BasisResearch/effectful/issues/310 * replace hack with new hack * tweak repr for _BaseOperation * lint * work around https://github.com/BasisResearch/effectful/issues/312 * clean up access to dist ops * wip * wip * add type annotations to get correct term conversion * lint * include distribution arguments as properties * fix distribution calls * try again * fixes * format * Box the output of `__type_rule__` (#387) * box the output of __type_rule__ * fix tests * fix tests * require callers of __type_rule__ to box arguments * fix * move Box out of ops.types * lint * fix test * fix syntactic_eq implementation for jax arrays (#405) * Fix recursion error in sizesof (#406) * fix recursion error in sizesof * format * initial * wip * lint * ensure each subclass has a fresh operation * wip * wip * lint * wip * wip * lint * refactor class method support * move defops * fix test * remove singledispatch case and add test * move definition * cleanup * simplify * cleanup * lint * fix failing test * wip * Allow `_BaseOperation` subclasses to have an overrideable `apply` method (#414) * stash * fixes * initial * wip * lint * ensure each subclass has a fresh operation * wip * wip * lint * wip * wip * lint * refactor class method support * move defops * fix test * remove singledispatch case and add test * move definition * cleanup * simplify * cleanup * lint * fix failing test * fix classmethod * __isabstractmethod__ * revert --------- Co-authored-by: Eli * wip * fix define signature * wip * wip * wip * wip * wip * wip * wip * lint * wip * test fix * fix merge error * lint * wip * respond to feedback * respond to feedback * clean up 
notebook * remove breakpoint * relax typing requirement * improve handling of method templates * lint * remove imprecise check * handle nested classes using qualname * feedback * fix scoping issue and add tests * address feedback * add xfail tests * lint --------- Co-authored-by: Eli Co-authored-by: Tim Cooijmans Co-authored-by: eb8680 --- docs/source/llm.ipynb | 388 ++++++++++++------- effectful/handlers/llm/__init__.py | 118 +----- effectful/handlers/llm/providers.py | 302 +++++---------- effectful/handlers/llm/sampling.py | 2 +- effectful/handlers/llm/synthesis.py | 20 +- effectful/handlers/llm/template.py | 144 +++++++ effectful/ops/types.py | 5 +- tests/test_handlers_llm.py | 23 +- tests/test_handlers_llm_template.py | 206 ++++++++++ tests/test_handlers_llm_tool_calling_book.py | 11 +- tests/test_handlers_llm_tool_calling_poem.py | 8 +- 11 files changed, 727 insertions(+), 500 deletions(-) create mode 100644 effectful/handlers/llm/template.py create mode 100644 tests/test_handlers_llm_template.py diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index 6f2a24b0..64f342b0 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 1, + "execution_count": 33, "id": "5aaf649f", "metadata": {}, "outputs": [], @@ -14,18 +14,20 @@ "import sys\n", "from collections.abc import Callable\n", "\n", - "from effectful.handlers.llm import Template\n", + "import pydantic\n", + "from pydantic import ValidationError, field_validator\n", + "from pydantic_core import PydanticCustomError\n", + "\n", + "from effectful.handlers.llm import Template, Tool\n", "from effectful.handlers.llm.providers import (\n", " CacheLLMRequestHandler,\n", " LiteLLMProvider,\n", " LLMLoggingHandler,\n", " RetryLLMHandler,\n", " completion,\n", - " tool_call,\n", ")\n", "from effectful.handlers.llm.synthesis import ProgramSynthesis\n", "from effectful.ops.semantics import NotHandled, fwd, handler\n", - "from 
effectful.ops.syntax import defop\n", "\n", "provider = LiteLLMProvider()" ] @@ -56,7 +58,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 34, "id": "1e832675", "metadata": {}, "outputs": [], @@ -79,7 +81,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 35, "id": "634f6533", "metadata": {}, "outputs": [ @@ -87,17 +89,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "In the ocean so deep and so swish, \n", - "Swam a curious gold-colored fish. \n", - "With a flick of its tail, \n", - "It set off to sail, \n", - "In search of a dream and a wish.\n", - "----------------------------------------\n", - "In the ocean where fish like to play, \n", - "They swim and they glide all the day. \n", + "In the ocean so vast and so wide, \n", + "A little fish tried hard to hide. \n", "With scales shining bright, \n", - "They bring such delight, \n", - "In the waters, they dance and display.\n" + "It gave quite a sight, \n", + "And swam with the current and tide.\n", + "----------------------------------------\n", + "In the sea swam a fish with a grin, \n", + "Who loved to flip and to spin, \n", + "He danced through the tide, \n", + "With friends by his side, \n", + "A joyous splash with his slippery fin!\n" ] } ], @@ -118,7 +120,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 36, "id": "706ce53b", "metadata": {}, "outputs": [ @@ -127,37 +129,29 @@ "output_type": "stream", "text": [ "\n", - "Silent waters dance, \n", - "Silver flash beneath the waves— \n", - "Fish in quiet grace.\n", + "Glimmering scales shine, \n", + "Dancing in the water's flow, \n", + "Silent whispers swim. \n", "----------------------------------------\n", - "Silent waters dance, \n", - "Silver flash beneath the waves— \n", - "Fish in quiet grace.\n", + "Glimmering scales shine, \n", + "Dancing in the water's flow, \n", + "Silent whispers swim. 
\n", "\n", - "Fish swim silently, \n", - "In the embrace of water, \n", - "Nature's dance alive.\n", + "In clear water's dance, \n", + "Silent scales shimmer and glide, \n", + "Fish weave nature's trance. \n", "----------------------------------------\n", - "Fish swim silently, \n", - "In the embrace of water, \n", - "Nature's dance alive.\n", - "\n", - "\n", - "\u001b[1;31mGive Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new\u001b[0m\n", - "LiteLLM.Info: If you need to debug this error, use `litellm._turn_on_debug()'.\n", + "In clear water's dance, \n", + "Silent scales shimmer and glide, \n", + "Fish weave nature's trance. \n", "\n", - "Here's a haiku on the theme of fish3:\n", - "\n", - "In the deep blue sea, \n", - "Silent swimmers glide with grace, \n", - "Whispers of the tide.\n", + "Fish swim in clear stream, \n", + "Scales shimmer in sunlight glow, \n", + "Nature's quiet dance.\n", "----------------------------------------\n", - "Here is a haiku on the theme of fish3:\n", - "\n", - "In the clear water, \n", - "Graceful fins weave through the sea, \n", - "Whispers of the deep.\n" + "Fish swim in clear stream, \n", + "Scales shimmer in sunlight glow, \n", + "Nature's quiet dance.\n" ] } ], @@ -208,7 +202,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 37, "id": "2c766859", "metadata": {}, "outputs": [], @@ -233,7 +227,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 38, "id": "c83bbdc0", "metadata": {}, "outputs": [ @@ -241,8 +235,12 @@ "name": "stdout", "output_type": "stream", "text": [ - "def count_a_occurrences(input_string: str) -> int:\n", - " return input_string.count('a')\n", + "def count_a(s: str) -> int:\n", + " count = 0\n", + " for character in s:\n", + " if character == 'a':\n", + " count += 1\n", + " return count\n", "\n" ] } @@ -277,7 +275,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 39, "id": "66711301", "metadata": {}, "outputs": [ @@ 
-289,18 +287,20 @@ "Tool call: weather(*(), **{'city': 'Chicago'}) -> cold\n", "Tool call: weather(*(), **{'city': 'New York'}) -> wet\n", "Tool call: weather(*(), **{'city': 'Barcelona'}) -> sunny\n", - "Among the cities checked, Barcelona has good weather, as it is currently sunny.\n" + "Based on the weather conditions, Barcelona has good weather as it is sunny.\n" ] } ], "source": [ - "@defop\n", + "@Tool.define\n", "def cities() -> list[str]:\n", + " \"\"\"Return a list of cities that can be passed to `weather`.\"\"\"\n", " return [\"Chicago\", \"New York\", \"Barcelona\"]\n", "\n", "\n", - "@defop\n", + "@Tool.define\n", "def weather(city: str) -> str:\n", + " \"\"\"Given a city name, return a description of the weather in that city.\"\"\"\n", " status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\n", " return status.get(city, \"unknown\")\n", "\n", @@ -311,13 +311,13 @@ " raise NotHandled\n", "\n", "\n", - "def log_tool_call(_, tool, *args, **kwargs):\n", + "def log_tool_call(tool, *args, **kwargs):\n", " result = fwd()\n", " print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\n", " return result\n", "\n", "\n", - "with handler(provider), handler({tool_call: log_tool_call}):\n", + "with handler(provider), handler({Tool.__apply__: log_tool_call}):\n", " print(vacation())" ] }, @@ -333,7 +333,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 40, "id": "17668ac8", "metadata": {}, "outputs": [ @@ -344,10 +344,10 @@ "> You are onstage at a comedy club. You tell the following joke:\n", "Knock knock.\n", "Who's there?\n", - "Liz.\n", - "Liz who?\n", - "Liz-ard you curious who's at the door?\n", - "> The crowd laughs politely.\n" + "Lizard.\n", + "Lizard who?\n", + "Lizard who? Lizard you! 
Open the door, it's chilly out here!\n", + "> The crowd stares in stony silence.\n" ] } ], @@ -397,7 +397,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 41, "id": "cbf495a2", "metadata": {}, "outputs": [ @@ -405,15 +405,25 @@ "name": "stdout", "output_type": "stream", "text": [ - "Request fired: () {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish2. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]} ModelResponse(id='chatcmpl-CnANzyeB958opw15SxIJ5GLG5eCI8', created=1765834031, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the gentle stream, \\nSilver scales shimmer and dance, \\nQuietly they glide. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=364, total_tokens=384, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')\n", - "Request fired: () {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]} ModelResponse(id='chatcmpl-CnAO05Uemhl4BA8dIUcQyqoKIyOvk', created=1765834032, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the sea where the waves gently swish, \\nLived a fish with a hopeful wish. \\nHe dreamed of the skies, \\nTo soar and to rise, \\nBut alas, he remained just a fish. 
', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=46, prompt_tokens=364, total_tokens=410, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')\n" + "> Write a haiku on the theme of fish2. Do not use any tools.\n", + "In waters serene, \n", + "Gliding scales of silver bright, \n", + "Whispers of the sea.\n", + "> Write a limerick on the theme of fish. Do not use any tools.\n", + "In the ocean where fish love to play, \n", + "A trout took a boat for a day. \n", + "He swam with a wail, \n", + "To the tip of his tail, \n", + "And waved to the marlins in May.\n" ] } ], "source": [ "def log_llm(*args, **kwargs):\n", " result = fwd()\n", - " print(\"Request fired: \", args, kwargs, result)\n", + "\n", + " print(f\"> {kwargs['messages'][0]['content'][0]['text']}\")\n", + " print(result.choices[0].message.content)\n", " return result\n", "\n", "\n", @@ -440,7 +450,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 42, "id": "81a15f00", "metadata": {}, "outputs": [ @@ -448,11 +458,14 @@ "name": "stdout", "output_type": "stream", "text": [ - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOCov9x47s8Jj0K2oGyrB21h9dM', created=1765834044, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"theme\":\"fish\"}', name='haiku_no_cache'), id='call_8gQN3B78H2aZzIdOZPhEqPqy', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=17, prompt_tokens=364, total_tokens=381, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), 
prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOD4UrpELPotqSt3s76CnJGu6FB', created=1765834045, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the quiet stream, \\nSilver scales shimmer with grace, \\nFish dance in moonlight.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=363, total_tokens=383, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'haiku_no_cache', 'args': (), 'kwargs': {'theme': 'fish'}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3. 
Do not use any tools.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"theme\":\"fish\"}', 'name': 'haiku_no_cache'}, 'id': 'call_8gQN3B78H2aZzIdOZPhEqPqy', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_8gQN3B78H2aZzIdOZPhEqPqy', 'name': 'haiku_no_cache', 'content': [{'type': 'text', 'text': 'In the quiet stream, \\nSilver scales shimmer with grace, \\nFish dance in moonlight.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOEXNCJJEDIOBwwR8PaKYXoqOCs', created=1765834046, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the quiet stream, \\nSilver scales shimmer with grace, \\nFish dance in moonlight.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=410, total_tokens=430, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOEfHrN8jTkm6tIJ7EzgXP9bo2d', created=1765834046, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In the ocean where fishies do play, \\nA big whale came swimming one day. 
\\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=45, prompt_tokens=365, total_tokens=410, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n" + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. 
Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNvfUe5SAUhZMSz1tNAzVXYgVmK', created=1766441379, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"In waters they glide, \\nSilver scales shimmer and dance, \\nNature's silent grace. 
\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=643, total_tokens=663, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNvfUe5SAUhZMSz1tNAzVXYgVmK', created=1766441379, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"In waters they glide, \\nSilver scales shimmer and dance, \\nNature's silent grace. \", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=643, total_tokens=663, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNwNX281AjkWQ1dQ4k6EdUhYLcY', created=1766441380, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"theme\":\"fish\"}', name='limerick'), id='call_saz9unenzuVoXATZ5fCZZ8Bt', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=15, prompt_tokens=644, total_tokens=659, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNwNX281AjkWQ1dQ4k6EdUhYLcY', created=1766441380, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"theme\":\"fish\"}', name='limerick'), id='call_saz9unenzuVoXATZ5fCZZ8Bt', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=15, prompt_tokens=644, total_tokens=659, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNxuLZfAwYkaGF3fQh9qG200UyU', created=1766441381, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=45, prompt_tokens=643, total_tokens=688, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNxuLZfAwYkaGF3fQh9qG200UyU', created=1766441381, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=45, prompt_tokens=643, total_tokens=688, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. 
Do not use any tools.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"theme\":\"fish\"}', 'name': 'limerick'}, 'id': 'call_saz9unenzuVoXATZ5fCZZ8Bt', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_saz9unenzuVoXATZ5fCZZ8Bt', 'name': 'limerick', 'content': [{'type': 'text', 'text': 'In a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. '}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNzarHb5Rc5y8Lns7KYZRUs9MKu', created=1766441383, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Here\\'s a limerick on the theme of \"fish\":\\n\\nIn a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=57, prompt_tokens=712, total_tokens=769, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. 
Do not use any tools.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"theme\":\"fish\"}', 'name': 'limerick'}, 'id': 'call_saz9unenzuVoXATZ5fCZZ8Bt', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_saz9unenzuVoXATZ5fCZZ8Bt', 'name': 'limerick', 'content': [{'type': 'text', 'text': 'In a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. '}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNzarHb5Rc5y8Lns7KYZRUs9MKu', created=1766441383, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Here\\'s a limerick on the theme of \"fish\":\\n\\nIn a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=57, prompt_tokens=712, total_tokens=769, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n" ] } ], @@ -467,11 +480,7 @@ "llm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\n", "\n", "# Avoid cache for demonstration\n", - "try:\n", - " haiku.cache_clear()\n", - " limerick.cache_clear()\n", - "except Exception:\n", - " pass\n", + "haiku.cache_clear()\n", "\n", "with handler(provider), handler(llm_logger):\n", " _ = haiku(\"fish3\")\n", @@ -490,7 +499,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 43, "id": 
"78a4bf44", "metadata": {}, "outputs": [ @@ -498,20 +507,116 @@ "name": "stdout", "output_type": "stream", "text": [ - "Sub-templates available to write_story: [Template(__prompt_template__='Write a limerick on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': ..., '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': ..., '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='limerick'), Template(__prompt_template__='Write a haiku on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': ..., 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': ..., 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='haiku_no_cache'), Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': ..., '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': ..., '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='primes'), Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': ..., 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': ..., 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='count_char'), Operation(cities, () -> list[str]), Operation(weather, (city: str) -> str), Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': ..., 
'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': ..., 
'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='vacation'), Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': ..., 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': ..., 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='write_joke'), Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': ..., 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': ..., 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='rate_joke'), Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': ..., 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': ..., 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='story_with_moral'), Template(__prompt_template__='Write a funny, humorous story about {topic}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': ..., 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': ..., 'write_story': Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_story')})), __name__='story_funny'), Template(__prompt_template__=\"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", __signature__= str>, __context__=LexicalContext(mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> 
str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n 
tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': ...}), mappingproxy({'__name__': '__main__', '__doc__': 'Automatically created module for IPython interactive environment', '__package__': None, '__loader__': None, '__spec__': None, '__builtin__': , '__builtins__': , '_ih': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n 
print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], '_oh': {}, '_dh': [PosixPath('/Users/datnguyenthanh/Marc/effectful')], 'In': ['', 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. 
Do not use any tools.\"\"\"\\n raise NotHandled', 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))'], 'Out': {}, 'get_ipython': >, 'exit': , 'quit': , 'open': , '_': 'In the ocean where fishies do play, \\nA big whale came swimming one day. \\nWith a splash and a dive, \\nHe felt so alive, \\nChasing fish in the blue, gleaming bay.', '__': '', '___': '', '__vsc_ipynb_file__': '/Users/datnguyenthanh/Marc/effectful/docs/source/llm.ipynb', '_i': '# 1. 
Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', '_ii': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', '_iii': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', '_i1': 'import dataclasses\\nimport functools\\nimport inspect\\nimport logging\\nimport sys\\nfrom collections.abc import Callable\\n\\nfrom effectful.handlers.llm import Template\\nfrom effectful.handlers.llm.providers import (\\n CacheLLMRequestHandler,\\n LiteLLMProvider,\\n LLMLoggingHandler,\\n RetryLLMHandler,\\n completion,\\n tool_call,\\n)\\nfrom effectful.handlers.llm.synthesis import ProgramSynthesis\\nfrom effectful.ops.semantics import NotHandled, fwd, handler\\nfrom effectful.ops.syntax import defop\\n\\nprovider = LiteLLMProvider()', 'dataclasses': , 'functools': , 'inspect': , 'logging': , 'sys': , 'Callable': , 'Template': , 'CacheLLMRequestHandler': , 'LiteLLMProvider': , 'LLMLoggingHandler': , 'RetryLLMHandler': , 'completion': Operation(completion, (model: str, messages: List = [], timeout: Union[float, str, openai.Timeout, NoneType] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, stream_options: Optional[dict] = None, stop=None, max_completion_tokens: Optional[int] = None, max_tokens: Optional[int] = None, modalities: Optional[List[Literal['text', 'audio']]] = None, prediction: Optional[openai.types.chat.chat_completion_prediction_content_param.ChatCompletionPredictionContentParam] = None, audio: Optional[openai.types.chat.chat_completion_audio_param.ChatCompletionAudioParam] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, user: Optional[str] = None, reasoning_effort: Optional[Literal['none', 'minimal', 'low', 'medium', 'high', 'default']] = None, 
verbosity: Optional[Literal['low', 'medium', 'high']] = None, response_format: Union[dict, Type[pydantic.main.BaseModel], NoneType] = None, seed: Optional[int] = None, tools: Optional[List] = None, tool_choice: Union[str, dict, NoneType] = None, logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, parallel_tool_calls: Optional[bool] = None, web_search_options: Optional[litellm.types.llms.openai.OpenAIWebSearchOptions] = None, deployment_id=None, extra_headers: Optional[dict] = None, safety_identifier: Optional[str] = None, service_tier: Optional[str] = None, functions: Optional[List] = None, function_call: Optional[str] = None, base_url: Optional[str] = None, api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, thinking: Optional[litellm.types.llms.anthropic.AnthropicThinkingParam] = None, shared_session: Optional[ForwardRef('ClientSession')] = None, **kwargs) -> Union[litellm.types.utils.ModelResponse, litellm.litellm_core_utils.streaming_handler.CustomStreamWrapper]), 'tool_call': Operation(tool_call, (template: effectful.handlers.llm.Template, tool: Union[effectful.ops.types.Operation[..., T], effectful.handlers.llm.Template[..., T]], *args, **kwargs) -> T), 'ProgramSynthesis': , 'NotHandled': , 'fwd': Operation(fwd, (*args, **kwargs) -> Any), 'handler': , 'defop': , 'provider': , '_i2': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}.\"\"\"\\n raise NotHandled', 'limerick': Template(__prompt_template__='Write a limerick on the theme of {theme}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='limerick'), '_i3': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i4': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i5': '@Template.define\\ndef limerick(theme: str) -> str:\\n \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled', '_i6': 'with handler(provider):\\n print(limerick(\"fish\"))\\n print(\"-\" * 40)\\n print(limerick(\"fish\"))', '_i7': '@functools.cache\\n@Template.define\\ndef haiku(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef haiku_no_cache(theme: str) -> str:\\n \"\"\"Write a haiku on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nprint()\\nwith handler(provider):\\n print(haiku(\"fish\"))\\n print(\"-\" * 40)\\n print(haiku(\"fish\"))\\n\\nprint()\\ncache_handler1 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler1):\\n print(haiku_no_cache(\"fish2\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish2\"))\\n\\nprint()\\ncache_handler2 = CacheLLMRequestHandler()\\nwith handler(provider), handler(cache_handler2):\\n print(haiku_no_cache(\"fish3\"))\\n print(\"-\" * 40)\\n print(haiku_no_cache(\"fish3\"))', 'haiku': , 'haiku_no_cache': Template(__prompt_template__='Write a haiku on the theme of {theme}. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='haiku_no_cache'), 'cache_handler1': , 'cache_handler2': , '_i8': '@Template.define\\ndef primes(first_digit: int) -> int:\\n \"\"\"Give a prime number with {first_digit} as the first digit. 
Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider):\\n assert type(primes(6)) is int', 'primes': Template(__prompt_template__='Give a prime number with {first_digit} as the first digit. Do not use any tools.', __signature__= int>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='primes'), '_i9': '@Template.define\\ndef count_char(char: str) -> Callable[[str], int]:\\n \"\"\"Write a function which takes a string and counts the occurrances of \\'{char}\\'. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\nwith handler(provider), handler(ProgramSynthesis()):\\n count_a = count_char(\"a\")\\n assert callable(count_a)\\n assert count_a(\"banana\") == 3\\n assert count_a(\"cherry\") == 0\\n # Print the source code of the generated function\\n print(inspect.getsource(count_a))', 'count_char': Template(__prompt_template__=\"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", __signature__= collections.abc.Callable[[str], int]>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='count_char'), 'count_a': , '_i10': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', 'cities': Operation(cities, () -> list[str]), 'weather': Operation(weather, (city: str) -> str), 'vacation': 
Template(__prompt_template__='Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='vacation'), 'log_tool_call': , '_i11': '@defop\\ndef cities() -> list[str]:\\n return [\"Chicago\", \"New York\", \"Barcelona\"]\\n\\n\\n@defop\\ndef weather(city: str) -> str:\\n status = {\"Chicago\": \"cold\", \"New York\": \"wet\", \"Barcelona\": \"sunny\"}\\n return status.get(city, \"unknown\")\\n\\n\\n@Template.define # cities and weather auto-captured from lexical scope\\ndef vacation() -> str:\\n \"\"\"Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.\"\"\"\\n raise NotHandled\\n\\n\\ndef log_tool_call(_, tool, *args, **kwargs):\\n result = fwd()\\n print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> {result}\")\\n return result\\n\\n\\nwith handler(provider), handler({tool_call: log_tool_call}):\\n print(vacation())', '_i12': '@dataclasses.dataclass\\nclass KnockKnockJoke:\\n whos_there: str\\n punchline: str\\n\\n\\n@Template.define\\ndef write_joke(theme: str) -> KnockKnockJoke:\\n \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef rate_joke(joke: KnockKnockJoke) -> bool:\\n \"\"\"Decide if {joke} is funny or not. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\ndef do_comedy():\\n joke = write_joke(\"lizards\")\\n print(\"> You are onstage at a comedy club. 
You tell the following joke:\")\\n print(\\n f\"Knock knock.\\\\nWho\\'s there?\\\\n{joke.whos_there}.\\\\n{joke.whos_there} who?\\\\n{joke.punchline}\"\\n )\\n if rate_joke(joke):\\n print(\"> The crowd laughs politely.\")\\n else:\\n print(\"> The crowd stares in stony silence.\")\\n\\n\\nwith handler(provider):\\n do_comedy()', 'KnockKnockJoke': , 'write_joke': Template(__prompt_template__='Write a knock-knock joke on the theme of {theme}. Do not use any tools.', __signature__= __main__.KnockKnockJoke>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='write_joke'), 'rate_joke': Template(__prompt_template__='Decide if {joke} is funny or not. Do not use any tools.', __signature__= bool>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='rate_joke'), 'do_comedy': , '_i13': 'def log_llm(*args, **kwargs):\\n result = fwd()\\n print(\"Request fired: \", args, kwargs, result)\\n return result\\n\\n\\n# Avoid cache\\ntry:\\n haiku.cache_clear()\\nexcept Exception:\\n pass\\n\\n# Put completion handler innermost so it has highest precedence during the call\\nwith handler(provider), handler({completion: log_llm}):\\n _ = haiku(\"fish2\")\\n _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache', 'log_llm': , '_i14': '# 1. Create a logger\\nlogger = logging.getLogger(\"effectful.llm\")\\nlogger.setLevel(logging.INFO)\\nlog_handler = logging.StreamHandler(sys.stdout)\\nlog_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\\nlogger.addHandler(log_handler)\\n# 2. 
Pass it to the handler\\nllm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\\n\\n# Avoid cache for demonstration\\ntry:\\n haiku.cache_clear()\\n limerick.cache_clear()\\nexcept Exception:\\n pass\\n\\nwith handler(provider), handler(llm_logger):\\n _ = haiku(\"fish3\")\\n _ = limerick(\"fish4\")', 'logger': , 'log_handler': , 'llm_logger': , '_i15': '# Sub-templates for different story styles\\n@Template.define\\ndef story_with_moral(topic: str) -> str:\\n \"\"\"Write a short story about {topic} and end with a moral lesson. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n@Template.define\\ndef story_funny(topic: str) -> str:\\n \"\"\"Write a funny, humorous story about {topic}. Do not use any tools.\"\"\"\\n raise NotHandled\\n\\n\\n# Main orchestrator template - has access to sub-templates\\n@Template.define\\ndef write_story(topic: str, style: str) -> str:\\n \"\"\"Write a story about {topic} in the style: {style}.\\n Available styles: \\'moral\\' for a story with a lesson, \\'funny\\' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"\"\"\\n raise NotHandled\\n\\n\\n# Verify sub-templates are captured in write_story\\'s lexical context\\nassert story_with_moral in write_story.tools\\nassert story_funny in write_story.tools\\nprint(\"Sub-templates available to write_story:\", list(write_story.tools))\\n\\nwith handler(provider), handler(llm_logger):\\n print(\"=== Story with moral ===\")\\n print(write_story(\"a curious cat\", \"moral\"))\\n print()\\n print(\"=== Funny story ===\")\\n print(write_story(\"a curious cat\", \"funny\"))', 'story_with_moral': Template(__prompt_template__='Write a short story about {topic} and end with a moral lesson. Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_with_moral'), 'story_funny': Template(__prompt_template__='Write a funny, humorous story about {topic}. 
Do not use any tools.', __signature__= str>, __context__=LexicalContext(mappingproxy({...}), mappingproxy({...})), __name__='story_funny'), 'write_story': ...})), __name__='write_story')]\n", + "Sub-templates available to write_story: ['vacation', 'count_char', 'primes', 'weather', 'story_with_moral', 'haiku_no_cache', 'write_joke', 'story_funny', 'unstable_service', 'rate_joke', 'write_story', 'fetch_data', 'limerick', 'give_rating_for_movie', 'cities']\n", "=== Story with moral ===\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': \"Write a story about a curious cat in the style: moral.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOJJzdrkeZmPdyh1h0cMSFZlFJE', created=1765834051, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"topic\":\"a curious cat\"}', name='story_with_moral'), id='call_nJMDv3AxDTvyxxoDKXAzH4aB', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=18, prompt_tokens=560, total_tokens=578, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a short story about a curious cat and end with a moral lesson. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOJnGjxZtPLcZ9ekNXtCUneEitd', created=1765834051, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In a quaint village nestled between rolling hills and whispering streams, there lived a cat named Whiskers. Whiskers was not an ordinary cat; his sleek, shiny coat gleamed under the sun, and his eyes sparkled with a clever curiosity that set him apart. His inquisitive nature drove him to explore every nook and cranny of the village, always seeking new adventures.\\n\\nOne day, while wandering near the woods, Whiskers stumbled upon a mysterious path he had never seen before. The path was lined with wildflowers and arched by towering trees that seemed to stretch on forever. Intrigued, Whiskers decided to follow it to see where it would lead.\\n\\nThe further he ventured, the stranger the path became. He encountered bubbling brooks, frogs that croaked like they were sharing secrets, and birds that sang unfamiliar melodies. Despite the eerie feeling curling around his paws, Whiskers pressed on.\\n\\nAfter what felt like hours, he arrived at a clearing with a peculiar sight: a large cage in the center with a small bird trapped inside. The bird chirped desperately, its tiny eyes pleading for help. Whiskers, though naturally inclined to chase birds, felt a tug of compassion watching the helpless creature.\\n\\nUsing his sharp claws, Whiskers carefully picked at the lock until it clicked open. The bird flapped its wings gratefully and soared into the sky, singing a joyful tune. 
Whiskers watched it disappear among the clouds, a warm feeling blossoming in his chest.\\n\\nContent with his good deed, Whiskers made his way back home, sticking to the original path. As he lay in his favorite sun-dappled spot on the porch, he reflected on his adventure.\\n\\nThe moral of the story is: Curiosity is a beautiful thing that leads to new discoveries, but it must be guided by kindness and the courage to act, for it is in helping others that we find our true purpose.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=395, prompt_tokens=528, total_tokens=923, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'story_with_moral', 'args': (), 'kwargs': {'topic': 'a curious cat'}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': \"Write a story about a curious cat in the style: moral.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"topic\":\"a curious cat\"}', 'name': 'story_with_moral'}, 'id': 'call_nJMDv3AxDTvyxxoDKXAzH4aB', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_nJMDv3AxDTvyxxoDKXAzH4aB', 'name': 'story_with_moral', 'content': [{'type': 'text', 'text': 'In a quaint village nestled between rolling hills and whispering streams, there lived a cat named Whiskers. 
Whiskers was not an ordinary cat; his sleek, shiny coat gleamed under the sun, and his eyes sparkled with a clever curiosity that set him apart. His inquisitive nature drove him to explore every nook and cranny of the village, always seeking new adventures.\\n\\nOne day, while wandering near the woods, Whiskers stumbled upon a mysterious path he had never seen before. The path was lined with wildflowers and arched by towering trees that seemed to stretch on forever. Intrigued, Whiskers decided to follow it to see where it would lead.\\n\\nThe further he ventured, the stranger the path became. He encountered bubbling brooks, frogs that croaked like they were sharing secrets, and birds that sang unfamiliar melodies. Despite the eerie feeling curling around his paws, Whiskers pressed on.\\n\\nAfter what felt like hours, he arrived at a clearing with a peculiar sight: a large cage in the center with a small bird trapped inside. The bird chirped desperately, its tiny eyes pleading for help. Whiskers, though naturally inclined to chase birds, felt a tug of compassion watching the helpless creature.\\n\\nUsing his sharp claws, Whiskers carefully picked at the lock until it clicked open. The bird flapped its wings gratefully and soared into the sky, singing a joyful tune. Whiskers watched it disappear among the clouds, a warm feeling blossoming in his chest.\\n\\nContent with his good deed, Whiskers made his way back home, sticking to the original path. As he lay in his favorite sun-dappled spot on the porch, he reflected on his adventure.\\n\\nThe moral of the story is: Curiosity is a beautiful thing that leads to new discoveries, but it must be guided by kindness and the courage to act, for it is in helping others that we find our true purpose.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOQIJsyohvBlsBGz9cztpetifUP', created=1765834058, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Here's a story about a curious cat named Whiskers who embarks on an adventure that teaches him an important lesson about kindness and courage. Whiskers' curiosity leads him to explore a mysterious path where he eventually discovers a trapped bird. Instead of succumbing to his natural instincts, Whiskers chooses to help the bird, freeing it from its cage. Through this act of compassion, Whiskers learns that while curiosity can lead to new discoveries, it is the courage to act with kindness that truly defines us.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=105, prompt_tokens=982, total_tokens=1087, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "Here's a story about a curious cat named Whiskers who embarks on an adventure that teaches him an important lesson about kindness and courage. Whiskers' curiosity leads him to explore a mysterious path where he eventually discovers a trapped bird. 
Instead of succumbing to his natural instincts, Whiskers chooses to help the bird, freeing it from its cage. Through this act of compassion, Whiskers learns that while curiosity can lead to new discoveries, it is the courage to act with kindness that truly defines us.\n", + "> Write a story about a curious cat in the style: moral.\n", + " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", + "None\n", + "> Write a story about curious cat in the style: moral.\n", + " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", + "None\n", + "> Write a short story about curious cat and end with a moral lesson. Do not use any tools.\n", + "Once upon a time, in a quaint little village, there lived a curious cat named Whiskers. Whiskers had soft, fluffy fur and eyes that sparkled with mischief. Unlike the other cats in the village, Whiskers had a penchant for exploring places he wasn't supposed to be. He loved sneaking into the baker's shop to inhale the sweet aroma of freshly baked bread and creeping through the forest to watch the owls hoot under the moonlight.\n", + "\n", + "One sunny afternoon, Whiskers noticed a peculiar, shimmering light coming from the top of the old, abandoned tower at the end of the village. Unable to contain his curiosity, he decided to investigate. He wove through the cobblestone streets, avoided the prying eyes of Mrs. Hilda's dog, and climbed the crumbling stairs of the tower.\n", + "\n", + "Upon reaching the top, he discovered a stunning, glittering crystal lying in the center of the room. Whiskers was mesmerized. However, just as he reached out his paw to touch it, he heard a low growl. Emerging from the shadows was a large, scruffy dog named Bruno, the tower's unexpected guardian. 
Whiskers froze, his tiny heart pounding in his chest.\n", + "\n", + "Bruno barked, \"What are you doing here, meddling cat?\"\n", + "\n", + "Whiskers, in his sweetest voice, replied, \"I was just curious about the light. I didn't mean any harm.\"\n", + "\n", + "Bruno's stern expression softened a bit. \"Curiosity is fine, but it must be tempered with caution,\" he said. \"Had you touched the crystal, you would've been trapped here, as it binds whoever possesses it to the tower.\"\n", + "\n", + "Realizing the gravity of his actions, Whiskers thanked Bruno for the warning and slowly backed away, his curiosity sated but also a lesson learned.\n", + "\n", + "From that day on, Whiskers continued to explore, but with a newfound sense of responsibility and caution. He became known not only for his curiosity but also for his wisdom.\n", + "\n", + "Moral of the story: Curiosity can lead to wonderful discoveries, but it is essential to balance it with caution and awareness of potential dangers.\n", + "> Write a story about curious cat in the style: moral.\n", + " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", + "Here's a story about a curious cat named Whiskers. Once upon a time, in a quaint little village, there lived a curious cat named Whiskers. Whiskers had soft, fluffy fur and eyes that sparkled with mischief. Unlike the other cats, Whiskers loved exploring places he wasn't supposed to be. One day, Whiskers noticed a shimmering light at the top of an old tower. Driven by curiosity, he climbed up and found a mesmerizing crystal.\n", + "\n", + "However, a large scruffy dog named Bruno, the tower's guardian, warned him that touching the crystal would bind him to the tower forever. Whiskers realized his curiosity almost led to trouble. 
From then on, he explored with caution and became wise.\n", + "\n", + "**Moral of the story:** Curiosity can lead to wonderful discoveries, but it must be tempered with caution and awareness of potential dangers.\n", + "> Write a story about a curious cat in the style: moral.\n", + " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", + "Here's a story about a curious cat named Whiskers. Once upon a time, in a quaint little village, there lived a curious cat named Whiskers. Whiskers had soft, fluffy fur and eyes that sparkled with mischief. Unlike the other cats, Whiskers loved exploring places he wasn't supposed to be. One day, Whiskers noticed a shimmering light at the top of an old tower. Driven by curiosity, he climbed up and found a mesmerizing crystal.\n", + "\n", + "However, a large scruffy dog named Bruno, the tower's guardian, warned him that touching the crystal would bind him to the tower forever. Whiskers realized his curiosity almost led to trouble. From then on, he explored with caution and became wise.\n", + "\n", + "**Moral of the story:** Curiosity can lead to wonderful discoveries, but it must be tempered with caution and awareness of potential dangers.\n", + "Here's a story about a curious cat named Whiskers. Once upon a time, in a quaint little village, there lived a curious cat named Whiskers. Whiskers had soft, fluffy fur and eyes that sparkled with mischief. Unlike the other cats, Whiskers loved exploring places he wasn't supposed to be. One day, Whiskers noticed a shimmering light at the top of an old tower. Driven by curiosity, he climbed up and found a mesmerizing crystal.\n", + "\n", + "However, a large scruffy dog named Bruno, the tower's guardian, warned him that touching the crystal would bind him to the tower forever. Whiskers realized his curiosity almost led to trouble. 
From then on, he explored with caution and became wise.\n", + "\n", + "**Moral of the story:** Curiosity can lead to wonderful discoveries, but it must be tempered with caution and awareness of potential dangers.\n", "\n", "=== Funny story ===\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': \"Write a story about a curious cat in the style: funny.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOTXQYpAjE6AhrSHrREbvtgiSzs', created=1765834061, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"topic\":\"a curious cat\"}', name='story_funny'), id='call_zMjPfWzaDFKuswF7HiHFFu4R', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=17, prompt_tokens=560, total_tokens=577, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a funny, humorous story about a curious cat. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOUficJNRttQKM2b9t8qJDzfXnQ', created=1765834062, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Once upon a time in the quaint little town of Whiskerfield, there lived a particularly curious cat named Whiskers. Now, Whiskers wasn't your average feline; he had a knack for getting himself into the most bizarre situations, much to the amusement of the townsfolk.\\n\\nOne sunny morning, as Whiskers ventured out of his cozy basket, he noticed a peculiar shiny object gleaming in the garden. It was unlike anything he had ever seen before – a mix between a large spoon and a tiny satellite dish. His curiosity piqued, Whiskers approached with his usual stealth, attempting to decipher this mysterious contraption.\\n\\nUnbeknownst to Whiskers, the shiny object was none other than the town's new state-of-the-art bird feeder, designed with reflective surfaces to keep the squirrels away. But to Whiskers, it was the most intriguing puzzle he'd ever encountered. With his tail twitching like a metronome, Whiskers pounced at the feeder, only to collide with its slippery surface and land unceremoniously on his back, paws in the air.\\n\\nUndeterred by his clumsy introduction, Whiskers began his investigation with fervor. He circled the feeder, pawing at it and meowing loudly, as if expecting a response. The neighborhood birds watched from a safe distance, chirping in a chorus that resembled laughter. 
Whiskers, paying no mind to his feathered audience, was determined to unlock the secrets of this shiny beacon.\\n\\nAs noon approached, Whiskers, now slightly exasperated and hungry, decided to enlist the help of his best friend, Rover the golden retriever. Rover, although quite good-natured, wasn't exactly the brains of their operation, but he was always up for an adventure. With wagging tails and determined purrs, the duo devised a plan. Rover would use his weight to tip the feeder, while Whiskers would keep an eye out for any unexpected critters.\\n\\nThe plan was in motion. Rover, in his typical bounding style, lunged at the feeder, causing it to wobble precariously. Just as it began to tip, a sudden gust of wind swung the contraption in a whirlwind of seeds and reflections. Whiskers and Rover, caught in the midst of this flying feast, found themselves covered in birdseed, with Whiskers' fur boasting a collection of tiny sunflower hats.\\n\\nAs they sat there, bewildered and giggling in their own peculiar way, the townsfolk couldn't help but chuckle at the antics of Whiskers and Rover. Even the birds stopped their fluttering to admire the spectacle. From that day on, the bird feeder was not just a source of food for the birds, but also a stage for Whiskerfield's most unexpected entertainment duo.\\n\\nAnd so, Whiskers the curious cat learned an important lesson: sometimes, curiosity might not uncover the mysteries you expect, but it certainly creates the most memorable adventures. 
And as for Rover, well, he just loved being part of the fun, birdseed hats and all.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=632, prompt_tokens=524, total_tokens=1156, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'story_funny', 'args': (), 'kwargs': {'topic': 'a curious cat'}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': \"Write a story about a curious cat in the style: funny.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\"}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"topic\":\"a curious cat\"}', 'name': 'story_funny'}, 'id': 'call_zMjPfWzaDFKuswF7HiHFFu4R', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_zMjPfWzaDFKuswF7HiHFFu4R', 'name': 'story_funny', 'content': [{'type': 'text', 'text': \"Once upon a time in the quaint little town of Whiskerfield, there lived a particularly curious cat named Whiskers. Now, Whiskers wasn't your average feline; he had a knack for getting himself into the most bizarre situations, much to the amusement of the townsfolk.\\n\\nOne sunny morning, as Whiskers ventured out of his cozy basket, he noticed a peculiar shiny object gleaming in the garden. It was unlike anything he had ever seen before – a mix between a large spoon and a tiny satellite dish. 
His curiosity piqued, Whiskers approached with his usual stealth, attempting to decipher this mysterious contraption.\\n\\nUnbeknownst to Whiskers, the shiny object was none other than the town's new state-of-the-art bird feeder, designed with reflective surfaces to keep the squirrels away. But to Whiskers, it was the most intriguing puzzle he'd ever encountered. With his tail twitching like a metronome, Whiskers pounced at the feeder, only to collide with its slippery surface and land unceremoniously on his back, paws in the air.\\n\\nUndeterred by his clumsy introduction, Whiskers began his investigation with fervor. He circled the feeder, pawing at it and meowing loudly, as if expecting a response. The neighborhood birds watched from a safe distance, chirping in a chorus that resembled laughter. Whiskers, paying no mind to his feathered audience, was determined to unlock the secrets of this shiny beacon.\\n\\nAs noon approached, Whiskers, now slightly exasperated and hungry, decided to enlist the help of his best friend, Rover the golden retriever. Rover, although quite good-natured, wasn't exactly the brains of their operation, but he was always up for an adventure. With wagging tails and determined purrs, the duo devised a plan. Rover would use his weight to tip the feeder, while Whiskers would keep an eye out for any unexpected critters.\\n\\nThe plan was in motion. Rover, in his typical bounding style, lunged at the feeder, causing it to wobble precariously. Just as it began to tip, a sudden gust of wind swung the contraption in a whirlwind of seeds and reflections. Whiskers and Rover, caught in the midst of this flying feast, found themselves covered in birdseed, with Whiskers' fur boasting a collection of tiny sunflower hats.\\n\\nAs they sat there, bewildered and giggling in their own peculiar way, the townsfolk couldn't help but chuckle at the antics of Whiskers and Rover. Even the birds stopped their fluttering to admire the spectacle. 
From that day on, the bird feeder was not just a source of food for the birds, but also a stage for Whiskerfield's most unexpected entertainment duo.\\n\\nAnd so, Whiskers the curious cat learned an important lesson: sometimes, curiosity might not uncover the mysteries you expect, but it certainly creates the most memorable adventures. And as for Rover, well, he just loved being part of the fun, birdseed hats and all.\"}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOfnzkwylzEgoQjoqCoqso1JcbO', created=1765834073, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"Here's a funny story about a curious cat named Whiskers. 
In the quaint town of Whiskerfield, Whiskers is known for getting into bizarre situations. One day, he discovers a shiny bird feeder and mistakes it for a mysterious contraption. Despite several comedic mishaps, including enlisting the help of Rover the golden retriever, Whiskers ends up covered in birdseed, providing entertainment for the entire town. Through his antics, Whiskers realizes that while curiosity might not solve mysteries, it sure makes for adventurous tales.\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=111, prompt_tokens=1217, total_tokens=1328, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "Here's a funny story about a curious cat named Whiskers. In the quaint town of Whiskerfield, Whiskers is known for getting into bizarre situations. One day, he discovers a shiny bird feeder and mistakes it for a mysterious contraption. Despite several comedic mishaps, including enlisting the help of Rover the golden retriever, Whiskers ends up covered in birdseed, providing entertainment for the entire town. Through his antics, Whiskers realizes that while curiosity might not solve mysteries, it sure makes for adventurous tales.\n" + "> Write a story about a curious cat in the style: funny.\n", + " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", + "None\n", + "> Write a funny, humorous story about a curious cat. Do not use any tools.\n", + "Once upon a time in a quaint little village, there lived a curious cat named Whiskers. 
Whiskers had a reputation around the village for his relentless curiosity, constantly sticking his nose into everything, much to the amusement and occasional frustration of the townspeople.\n", + "\n", + "One sunny afternoon, Whiskers was prowling around the village square when he spotted the baker's delivery truck. As the driver was busy unloading trays of freshly baked bread, Whiskers couldn't resist the delicious aroma wafting through the air. His whiskers twitching, he quietly tiptoed closer to the truck.\n", + "\n", + "As soon as the baker turned his back, Whiskers leapt onto the delivery truck, his little paws barely making a sound. He sniffed around, eyes wide with amazement, as he found himself surrounded by an entire universe of bread. There were baguettes, loaves of rye, fluffy rolls, and croissants that seemed to glow under the afternoon sun.\n", + "\n", + "Suddenly, the truck door swung shut with a thud, and before Whiskers knew what was happening, the driver hopped back in, and the truck was on its way to the next delivery.\n", + "\n", + "Inside the truck, Whiskers was having the adventure of his life. With every jolt and turn of the vehicle, he found himself buried in a pile of delicious pastries. It was like being inside a warm, yeasty snow globe. He nibbled on a sourdough corner here, pawed at a ciabatta there, completely oblivious to his unplanned journey.\n", + "\n", + "Meanwhile, back in the village, the baker was puzzled. Where was that curious cat? He often told stories to his customers about Whiskers’ antics. But today, the star of his stories was missing in action.\n", + "\n", + "Meanwhile, the truck had reached a bustling city market. When the doors finally opened, Whiskers was greeted by a bustling scene of city dwellers and market stalls. 
Confused at first, he quickly recomposed himself, as if this had been his intended destination all along.\n", + "\n", + "Prancing off the truck with a regal air, bits of bread still clinging to his fur, Whiskers strutted through the market as if he owned the place. The city folk, charmed by this bread-dusted feline, began snapping pictures. One amazed observer even commented, \"Look at this celebrity cat! He's got a career in showbiz!\"\n", + "\n", + "By the end of the day, Whiskers was returned home by a kind market lady who recognized him from the baker's stories. As she gently placed him down at the village square, he gave a nonchalant flick of his tail and walked off, leaving behind a trail of breadcrumbs and a story that the villagers would tell for years to come.\n", + "\n", + "And so, dear reader, the moral of this tale is simple: Curiosity may take you far and wide, but please remember where home is—because you'll definitely want to come back after an adventure-snack or two!\n", + "> Write a story about a curious cat in the style: funny.\n", + " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", + "Once upon a time in a quaint little village, there lived a curious cat named Whiskers. Whiskers had a reputation around the village for his relentless curiosity, constantly sticking his nose into everything, much to the amusement and occasional frustration of the townspeople.\n", + "\n", + "One sunny afternoon, Whiskers was prowling around the village square when he spotted the baker's delivery truck. As the driver was busy unloading trays of freshly baked bread, Whiskers couldn't resist the delicious aroma wafting through the air. His whiskers twitching, he quietly tiptoed closer to the truck.\n", + "\n", + "As soon as the baker turned his back, Whiskers leapt onto the delivery truck, his little paws barely making a sound. 
He sniffed around, eyes wide with amazement, as he found himself surrounded by an entire universe of bread. There were baguettes, loaves of rye, fluffy rolls, and croissants that seemed to glow under the afternoon sun.\n", + "\n", + "Suddenly, the truck door swung shut with a thud, and before Whiskers knew what was happening, the driver hopped back in, and the truck was on its way to the next delivery.\n", + "\n", + "Inside the truck, Whiskers was having the adventure of his life. With every jolt and turn of the vehicle, he found himself buried in a pile of delicious pastries. It was like being inside a warm, yeasty snow globe. He nibbled on a sourdough corner here, pawed at a ciabatta there, completely oblivious to his unplanned journey.\n", + "\n", + "Meanwhile, back in the village, the baker was puzzled. Where was that curious cat? He often told stories to his customers about Whiskers’ antics. But today, the star of his stories was missing in action.\n", + "\n", + "Meanwhile, the truck had reached a bustling city market. When the doors finally opened, Whiskers was greeted by a bustling scene of city dwellers and market stalls. Confused at first, he quickly recomposed himself, as if this had been his intended destination all along.\n", + "\n", + "Prancing off the truck with a regal air, bits of bread still clinging to his fur, Whiskers strutted through the market as if he owned the place. The city folk, charmed by this bread-dusted feline, began snapping pictures. One amazed observer even commented, \"Look at this celebrity cat! He's got a career in showbiz!\"\n", + "\n", + "By the end of the day, Whiskers was returned home by a kind market lady who recognized him from the baker's stories. 
As she gently placed him down at the village square, he gave a nonchalant flick of his tail and walked off, leaving behind a trail of breadcrumbs and a story that the villagers would tell for years to come.\n", + "\n", + "And so, dear reader, the moral of this tale is simple: Curiosity may take you far and wide, but please remember where home is—because you'll definitely want to come back after an adventure-snack or two!\n", + "Once upon a time in a quaint little village, there lived a curious cat named Whiskers. Whiskers had a reputation around the village for his relentless curiosity, constantly sticking his nose into everything, much to the amusement and occasional frustration of the townspeople.\n", + "\n", + "One sunny afternoon, Whiskers was prowling around the village square when he spotted the baker's delivery truck. As the driver was busy unloading trays of freshly baked bread, Whiskers couldn't resist the delicious aroma wafting through the air. His whiskers twitching, he quietly tiptoed closer to the truck.\n", + "\n", + "As soon as the baker turned his back, Whiskers leapt onto the delivery truck, his little paws barely making a sound. He sniffed around, eyes wide with amazement, as he found himself surrounded by an entire universe of bread. There were baguettes, loaves of rye, fluffy rolls, and croissants that seemed to glow under the afternoon sun.\n", + "\n", + "Suddenly, the truck door swung shut with a thud, and before Whiskers knew what was happening, the driver hopped back in, and the truck was on its way to the next delivery.\n", + "\n", + "Inside the truck, Whiskers was having the adventure of his life. With every jolt and turn of the vehicle, he found himself buried in a pile of delicious pastries. It was like being inside a warm, yeasty snow globe. He nibbled on a sourdough corner here, pawed at a ciabatta there, completely oblivious to his unplanned journey.\n", + "\n", + "Meanwhile, back in the village, the baker was puzzled. 
Where was that curious cat? He often told stories to his customers about Whiskers’ antics. But today, the star of his stories was missing in action.\n", + "\n", + "Meanwhile, the truck had reached a bustling city market. When the doors finally opened, Whiskers was greeted by a bustling scene of city dwellers and market stalls. Confused at first, he quickly recomposed himself, as if this had been his intended destination all along.\n", + "\n", + "Prancing off the truck with a regal air, bits of bread still clinging to his fur, Whiskers strutted through the market as if he owned the place. The city folk, charmed by this bread-dusted feline, began snapping pictures. One amazed observer even commented, \"Look at this celebrity cat! He's got a career in showbiz!\"\n", + "\n", + "By the end of the day, Whiskers was returned home by a kind market lady who recognized him from the baker's stories. As she gently placed him down at the village square, he gave a nonchalant flick of his tail and walked off, leaving behind a trail of breadcrumbs and a story that the villagers would tell for years to come.\n", + "\n", + "And so, dear reader, the moral of this tale is simple: Curiosity may take you far and wide, but please remember where home is—because you'll definitely want to come back after an adventure-snack or two!\n" ] } ], @@ -538,11 +643,11 @@ "\n", "\n", "# Verify sub-templates are captured in write_story's lexical context\n", - "assert story_with_moral in write_story.tools\n", - "assert story_funny in write_story.tools\n", - "print(\"Sub-templates available to write_story:\", list(write_story.tools))\n", + "assert story_with_moral in write_story.tools.values()\n", + "assert story_funny in write_story.tools.values()\n", + "print(\"Sub-templates available to write_story:\", write_story.tools.keys())\n", "\n", - "with handler(provider), handler(llm_logger):\n", + "with handler(provider), handler({completion: log_llm}):\n", " print(\"=== Story with moral ===\")\n", " 
print(write_story(\"a curious cat\", \"moral\"))\n", " print()\n", @@ -573,7 +678,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 44, "id": "4334d07a", "metadata": {}, "outputs": [ @@ -581,36 +686,15 @@ "name": "stdout", "output_type": "stream", "text": [ - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOpUS8BAytmElgOX8fLDSSym8TP', created=1765834083, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_T6RgNagFWflpLfddbDdAhy7e', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOqUGCwcplfEVlflhfHFeoN0vmV', created=1765834084, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_1zL43TYBfRd82z76Ww3VtZ10', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOrjGy3cKfupoXxoUpyh5g3Rg2i', created=1765834085, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_QIhDdQlUVyQb4xL3bp9mA3Ln', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOrSd4OYuyZMTYJQEH8nAuKmbII', created=1765834085, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_ndnD0MTgx5kCh0RQloWTEMDO', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOs7Deh2JdFir6jSkT2tvgXUwAD', created=1765834086, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_zZZ2qOKtJOSK0NQR05Es8GCT', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOsEkupw6S2qV7dAVO6IXlv48fE', created=1765834086, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_SROFh6GD7MXpKjEjuCKfwsoR', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOtOfJnpaXYL7B3vdnHZKo7CfTQ', created=1765834087, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_OfaqegdBaNqhYXbmtMSfq8E3', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOu2KvgpurkvKS5js70BrYPZ6ei', created=1765834088, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='fetch_data'), id='call_G1XWytSwDBOJXd5DbTnAgegd', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=10, prompt_tokens=553, total_tokens=563, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOuyGE1IdeXQAUdAihmAEuou0Nm', created=1765834088, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_yefiR7zjd3zhSaL4hO2IlVPz', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=553, total_tokens=564, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. 
Please retry.'}\"}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOv3DETvRpajf5oFobKAex8uI4c', created=1765834089, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_WKh4jx4XtF95mgUTv8uAXlLE', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=596, total_tokens=607, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 1/3. 
Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_WKh4jx4XtF95mgUTv8uAXlLE', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_WKh4jx4XtF95mgUTv8uAXlLE', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 2/3. Please retry.'}\"}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOwAICzWpytXVphXANfC6ix6Iv6', created=1765834090, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{}', name='unstable_service'), id='call_g20DJy4DQvADfZGwqnNClZf7', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=11, prompt_tokens=639, total_tokens=650, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'unstable_service', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_yefiR7zjd3zhSaL4hO2IlVPz', 'name': 'unstable_service', 'content': \"{'status': 
'failure', 'exception': 'Service unavailable! Attempt 1/3. Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_WKh4jx4XtF95mgUTv8uAXlLE', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_WKh4jx4XtF95mgUTv8uAXlLE', 'name': 'unstable_service', 'content': \"{'status': 'failure', 'exception': 'Service unavailable! Attempt 2/3. Please retry.'}\"}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'unstable_service'}, 'id': 'call_g20DJy4DQvADfZGwqnNClZf7', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_g20DJy4DQvADfZGwqnNClZf7', 'name': 'unstable_service', 'content': [{'type': 'text', 'text': \"{ 'status': 'ok', 'data': [1, 2, 3] }\"}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOwp4xrlzgidgfQPxc56WoCermB', created=1765834090, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='The data fetched from the unstable service is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=679, total_tokens=699, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_G1XWytSwDBOJXd5DbTnAgegd', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_G1XWytSwDBOJXd5DbTnAgegd', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'The data fetched from the unstable service is: `[1, 2, 3]`.'}]}], 'response_format': None, 
'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOxj5IZQ85aFjs4jCuNolWoY9OL', created=1765834091, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='The data fetched from the unstable service is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=590, total_tokens=610, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 
'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_OfaqegdBaNqhYXbmtMSfq8E3', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_OfaqegdBaNqhYXbmtMSfq8E3', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'The data fetched from the unstable service is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOx62IhQMgrL2JUgPzFMn7RImks', created=1765834091, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=21, prompt_tokens=590, total_tokens=611, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_SROFh6GD7MXpKjEjuCKfwsoR', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_SROFh6GD7MXpKjEjuCKfwsoR', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service: `[1, 2, 3]`.'}]}], 
'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAOyXctxr4R0RMfvWHsU5AYqAIOY', created=1765834092, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service, and the data is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=26, prompt_tokens=591, total_tokens=617, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_zZZ2qOKtJOSK0NQR05Es8GCT', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_zZZ2qOKtJOSK0NQR05Es8GCT', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service, and 
the data is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP0wwqVQC9MyAl9zWVe7zzCHxAC', created=1765834094, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service, and the data is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=26, prompt_tokens=596, total_tokens=622, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_ndnD0MTgx5kCh0RQloWTEMDO', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_ndnD0MTgx5kCh0RQloWTEMDO', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service, and 
the data is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP0MVlZ9RfdBubfjSXaW6MoAdrL', created=1765834094, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service, and the data is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=26, prompt_tokens=596, total_tokens=622, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_QIhDdQlUVyQb4xL3bp9mA3Ln', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_QIhDdQlUVyQb4xL3bp9mA3Ln', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service, and 
the data is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP1erUfmc0ilFRVBJTR0bcGcrmx', created=1765834095, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully fetched the data from the unstable service, and the data is: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=26, prompt_tokens=596, total_tokens=622, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_1zL43TYBfRd82z76Ww3VtZ10', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_1zL43TYBfRd82z76Ww3VtZ10', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully fetched the data from the unstable service, and 
the data is: `[1, 2, 3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP2E2oVkYct8MOs2A5fRhpclikB', created=1765834096, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully retrieved the data from the unstable service: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=21, prompt_tokens=596, total_tokens=617, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'tool': 'fetch_data', 'args': (), 'kwargs': {}}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Use the unstable_service tool to fetch data.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{}', 'name': 'fetch_data'}, 'id': 'call_T6RgNagFWflpLfddbDdAhy7e', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_T6RgNagFWflpLfddbDdAhy7e', 'name': 'fetch_data', 'content': [{'type': 'text', 'text': 'I successfully retrieved the data from the unstable service: `[1, 2, 
3]`.'}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': '', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. 
May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CnAP233bZtdUk1k1OCTmZHwHE9jN2', created=1765834096, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_83554c687e', choices=[Choices(finish_reason='stop', index=0, message=Message(content='I successfully retrieved the data from the unstable service: `[1, 2, 3]`.', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=21, prompt_tokens=591, total_tokens=612, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "Result: I successfully retrieved the data from the unstable service: `[1, 2, 3]`. Retries: 3\n" + "> Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\n", + "None\n", + "> Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\n", + "None\n", + "> Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\n", + "None\n", + "> Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\n", + "The data fetched from the unstable service is: [1, 2, 3].\n", + "Result: The data fetched from the unstable service is: [1, 2, 3]. 
Retries: 3\n" ] } ], @@ -619,7 +703,7 @@ "REQUIRED_RETRIES = 3\n", "\n", "\n", - "@defop\n", + "@Tool.define\n", "def unstable_service() -> str:\n", " \"\"\"Fetch data from an unstable external service. May require retries.\"\"\"\n", " global call_count\n", @@ -633,13 +717,13 @@ "\n", "@Template.define # unstable_service auto-captured from lexical scope\n", "def fetch_data() -> str:\n", - " \"\"\"Use the unstable_service tool to fetch data.\"\"\"\n", + " \"\"\"Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\"\"\"\n", " raise NotHandled\n", "\n", "\n", "retry_handler = RetryLLMHandler(max_retries=5, add_error_feedback=True)\n", "\n", - "with handler(provider), handler(retry_handler), handler(llm_logger):\n", + "with handler(provider), handler(retry_handler), handler({completion: log_llm}):\n", " result = fetch_data()\n", " print(f\"Result: {result}\", \"Retries:\", call_count)" ] @@ -655,7 +739,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 45, "id": "39b2b225", "metadata": {}, "outputs": [ @@ -663,21 +747,53 @@ "name": "stdout", "output_type": "stream", "text": [ - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Give a rating for Die Hard. The explanation MUST include the numeric score.'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-ClKlfxy7G2JCHEWA97nDGMi5WQIfB', created=1765397283, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":9,\"explanation\":\"Die Hard is widely regarded as a classic in the action film genre, offering a perfect blend of intense action sequences, clever plot, and memorable performances, particularly by Bruce Willis as the iconic John McClane. The film\\'s strong pace, witty dialogue, and exceptional direction by John McTiernan make it a standout. 
It set a new standard for action movies and has a lasting impact that is still felt today, which merits a score of 9 out of 10.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=110, prompt_tokens=108, total_tokens=218, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Give a rating for Die Hard. The explanation MUST include the numeric score.'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-ClKlfxy7G2JCHEWA97nDGMi5WQIfB', created=1765397283, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":9,\"explanation\":\"Die Hard is widely regarded as a classic in the action film genre, offering a perfect blend of intense action sequences, clever plot, and memorable performances, particularly by Bruce Willis as the iconic John McClane. The film\\'s strong pace, witty dialogue, and exceptional direction by John McTiernan make it a standout. 
It set a new standard for action movies and has a lasting impact that is still felt today, which merits a score of 9 out of 10.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=110, prompt_tokens=108, total_tokens=218, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Retry generating the following prompt: Give a rating for Die Hard. The explanation MUST include the numeric score.\\n\\nError from previous generation:\\n```\\nTraceback (most recent call last):\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 464, in _retry_completion\\n return fwd(current_template, *args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 433, in __call__\\n return self_handler(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/opt/homebrew/Cellar/python@3.12/3.12.9/Frameworks/Python.framework/Versions/3.12/lib/python3.12/contextlib.py\", line 81, in inner\\n return func(*args, **kwds)\\n ^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 45, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 70, in bound_body\\n return body(*a, **k)\\n ^^^^^^^^^^^^^\\n 
File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 630, in _call\\n return decode_response(template, resp)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 449, in __call__\\n return class_apply(self, *args, **kwargs) # type: ignore[return-value]\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 474, in apply\\n return op.__default_rule__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 334, in __default_rule__\\n return self.__default__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 574, in decode_response\\n result = Result.model_validate_json(result_str)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/.venv/lib/python3.12/site-packages/pydantic/main.py\", line 766, in model_validate_json\\n return cls.__pydantic_validator__.validate_json(\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\npydantic_core._pydantic_core.ValidationError: 1 validation error for Response\\nvalue.score\\n score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]\\n```'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-ClKlikK4JqFvu8DRzy8JV2hEBOoAT', created=1765397286, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":5,\"explanation\":\"Die Hard is a quintessential action film that sets the standard for the genre, earning a score of 5 out of 5. 
Its gripping storyline, charismatic performance by Bruce Willis, and innovative action sequences make it a timeless classic.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=61, prompt_tokens=856, total_tokens=917, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Retry generating the following prompt: Give a rating for Die Hard. The explanation MUST include the numeric score.\\n\\nError from previous generation:\\n```\\nTraceback (most recent call last):\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 464, in _retry_completion\\n return fwd(current_template, *args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 433, in __call__\\n return self_handler(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/opt/homebrew/Cellar/python@3.12/3.12.9/Frameworks/Python.framework/Versions/3.12/lib/python3.12/contextlib.py\", line 81, in inner\\n return func(*args, **kwds)\\n ^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 45, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 70, in bound_body\\n return body(*a, **k)\\n ^^^^^^^^^^^^^\\n File 
\"/Users/datnguyenthanh/Marc/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\\n return fn(*a, **k)\\n ^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 630, in _call\\n return decode_response(template, resp)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 449, in __call__\\n return class_apply(self, *args, **kwargs) # type: ignore[return-value]\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 474, in apply\\n return op.__default_rule__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/ops/types.py\", line 334, in __default_rule__\\n return self.__default__(*args, **kwargs)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/effectful/handlers/llm/providers.py\", line 574, in decode_response\\n result = Result.model_validate_json(result_str)\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\n File \"/Users/datnguyenthanh/Marc/effectful/.venv/lib/python3.12/site-packages/pydantic/main.py\", line 766, in model_validate_json\\n return cls.__pydantic_validator__.validate_json(\\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\\npydantic_core._pydantic_core.ValidationError: 1 validation error for Response\\nvalue.score\\n score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]\\n```'}], 'role': 'user'}], 'response_format': , 'tools': []}, 'response': ModelResponse(id='chatcmpl-ClKlikK4JqFvu8DRzy8JV2hEBOoAT', created=1765397286, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_e819e3438b', choices=[Choices(finish_reason='stop', index=0, message=Message(content='{\"value\":{\"score\":5,\"explanation\":\"Die Hard is a quintessential action film that sets the standard for the genre, earning a score of 5 out of 5. 
Its gripping storyline, charismatic performance by Bruce Willis, and innovative action sequences make it a timeless classic.\"}}', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=61, prompt_tokens=856, total_tokens=917, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", + "> Give a rating for Die Hard. The explanation MUST include the numeric score. Do not use any tools.\n", + "{\"value\":{\"score\":9,\"explanation\":\"Die Hard is a quintessential action film that redefined the genre with its intense action sequences, memorable characters, and sharp wit. Bruce Willis delivers an iconic performance as John McClane, a relatable and charismatic hero battling terrorists. Its clever plot twists, non-stop thrills, and innovative cinematography contribute to its enduring popularity and critical acclaim. Overall, it's often considered one of the best action movies of all time, deserving a score of 9 out of 10.\"}}\n", + "> Give a rating for Die Hard. The explanation MUST include the numeric score. 
Do not use any tools.\n", + "Error from previous generation:\n", + "```\n", + "Traceback (most recent call last):\n", + " File \"/Users/feser/work/basis/effectful/effectful/handlers/llm/providers.py\", line 175, in _retry_completion\n", + " return fwd(template_ext, *args, **kwargs)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/feser/work/basis/effectful/effectful/ops/types.py\", line 485, in __call__\n", + " return self_handler(*args, **kwargs)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/feser/.local/share/uv/python/cpython-3.12.9-macos-aarch64-none/lib/python3.12/contextlib.py\", line 81, in inner\n", + " return func(*args, **kwds)\n", + " ^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/feser/work/basis/effectful/effectful/internals/runtime.py\", line 45, in _cont_wrapper\n", + " return fn(*a, **k)\n", + " ^^^^^^^^^^^\n", + " File \"/Users/feser/work/basis/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\n", + " return fn(*a, **k)\n", + " ^^^^^^^^^^^\n", + " File \"/Users/feser/work/basis/effectful/effectful/internals/runtime.py\", line 70, in bound_body\n", + " return body(*a, **k)\n", + " ^^^^^^^^^^^^^\n", + " File \"/Users/feser/work/basis/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\n", + " return fn(*a, **k)\n", + " ^^^^^^^^^^^\n", + " File \"/Users/feser/work/basis/effectful/effectful/handlers/llm/providers.py\", line 373, in _call\n", + " return decode_response(template, resp)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/feser/work/basis/effectful/effectful/handlers/llm/providers.py\", line 317, in decode_response\n", + " result = Result.model_validate_json(result_str)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/Users/feser/work/basis/effectful/.venv/lib/python3.12/site-packages/pydantic/main.py\", line 766, in model_validate_json\n", + " return cls.__pydantic_validator__.validate_json(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + 
"pydantic_core._pydantic_core.ValidationError: 1 validation error for Result\n", + "value.score\n", + " score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]\n", + "```\n", + "{\"value\":{\"score\":5,\"explanation\":\"Die Hard is often celebrated as one of the quintessential action movies of all time. It features high-stakes tension, impressive action sequences, and an iconic performance by Bruce Willis as John McClane. Released in 1988, the film remains a benchmark for action storytelling with its clever plot and memorable villain portrayed by Alan Rickman. Hence, it earns a 5 out of 5 for its lasting impact and entertainment value in the action genre.\"}}\n", "Score: 5/5\n", - "Explanation: Die Hard is a quintessential action film that sets the standard for the genre, earning a score of 5 out of 5. Its gripping storyline, charismatic performance by Bruce Willis, and innovative action sequences make it a timeless classic.\n" + "Explanation: Die Hard is often celebrated as one of the quintessential action movies of all time. It features high-stakes tension, impressive action sequences, and an iconic performance by Bruce Willis as John McClane. Released in 1988, the film remains a benchmark for action storytelling with its clever plot and memorable villain portrayed by Alan Rickman. 
Hence, it earns a 5 out of 5 for its lasting impact and entertainment value in the action genre.\n" ] } ], "source": [ - "import pydantic\n", - "from pydantic import ValidationError, field_validator\n", - "from pydantic_core import PydanticCustomError\n", - "\n", - "\n", "@pydantic.dataclasses.dataclass\n", "class Rating:\n", " score: int\n", @@ -721,7 +837,7 @@ " exception_cls=ValidationError, # Catch validation errors\n", ")\n", "\n", - "with handler(provider), handler(retry_handler), handler(llm_logger):\n", + "with handler(provider), handler(retry_handler), handler({completion: log_llm}):\n", " rating = give_rating_for_movie(\"Die Hard\")\n", " print(f\"Score: {rating.score}/5\")\n", " print(f\"Explanation: {rating.explanation}\")" @@ -730,7 +846,7 @@ ], "metadata": { "kernelspec": { - "display_name": ".venv", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, diff --git a/effectful/handlers/llm/__init__.py b/effectful/handlers/llm/__init__.py index fbcb8593..a87b481d 100644 --- a/effectful/handlers/llm/__init__.py +++ b/effectful/handlers/llm/__init__.py @@ -1,117 +1,3 @@ -from __future__ import annotations +from .template import Template, Tool -import dataclasses -import functools -import inspect -import types -from collections import ChainMap -from collections.abc import Callable, Iterable, Mapping -from typing import Any - -from effectful.ops.semantics import evaluate -from effectful.ops.syntax import defop -from effectful.ops.types import NotHandled, Operation - - -class LexicalContext(ChainMap): - """ChainMap subclass for Template lexical scope. - - This avoids recursive evaluation of circular Template references. 
- """ - - pass - - -@evaluate.register(LexicalContext) -def _evaluate_lexical_context(expr: LexicalContext, **kwargs) -> LexicalContext: - return expr - - -@dataclasses.dataclass(frozen=True) -class Template[**P, T]: - __prompt_template__: str - __signature__: inspect.Signature - __context__: Mapping[str, Any] - __name__: str - - @staticmethod - def _get_excluded_operations() -> frozenset[Operation]: - """Get the set of internal operations to exclude from auto-capture.""" - from effectful.handlers.llm import providers - from effectful.ops import semantics - - excluded: set[Operation] = set() - for module in (providers, semantics): - for name in dir(module): - obj = getattr(module, name) - if isinstance(obj, Operation): - excluded.add(obj) - return frozenset(excluded) - - @property - def tools(self) -> tuple[Operation | Template, ...]: - """Operations and Templates available as tools. Auto-capture from lexical context.""" - excluded_ops = self._get_excluded_operations() - result: list[Operation | Template] = [] - # ChainMap.items() respects shadowing (locals shadow globals) - for name, obj in self.__context__.items(): - if name.startswith("_") or obj in result: - continue - if isinstance(obj, Operation): - # Exclude internal operations from providers and semantics modules - if obj in excluded_ops: - continue - result.append(obj) - elif isinstance(obj, Template): - result.append(obj) - return tuple(result) - - @defop - def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: - raise NotHandled - - def __get__(self, instance, _owner): - if instance is not None: - return functools.partial(self, instance) - else: - return self - - @classmethod - def define( - cls, - _func=None, - *, - tools: Iterable[Operation | Template] | str | None = None, - ): - """Define a prompt template. 
- - Args: - tools: Tools to expose to the LLM: - - None (default): no tools - - "auto": auto-capture from lexical scope - - list: explicit list of Operations/Templates - """ - frame: types.FrameType = inspect.currentframe().f_back # type: ignore - globals_proxy: types.MappingProxyType[str, Any] = types.MappingProxyType( - frame.f_globals - ) - locals_proxy: types.MappingProxyType[str, Any] = types.MappingProxyType( - frame.f_locals - ) - # LexicalContext: locals first (shadow globals), then globals - context = LexicalContext(locals_proxy, globals_proxy) # type: ignore[arg-type] - - def decorator(body: Callable[P, T]): - if not body.__doc__: - raise ValueError("Expected a docstring on body") - - return cls( - __prompt_template__=body.__doc__, - __signature__=inspect.signature(body), - __name__=body.__name__, - __context__=context, - ) - - if _func is None: - return decorator - return decorator(_func) +__all__ = ["Template", "Tool"] diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py index aa091d67..e99e4ce4 100644 --- a/effectful/handlers/llm/providers.py +++ b/effectful/handlers/llm/providers.py @@ -1,25 +1,14 @@ -import base64 -import dataclasses import functools import inspect -import io import logging import string import traceback import typing -from collections.abc import Callable, Hashable, Iterable, Mapping -from typing import Any, get_type_hints +from collections.abc import Callable, Hashable +from typing import Any import litellm import pydantic - -from effectful.handlers.llm.encoding import type_to_encodable_type - -try: - from PIL import Image -except ImportError: - raise ImportError("'pillow' is required to use effectful.handlers.providers") - from litellm import ( Choices, Message, @@ -29,156 +18,13 @@ ) from litellm.types.utils import ModelResponse -from effectful.handlers.llm import Template +from effectful.handlers.llm import Template, Tool +from effectful.handlers.llm.encoding import type_to_encodable_type from 
effectful.ops.semantics import fwd from effectful.ops.syntax import ObjectInterpretation, defop, implements from effectful.ops.types import Operation -def _pil_image_to_base64_data(pil_image: Image.Image) -> str: - buf = io.BytesIO() - pil_image.save(buf, format="PNG") - return base64.b64encode(buf.getvalue()).decode("utf-8") - - -def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: - return f"data:image/png;base64,{_pil_image_to_base64_data(pil_image)}" - - -@dataclasses.dataclass -class Tool[**P, T]: - callable: Operation[P, T] | Template[P, T] - name: str - parameter_annotations: dict[str, type] - description: str - - def serialise_return_value(self, value) -> OpenAIMessageContent: - """Serializes a value returned by the function into a json format suitable for the OpenAI API.""" - sig = inspect.signature(self.callable) - encoded_ty = type_to_encodable_type(sig.return_annotation) - encoded_value = encoded_ty.encode(value) - return encoded_ty.serialize(encoded_value) - - @functools.cached_property - def parameter_model(self) -> type[pydantic.BaseModel]: - fields = { - param_name: type_to_encodable_type(param_type).t - for param_name, param_type in self.parameter_annotations.items() - } - parameter_model = pydantic.create_model( - "Params", - __config__={"extra": "forbid"}, - **fields, # type: ignore - ) - return parameter_model - - def call_with_json_args( - self, template: Template, json_str: str - ) -> OpenAIMessageContent: - """Implements a roundtrip call to a python function. Input is a json string representing an LLM tool call request parameters. 
The output is the serialised response to the model.""" - try: - # build dict of raw encodable types U - raw_args = self.parameter_model.model_validate_json(json_str) - - # use encoders to decode Us to python types T - params: dict[str, Any] = { - param_name: type_to_encodable_type( - self.parameter_annotations[param_name] - ).decode(getattr(raw_args, param_name)) - for param_name in raw_args.model_fields_set - } - - # call tool with python types - result = tool_call( - template, - self.callable, - **params, - ) - # serialize back to U using encoder for return type - sig = inspect.signature(self.callable) - encoded_ty = type_to_encodable_type(sig.return_annotation) - encoded_value = encoded_ty.encode(result) - # serialise back to Json - return encoded_ty.serialize(encoded_value) - except Exception as exn: - return str({"status": "failure", "exception": str(exn)}) - - @classmethod - def define(cls, obj: Operation[P, T] | Template[P, T]): - """Create a Tool from an Operation or Template. - - Returns None if the object cannot be converted to a tool (e.g., missing type annotations). 
- """ - sig = inspect.signature(obj) - tool_name = obj.__name__ - - description = ( - obj.__prompt_template__ if isinstance(obj, Template) else obj.__doc__ or "" - ) - - # Try to get type hints, fall back to signature annotations if that fails - try: - hints = get_type_hints(obj) - except Exception: - hints = { - p.name: p.annotation - for p in sig.parameters.values() - if p.annotation is not inspect.Parameter.empty - } - - parameter_annotations: dict[str, type] = {} - for param_name, param in sig.parameters.items(): - # Skip parameters without type annotations - if param.annotation is inspect.Parameter.empty: - raise TypeError( - f"Parameter '{param_name}' in '{obj.__name__}' " - "does not have a type annotation" - ) - # get_type_hints might not include the parameter if annotation is invalid - if param_name not in hints: - raise TypeError( - f"Parameter '{param_name}' in '{obj.__name__}' " - "does not have a valid type annotation" - ) - parameter_annotations[param_name] = hints[param_name] - - return cls( - callable=obj, - name=tool_name, - parameter_annotations=parameter_annotations, - description=description, - ) - - @property - def function_definition(self) -> OpenAIChatCompletionToolParam: - response_format = litellm.utils.type_to_response_format_param( - self.parameter_model - ) - assert response_format is not None - return { - "type": "function", - "function": { - "name": self.name, - "description": self.description, - "parameters": response_format["json_schema"][ - "schema" - ], # extract the schema - "strict": True, - }, - } - - -def _tools_of_operations( - ops: Iterable[Operation | Template], -) -> Mapping[str, Tool]: - tools = {} - for op in ops: - tool = Tool.define(op) - # NOTE: Because lexical handling is already guaranteeing unique names, we can just use the tool's name directly. 
- tools[tool.name] = tool - return tools - - class _OpenAIPromptFormatter(string.Formatter): def format_as_messages( self, format_str: str, /, *args, **kwargs @@ -217,21 +63,16 @@ def push_current_text(): return prompt_parts -# Emitted for model request/response rounds so handlers can observe/log requests. @defop @functools.wraps(litellm.completion) def completion(*args, **kwargs) -> Any: - """Low-level LLM request. Handlers may log/modify requests and delegate via fwd().""" - return litellm.completion(*args, **kwargs) + """Low-level LLM request. Handlers may log/modify requests and delegate via fwd(). + This effect is emitted for model request/response rounds so handlers can + observe/log requests. -# Note: attempting to type the tool arguments causes type-checker failures -@defop -def tool_call[T]( - template: Template, tool: Operation[..., T] | Template[..., T], *args, **kwargs -) -> T: - """Perform a model-initiated tool call (can be an Operation or another Template).""" - return tool(*args, **kwargs) + """ + return litellm.completion(*args, **kwargs) class CacheLLMRequestHandler(ObjectInterpretation): @@ -287,33 +128,19 @@ def _log_completion(self, *args, **kwargs) -> Any: response = fwd() self.logger.info( "llm.request", - extra={ - "payload": { - "args": args, - "kwargs": kwargs, - "response": response, - } - }, + extra={"payload": {"args": args, "kwargs": kwargs, "response": response}}, ) return response - @implements(tool_call) - def _log_tool_call( - self, template: Template, tool: Operation, *args, **kwargs - ) -> Any: + @implements(Tool.__apply__) + def _log_tool_call(self, tool: Operation, *args, **kwargs) -> Any: """Log the tool call and result.""" tool_name = tool.__name__ result = fwd() self.logger.info( "llm.tool_call", - extra={ - "payload": { - "tool": tool_name, - "args": args, - "kwargs": kwargs, - } - }, + extra={"payload": {"tool": tool_name, "args": args, "kwargs": kwargs}}, ) return result @@ -338,33 +165,85 @@ def __init__( 
self.add_error_feedback = add_error_feedback self.exception_cls = exception_cls - @implements(Template.__call__) + @implements(Template.__apply__) def _retry_completion(self, template: Template, *args, **kwargs) -> Any: - max_retries = self.max_retries - current_template = template - while max_retries > 0: + prompt_ext = template.__prompt_template__ + for _ in range(self.max_retries - 1): + template_ext = Template.replace(template, prompt_template=prompt_ext) + try: - return fwd(current_template, *args, **kwargs) - except self.exception_cls as exn: - max_retries -= 1 - if max_retries == 0: - raise exn + return fwd(template_ext, *args, **kwargs) + except self.exception_cls: if self.add_error_feedback: # Capture the full traceback for better error context tb = traceback.format_exc() - prompt_ext = ( - f"Retry generating the following prompt: {template.__prompt_template__}\n\n" - f"Error from previous generation:\n```\n{tb}```" - ) - current_template = dataclasses.replace( - template, __prompt_template__=prompt_ext - ) - # Continue the loop to retry - raise Exception("Max retries reached") + prompt_ext += f"\nError from previous generation:\n```\n{tb}```" + + template_ext = Template.replace(template, prompt_template=prompt_ext) + return fwd(template_ext, *args, **kwargs) + + +def parameter_model(tool: Tool) -> type[pydantic.BaseModel]: + fields = { + name: type_to_encodable_type(param.annotation).t + for name, param in tool.__signature__.parameters.items() + } + parameter_model = pydantic.create_model( + "Params", + __config__={"extra": "forbid"}, + **fields, # type: ignore + ) + return parameter_model + + +def function_definition(tool: Tool) -> OpenAIChatCompletionToolParam: + param_model = parameter_model(tool) + response_format = litellm.utils.type_to_response_format_param(param_model) + description = tool.__default__.__doc__ + assert response_format is not None + assert description is not None + return { + "type": "function", + "function": { + "name": 
tool.__name__, + "description": description, + "parameters": response_format["json_schema"]["schema"], + "strict": True, + }, + } + + +def call_with_json_args(tool: Tool, json_str: str) -> OpenAIMessageContent: + """Implements a roundtrip call to a python function. Input is a json + string representing an LLM tool call request parameters. The output is + the serialised response to the model. + """ + sig = tool.__signature__ + param_model = parameter_model(tool) + try: + # build dict of raw encodable types U + raw_args = param_model.model_validate_json(json_str) + + # use encoders to decode Us to python types T + params: dict[str, Any] = { + param_name: type_to_encodable_type( + sig.parameters[param_name].annotation + ).decode(getattr(raw_args, param_name)) + for param_name in raw_args.model_fields_set + } + + # call tool with python types + result = tool(**params) -def _pydantic_model_from_type(typ: type): - return pydantic.create_model("Response", value=typ, __config__={"extra": "forbid"}) + # serialize back to U using encoder for return type + encoded_ty = type_to_encodable_type(sig.return_annotation) + encoded_value = encoded_ty.encode(result) + + # serialise back to Json + return encoded_ty.serialize(encoded_value) + except Exception as exn: + return str({"status": "failure", "exception": str(exn)}) @defop @@ -374,9 +253,9 @@ def compute_response(template: Template, model_input: list[Any]) -> ModelRespons """ ret_type = template.__signature__.return_annotation + tools = template.tools - tools = _tools_of_operations(template.tools) - tool_schemas = [t.function_definition for t in tools.values()] + tool_schemas = [function_definition(t) for t in tools.values()] response_encoding_type: type | None = type_to_encodable_type(ret_type).t if response_encoding_type == str: response_encoding_type = None @@ -401,9 +280,10 @@ def compute_response(template: Template, model_input: list[Any]) -> ModelRespons for tool_call in message.tool_calls: function = tool_call.function - 
function_name = typing.cast(str, function.name) + function_name = function.name + assert function_name is not None tool = tools[function_name] - tool_result = tool.call_with_json_args(template, function.arguments) + tool_result = call_with_json_args(tool, function.arguments) model_input.append( { "role": "tool", @@ -483,7 +363,7 @@ def __init__(self, model_name: str = "gpt-4o", **config): def _completion(self, *args, **kwargs): return fwd(self.model_name, *args, **(self.config | kwargs)) - @implements(Template.__call__) + @implements(Template.__apply__) def _call[**P, T]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: diff --git a/effectful/handlers/llm/sampling.py b/effectful/handlers/llm/sampling.py index effc197d..8497d88a 100644 --- a/effectful/handlers/llm/sampling.py +++ b/effectful/handlers/llm/sampling.py @@ -18,7 +18,7 @@ def __init__(self, no_voters: int = 6, k: int = 3): self.no_voters = no_voters self.k = k - @implements(Template.__call__) + @implements(Template.__apply__) def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: executor = ThreadPoolExecutor() intp = get_interpretation() diff --git a/effectful/handlers/llm/synthesis.py b/effectful/handlers/llm/synthesis.py index b9308776..6624b72a 100644 --- a/effectful/handlers/llm/synthesis.py +++ b/effectful/handlers/llm/synthesis.py @@ -1,6 +1,5 @@ import ast import collections.abc -import dataclasses import linecache import re import textwrap @@ -61,8 +60,8 @@ def _parse_and_eval[T](self, t: type[T], content: str) -> T: return gs[last_decl.name] - @implements(Template.__call__) - def _call(self, template, *args, **kwargs) -> None: + @implements(Template.__apply__) + def _apply[**P, T](self, template: Template[P, T], *args, **kwargs) -> T: ret_type = template.__signature__.return_annotation origin = typing.get_origin(ret_type) ret_type = ret_type if origin is None else origin @@ -85,16 +84,11 @@ def _call(self, template, *args, **kwargs) -> None: """).strip() - response 
= fwd( - dataclasses.replace( - template, - __prompt_template__=prompt_ext, - __signature__=template.__signature__.replace(return_annotation=str), - ), - *args, - **kwargs, + fresh_template: Template[P, str] = Template.replace( + template, + prompt_template=prompt_ext, + signature=template.__signature__.replace(return_annotation=str), ) - + response = fresh_template(*args, **kwargs) functional = self._parse_and_eval(ret_type, response) - return functional diff --git a/effectful/handlers/llm/template.py b/effectful/handlers/llm/template.py new file mode 100644 index 00000000..9c999ce2 --- /dev/null +++ b/effectful/handlers/llm/template.py @@ -0,0 +1,144 @@ +import inspect +import types +import typing +from collections import ChainMap +from collections.abc import Callable, Mapping, MutableMapping +from dataclasses import dataclass +from typing import Any + +from effectful.ops.types import INSTANCE_OP_PREFIX, NotHandled, Operation + + +class Tool[**P, T](Operation[P, T]): + def __init__( + self, signature: inspect.Signature, name: str, default: Callable[P, T] + ): + if not default.__doc__: + raise ValueError("Tools must have docstrings.") + + super().__init__(signature, name, default) + + @classmethod + def define(cls, *args, **kwargs) -> "Tool[P, T]": + return typing.cast("Tool[P, T]", super().define(*args, **kwargs)) + + +@dataclass +class _BoundInstance[T]: + instance: T + + +class Template[**P, T](Tool[P, T]): + __context__: ChainMap[str, Any] + + @property + def __prompt_template__(self) -> str: + assert self.__default__.__doc__ is not None + return self.__default__.__doc__ + + @property + def tools(self) -> Mapping[str, Tool]: + """Operations and Templates available as tools. 
Auto-capture from lexical context.""" + result = {} + + for name, obj in self.__context__.items(): + # Collect tools in context + if isinstance(obj, Tool): + result[name] = obj + + if isinstance(obj, staticmethod) and isinstance(obj.__func__, Tool): + result[name] = obj.__func__ + + # Collect tools as methods on any bound instances + if isinstance(obj, _BoundInstance): + for instance_name in obj.instance.__dir__(): + if instance_name.startswith(INSTANCE_OP_PREFIX): + continue + instance_obj = getattr(obj.instance, instance_name) + if isinstance(instance_obj, Tool): + result[instance_name] = instance_obj + + return result + + def __get__[S](self, instance: S | None, owner: type[S] | None = None): + if hasattr(self, "_name_on_instance") and hasattr( + instance, self._name_on_instance + ): + return getattr(instance, self._name_on_instance) + + result = super().__get__(instance, owner) + self_param_name = list(self.__signature__.parameters.keys())[0] + self_context = {self_param_name: _BoundInstance(instance)} + result.__context__ = self.__context__.new_child(self_context) + return result + + @classmethod + def define[**Q, V]( + cls, default: Callable[Q, V], *args, **kwargs + ) -> "Template[Q, V]": + """Define a prompt template. + + `define` takes a function, and can be used as a decorator. The + function's docstring should be a prompt, which may be templated in the + function arguments. The prompt will be provided with any instances of + `Tool` that exist in the lexical context as callable tools. 
+ + """ + frame = inspect.currentframe() + assert frame is not None + frame = frame.f_back + assert frame is not None + + # Check if we're in a class definition by looking for __qualname__ + qualname = frame.f_locals.get("__qualname__") + n_frames = 1 + if qualname is not None: + name_components = qualname.split(".") + for name in reversed(name_components): + if name == "": + break + n_frames += 1 + + contexts = [] + for offset in range(n_frames): + assert frame is not None + locals_proxy: types.MappingProxyType[str, Any] = types.MappingProxyType( + frame.f_locals + ) + globals_proxy: types.MappingProxyType[str, Any] = types.MappingProxyType( + frame.f_globals + ) + contexts.append(locals_proxy) + frame = frame.f_back + + contexts.append(globals_proxy) + context: ChainMap[str, Any] = ChainMap( + *typing.cast(list[MutableMapping[str, Any]], contexts) + ) + + op = super().define(default, *args, **kwargs) + op.__context__ = context # type: ignore[attr-defined] + return typing.cast(Template[Q, V], op) + + def replace( + self, + signature: inspect.Signature | None = None, + prompt_template: str | None = None, + name: str | None = None, + ) -> "Template": + signature = signature or self.__signature__ + prompt_template = prompt_template or self.__prompt_template__ + name = name or self.__name__ + + if prompt_template: + + def default(*args, **kwargs): + raise NotHandled + + default.__doc__ = prompt_template + else: + default = self.__default__ + + op = Template(signature, name, default) + op.__context__ = self.__context__ + return op diff --git a/effectful/ops/types.py b/effectful/ops/types.py index 9a670f1e..722c3986 100644 --- a/effectful/ops/types.py +++ b/effectful/ops/types.py @@ -62,6 +62,9 @@ def __get__(self, instance, owner: type | None = None): return bound_op +INSTANCE_OP_PREFIX = "__instanceop" + + @functools.total_ordering class Operation[**Q, V]: """An abstract class representing an effect that can be implemented by an effect handler. 
@@ -432,7 +435,7 @@ def __str__(self): def __set_name__[T](self, owner: type[T], name: str) -> None: if not issubclass(owner, Term): assert not hasattr(self, "_name_on_instance"), "should only be called once" - self._name_on_instance: str = f"__instanceop_{name}" + self._name_on_instance: str = f"{INSTANCE_OP_PREFIX}_{name}" def __get__[T](self, instance: T | None, owner: type[T] | None = None): if hasattr(instance, "__dict__") and hasattr(self, "_name_on_instance"): diff --git a/tests/test_handlers_llm.py b/tests/test_handlers_llm.py index 9e0e510f..ad1c9b9c 100644 --- a/tests/test_handlers_llm.py +++ b/tests/test_handlers_llm.py @@ -23,7 +23,7 @@ def __init__(self, prompt_responses: dict[str, T]): """ self.prompt_responses = prompt_responses - @implements(Template.__call__) + @implements(Template.__apply__) def _call[**P]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: @@ -49,7 +49,7 @@ def __init__(self, response: T): """ self.response = response - @implements(Template.__call__) + @implements(Template.__apply__) def _call[**P]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: @@ -69,7 +69,7 @@ def haiku(theme: str) -> str: raise NotHandled -@Template.define() +@Template.define def primes(first_digit: int) -> int: """Give exactly one prime number with {first_digit} as the first digit. 
Respond with only the number.""" raise NotHandled @@ -154,7 +154,7 @@ def __init__( self.exception_factory = exception_factory self.call_count = 0 - @implements(Template.__call__) + @implements(Template.__apply__) def _call[**P]( self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs ) -> T: @@ -222,7 +222,7 @@ class PromptCapturingProvider(ObjectInterpretation): def __init__(self): self.call_count = 0 - @implements(Template.__call__) + @implements(Template.__apply__) def _call(self, template: Template, *args, **kwargs): self.call_count += 1 call_prompts.append(template.__prompt_template__) @@ -243,7 +243,6 @@ def _call(self, template: Template, *args, **kwargs): # First call has original prompt assert "Write a limerick on the theme of {theme}." in call_prompts[0] # Second call should include error feedback with traceback - assert "Retry generating" in call_prompts[1] assert "First attempt failed" in call_prompts[1] @@ -275,8 +274,8 @@ def write_story(topic: str, style: str) -> str: assert write_story.__context__["story_funny"] is story_funny # Templates in lexical context are exposed as callable tools - assert story_with_moral in write_story.tools - assert story_funny in write_story.tools + assert story_with_moral in write_story.tools.values() + assert story_funny in write_story.tools.values() def test_template_composition_with_chained_calls(): @@ -321,11 +320,11 @@ def test_mutually_recursive_templates(): assert "mutual_b" in mutual_b.__context__ # They should also be in each other's tools - assert mutual_a in mutual_b.tools - assert mutual_b in mutual_a.tools + assert mutual_a in mutual_b.tools.values() + assert mutual_b in mutual_a.tools.values() # And themselves (self-recursion) - assert mutual_a in mutual_a.tools - assert mutual_b in mutual_b.tools + assert mutual_a in mutual_a.tools.values() + assert mutual_b in mutual_b.tools.values() # Module-level variable for shadowing test diff --git a/tests/test_handlers_llm_template.py 
b/tests/test_handlers_llm_template.py new file mode 100644 index 00000000..1194c701 --- /dev/null +++ b/tests/test_handlers_llm_template.py @@ -0,0 +1,206 @@ +from dataclasses import dataclass + +import pytest + +from effectful.handlers.llm import Template, Tool +from effectful.handlers.llm.providers import format_model_input +from effectful.ops.semantics import NotHandled, handler +from effectful.ops.syntax import ObjectInterpretation, implements + + +def test_template_method(): + """Test that methods can be used as templates.""" + local_variable = None # noqa: F841 + + @dataclass + class A: + x: int + + @Tool.define + def random(self) -> int: + """Returns a random number, chosen by fair dice roll.""" + return 4 + + @Template.define + def f(self) -> int: + """What is the number after 3?""" + raise NotHandled + + a = A(0) + assert isinstance(a.f, Template) + assert "random" in a.f.tools + assert "f" in a.f.tools + assert "local_variable" in a.f.__context__ and "local_variable" not in a.f.tools + assert a.f.tools["random"]() == 4 + + class B(A): + @Tool.define + def reverse(self, s: str) -> str: + """Reverses a string.""" + return str(reversed(s)) + + b = B(1) + assert isinstance(b.f, Template) + assert "random" in b.f.tools + assert "reverse" in b.f.tools + assert "local_variable" in b.f.__context__ and "local_variable" not in a.f.tools + + +def test_template_method_nested_class(): + """Test that template methods work on nested classes.""" + local_variable = "test" # noqa: F841 + + @dataclass + class A: + x: int + + @Tool.define + @staticmethod + def random() -> int: + """Returns a random number, chosen by fair dice roll.""" + return 4 + + @dataclass + class B: + y: bool + + @Template.define + def f(self) -> int: + """What is the number after 3?""" + raise NotHandled + + a = A.B(True) + assert isinstance(a.f, Template) + assert "random" in a.f.tools + assert "f" in a.f.tools + assert "local_variable" in a.f.__context__ and "local_variable" not in a.f.tools + assert 
a.f.tools["random"]() == 4 + + +class A: + @Template.define + def f(self) -> str: + """Do stuff""" + raise NotImplementedError + + +def test_template_method_module(): + """Test that template methods work when defined on module-level classes.""" + a = A() + assert isinstance(a.f, Template) + + +def _define_scoped_templates(): + @Tool.define + def shown(self) -> int: + """Should be able to see this tool.""" + return 0 + + class A: + @Template.define + def f(self) -> str: + """test""" + return "" + + @Template.define + def g() -> int: + """test""" + return 0 + + def _nested(): + nonlocal shown + + @Template.define + def h() -> int: + """test""" + return 0 + + return h + + class B: + @Template.define + def i(self) -> str: + """test""" + return "" + + class C: + @Template.define + def j(self) -> str: + """test""" + return "" + + return [A().f, g, _nested(), B().i, B.C().j] + + +def test_template_method_scoping(): + @Tool.define + def hidden(self) -> int: + """Shouldn't be able to see this tool.""" + return 0 + + templates = _define_scoped_templates() + for t in templates: + assert isinstance(t, Template) + assert "shown" in t.__context__ + assert "hidden" not in t.__context__ + + +class TemplateStringIntp(ObjectInterpretation): + """Returns the result of template formatting as a string. Only supports + templates that produce string prompts. + + """ + + @implements(Template.__apply__) + def _[**P, T]( + self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs + ) -> T: + model_input = format_model_input(template, *args, **kwargs) + template_result = model_input[0]["content"] + assert len(template_result) == 1 + return template_result[0]["text"] + + +def test_template_formatting_simple(): + @Template.define + @staticmethod + def rhyme(a: str, b: str) -> str: + """The {a} sat in the {b}.""" + raise NotHandled + + with handler(TemplateStringIntp()): + assert rhyme("cat", "hat") == "The cat sat in the hat." 
+ + +@pytest.mark.xfail +def test_template_formatting_scoped(): + feet_per_mile = 5280 # noqa: F841 + + @Template.define + def convert(feet: int) -> float: + """How many miles is {feet} feet? There are {feet_per_mile} feet per mile.""" + raise NotHandled + + with handler(TemplateStringIntp()): + assert ( + convert(7920) + == "How many miles is 7920 feet? There are 5280 feet per mile." + ) + + +@pytest.mark.xfail +def test_template_formatting_method(): + @dataclass + class User: + name: str + + @Template.define + def greet(self, day: str) -> float: + """Greet the user '{self.name}' and wish them a good {day}.""" + raise NotHandled + + with handler(TemplateStringIntp()): + user = User("Bob") + assert ( + user.greet("Monday") == "Greet the user 'Bob' and wish them a good Monday." + ) diff --git a/tests/test_handlers_llm_tool_calling_book.py b/tests/test_handlers_llm_tool_calling_book.py index bcdb1468..e7333e3e 100644 --- a/tests/test_handlers_llm_tool_calling_book.py +++ b/tests/test_handlers_llm_tool_calling_book.py @@ -9,13 +9,10 @@ import pytest from pydantic import BaseModel, Field -from effectful.handlers.llm import Template -from effectful.handlers.llm.providers import ( - LiteLLMProvider, - completion, -) +from effectful.handlers.llm import Template, Tool +from effectful.handlers.llm.providers import LiteLLMProvider, completion from effectful.ops.semantics import fwd, handler -from effectful.ops.syntax import ObjectInterpretation, defop, implements +from effectful.ops.syntax import ObjectInterpretation, implements from effectful.ops.types import NotHandled # Check for API keys @@ -56,7 +53,7 @@ class BookRecommendation(BaseModel): reason: str = Field(..., description="Why this book is recommended") -@defop +@Tool.define def recommend_book_tool(genre: str, mood: str) -> BookRecommendation: """Recommend a book based on genre and mood. 
diff --git a/tests/test_handlers_llm_tool_calling_poem.py b/tests/test_handlers_llm_tool_calling_poem.py index 32acb637..8cef1e32 100644 --- a/tests/test_handlers_llm_tool_calling_poem.py +++ b/tests/test_handlers_llm_tool_calling_poem.py @@ -11,13 +11,13 @@ from pydantic import Field from pydantic.dataclasses import dataclass as pydantic_dataclass -from effectful.handlers.llm import Template +from effectful.handlers.llm import Template, Tool from effectful.handlers.llm.providers import ( LiteLLMProvider, completion, ) from effectful.ops.semantics import fwd, handler -from effectful.ops.syntax import ObjectInterpretation, defop, implements +from effectful.ops.syntax import ObjectInterpretation, implements from effectful.ops.types import NotHandled # Check for API keys @@ -67,7 +67,7 @@ class PoemQuality(str, Enum): BAD = "BAD" -@defop +@Tool.define def evaluate_poem_tool(poem: Poem, explanation: str) -> PoemQuality: """Evaluate the quality of a poem. @@ -105,6 +105,8 @@ def generate_good_poem(topic: str) -> Poem: You MUST use the evaluate_poem_tool to check poem quality. Keep iterating until evaluate_poem_tool returns GOOD. Return your final poem as JSON with 'content' and 'form' fields. + + Do not call the 'generate_good_poem' tool. 
""" raise NotHandled From 3311d1bba5efb7028a349f760ce9e83646177776 Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Mon, 29 Dec 2025 14:55:53 -0500 Subject: [PATCH 26/39] Fail when encoding terms or operations (#474) * raise error when encoding terms or operations * change error type --- effectful/handlers/llm/encoding.py | 11 +++++++++++ tests/test_handlers_llm_encoding.py | 11 +++++++++++ 2 files changed, 22 insertions(+) diff --git a/effectful/handlers/llm/encoding.py b/effectful/handlers/llm/encoding.py index fe8b7e21..241634c3 100644 --- a/effectful/handlers/llm/encoding.py +++ b/effectful/handlers/llm/encoding.py @@ -12,6 +12,7 @@ from PIL import Image from effectful.ops.syntax import _CustomSingleDispatchCallable +from effectful.ops.types import Operation, Term def _pil_image_to_base64_data(pil_image: Image.Image) -> str: @@ -73,6 +74,16 @@ def decode(cls, vl: T) -> T: return typing.cast(Encodable[T], BaseEncodable()) +@type_to_encodable_type.register(Term) +def _type_encodable_type_term[T: Term](ty: type[T]) -> Encodable[T]: + raise TypeError("Terms cannot be encoded or decoded in general.") + + +@type_to_encodable_type.register(Operation) +def _type_encodable_type_operation[T: Operation](ty: type[T]) -> Encodable[T]: + raise TypeError("Operations cannot be encoded or decoded in general.") + + @type_to_encodable_type.register(pydantic.BaseModel) def _type_encodable_type_pydantic_base_model[T: pydantic.BaseModel]( ty: type[T], diff --git a/tests/test_handlers_llm_encoding.py b/tests/test_handlers_llm_encoding.py index ce50979e..41999a45 100644 --- a/tests/test_handlers_llm_encoding.py +++ b/tests/test_handlers_llm_encoding.py @@ -6,6 +6,17 @@ from PIL import Image from effectful.handlers.llm.encoding import type_to_encodable_type +from effectful.ops.types import Operation, Term + + +def test_type_to_encodable_type_term(): + with pytest.raises(TypeError): + type_to_encodable_type(Term) + + +def test_type_to_encodable_type_operation(): + with 
pytest.raises(TypeError): + type_to_encodable_type(Operation) def test_type_to_encodable_type_str(): From 23f95efb9dcd5ae97bb32bd74705575435a9e2b7 Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Wed, 31 Dec 2025 14:46:44 +0000 Subject: [PATCH 27/39] Implemented record and replay fixtures for LLM calls (#467) * implemented record and replay fixtures for LLM calls * changed defaults to not rebuild fixtures, with dedicated make rule to rebuild * switched pytest to use request fixture instead of env --- Makefile | 5 + ...LLMLoggingHandler__test_custom_logger.json | 44 + ...LLMLoggingHandler__test_logs_requests.json | 44 + ...LLMProvider__test_integer_return_type.json | 44 + ...ompt_cross_endpoint[claude-haiku-4-5].json | 42 + ...le_prompt_cross_endpoint[gpt-4o-mini].json | 44 + ...e_prompt_multiple_models[gpt-4o-mini].json | 44 + ...le_prompt_multiple_models[gpt-5-nano].json | 44 + ...teLLMProvider__test_structured_output.json | 44 + ...eLLMProvider__test_with_config_params.json | 44 + ...ramSynthesis__test_generates_callable.json | 44 + ...eturn__test_pydantic_basemodel_return.json | 44 + ...ers_llm_provider.py__test_image_input.json | 44 + tests/test_handlers_llm_provider.py | 78 +- uv.lock | 3047 +++++++++++++++++ 15 files changed, 3635 insertions(+), 21 deletions(-) create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLLMLoggingHandler__test_custom_logger.json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLLMLoggingHandler__test_logs_requests.json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_integer_return_type.json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[claude-haiku-4-5].json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[gpt-4o-mini].json create 
mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-4o-mini].json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-5-nano].json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_structured_output.json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_with_config_params.json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestProgramSynthesis__test_generates_callable.json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__TestPydanticBaseModelReturn__test_pydantic_basemodel_return.json create mode 100644 tests/fixtures/tests_test_handlers_llm_provider.py__test_image_input.json create mode 100644 uv.lock diff --git a/Makefile b/Makefile index c7d071aa..e7a111d8 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,5 @@ +.PHONY: lint format test test-notebooks rebuild-fixtures FORCE + lint: FORCE ./scripts/lint.sh @@ -10,4 +12,7 @@ test: lint FORCE test-notebooks: lint FORCE ./scripts/test_notebooks.sh +rebuild-fixtures: + REBUILD_FIXTURES=true uv run pytest tests/test_handlers_llm_provider.py + FORCE: diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLLMLoggingHandler__test_custom_logger.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLLMLoggingHandler__test_custom_logger.json new file mode 100644 index 00000000..c9d68532 --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLLMLoggingHandler__test_custom_logger.json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "Testing is a systematic process used to evaluate the quality and performance of a product or system.", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + 
"role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182746, + "id": "chatcmpl-CspFS6atQlFopsRIEKHYNzzq5Xzv1", + "model": "gpt-4o-mini-2024-07-18", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": "fp_c4585b5b9c", + "usage": { + "completion_tokens": 19, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 0, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 313, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 332 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLLMLoggingHandler__test_logs_requests.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLLMLoggingHandler__test_logs_requests.json new file mode 100644 index 00000000..022def82 --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLLMLoggingHandler__test_logs_requests.json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "Testing is a crucial process that validates the functionality and quality of software before it is released.", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182745, + "id": "chatcmpl-CspFR2Z5zotyRiEpqYi3ccOKBxshS", + "model": "gpt-4o-mini-2024-07-18", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": "fp_c4585b5b9c", + "usage": { + "completion_tokens": 19, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 0, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + 
"prompt_tokens": 313, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 332 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_integer_return_type.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_integer_return_type.json new file mode 100644 index 00000000..51d944a3 --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_integer_return_type.json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "{\"value\":67}", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182739, + "id": "chatcmpl-CspFLXojfuOibqKzI1QdRgfOJtd36", + "model": "gpt-5-nano-2025-08-07", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": null, + "usage": { + "completion_tokens": 529, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 512, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 429, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 958 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[claude-haiku-4-5].json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[claude-haiku-4-5].json new file mode 100644 index 00000000..a3092084 --- /dev/null +++ 
b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[claude-haiku-4-5].json @@ -0,0 +1,42 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "content": "Testing is a critical process that helps identify bugs, verify functionality, and ensure that software meets quality standards before deployment.", + "function_call": null, + "provider_specific_fields": { + "citations": null, + "thinking_blocks": null + }, + "role": "assistant", + "tool_calls": null + } + } + ], + "created": 1767182732, + "id": "chatcmpl-aa66067c-df8b-4adf-8978-68e8cdcaaa4f", + "model": "claude-haiku-4-5-20251001", + "object": "chat.completion", + "system_fingerprint": null, + "usage": { + "cache_creation_input_tokens": 0, + "cache_read_input_tokens": 0, + "completion_tokens": 26, + "completion_tokens_details": null, + "prompt_tokens": 1145, + "prompt_tokens_details": { + "audio_tokens": null, + "cache_creation_token_details": { + "ephemeral_1h_input_tokens": 0, + "ephemeral_5m_input_tokens": 0 + }, + "cache_creation_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 1171 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[gpt-4o-mini].json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[gpt-4o-mini].json new file mode 100644 index 00000000..acf9c8d6 --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[gpt-4o-mini].json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "Testing is essential for ensuring the quality and reliability of software and products before they are released.", + "function_call": null, + "provider_specific_fields": { + 
"refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182730, + "id": "chatcmpl-CspFCkNxMBr1YyZtqzWJU1rMAQykg", + "model": "gpt-4o-mini-2024-07-18", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": "fp_c4585b5b9c", + "usage": { + "completion_tokens": 19, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 0, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 313, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 332 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-4o-mini].json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-4o-mini].json new file mode 100644 index 00000000..4e924fce --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-4o-mini].json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "Testing is a crucial process that ensures the quality and functionality of a product or system before its release.", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182727, + "id": "chatcmpl-CspF9mdsKgygvf8Pogy7DCFgjydme", + "model": "gpt-4o-mini-2024-07-18", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": "fp_c4585b5b9c", + "usage": { + "completion_tokens": 21, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + 
"image_tokens": null, + "reasoning_tokens": 0, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 313, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 334 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-5-nano].json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-5-nano].json new file mode 100644 index 00000000..18a1d429 --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-5-nano].json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "Testing helps ensure product quality by catching issues early.", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182728, + "id": "chatcmpl-CspFA6VcwnTzTxjuvpG76RCuRt3KM", + "model": "gpt-5-nano-2025-08-07", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": null, + "usage": { + "completion_tokens": 211, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 192, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 394, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 605 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_structured_output.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_structured_output.json 
new file mode 100644 index 00000000..796d447f --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_structured_output.json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "{\"value\":{\"genre\":\"action\",\"explanation\":\"The story centers on a rogue cop confronting an organized threat in a high-stakes, high-adrenaline setting (a skyscraper), with emphasis on pursuits, gunfights, and stunts typical of action films.\"}}", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182732, + "id": "chatcmpl-CspFEhYoCToW0c9B0GjXeKbjiIn0K", + "model": "gpt-5-nano-2025-08-07", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": null, + "usage": { + "completion_tokens": 451, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 384, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 541, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 992 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_with_config_params.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_with_config_params.json new file mode 100644 index 00000000..5e3b75b5 --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_with_config_params.json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "A deterministic test consistently produces the same results under the same conditions, ensuring reliability and 
repeatability in software testing.", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182744, + "id": "chatcmpl-CspFQyXeEzOBDFdjK0p7UJn2Fe6RV", + "model": "gpt-4o-mini-2024-07-18", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": "fp_c4585b5b9c", + "usage": { + "completion_tokens": 23, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 0, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 314, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 337 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestProgramSynthesis__test_generates_callable.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestProgramSynthesis__test_generates_callable.json new file mode 100644 index 00000000..b511db5f --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestProgramSynthesis__test_generates_callable.json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "\ndef count_a_occurrences(input_string: str) -> int:\n count = 0\n for char in input_string:\n if char == 'a':\n count += 1\n return count\n", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182747, + "id": "chatcmpl-CspFT15GIFOwA3ImBdg5GYJHVjZxP", + "model": "gpt-4o-mini-2024-07-18", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": "fp_c4585b5b9c", + "usage": { + "completion_tokens": 50, + "completion_tokens_details": { + 
"accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 0, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 439, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 489 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestPydanticBaseModelReturn__test_pydantic_basemodel_return.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestPydanticBaseModelReturn__test_pydantic_basemodel_return.json new file mode 100644 index 00000000..ec46553f --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestPydanticBaseModelReturn__test_pydantic_basemodel_return.json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "{\"value\":{\"title\":\"The Spark Within the Spire\",\"rating\":4,\"summary\":\"The Spark Within the Spire follows a young student who discovers latent magical powers and earns a place at a venerable wizarding academy. Across his first year, he navigates challenging classes, budding friendships, and a growing sense of destiny as a looming threat quietly unfolds. The book blends classic wizard-school charm with inventive magic, delivering moments of wonder, humor, and quiet courage. 
While it uses familiar tropes, its strong character work and brisk pace make it a heartfelt coming-of-age tale with enough fresh twists to stay compelling for readers who enjoy magical school adventures.\"}}", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182755, + "id": "chatcmpl-CspFbHJXFBDmP4RrqnjVI0CDIiePi", + "model": "gpt-5-nano-2025-08-07", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": null, + "usage": { + "completion_tokens": 2192, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 2048, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 526, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 2718 + } +} \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__test_image_input.json b/tests/fixtures/tests_test_handlers_llm_provider.py__test_image_input.json new file mode 100644 index 00000000..713c124b --- /dev/null +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__test_image_input.json @@ -0,0 +1,44 @@ +{ + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "annotations": [], + "content": "The image appears to depict a simple, pixelated smiley face with two rectangular eyes and a wide, curved mouth, giving a cheerful expression.", + "function_call": null, + "provider_specific_fields": { + "refusal": null + }, + "role": "assistant", + "tool_calls": null + }, + "provider_specific_fields": {} + } + ], + "created": 1767182821, + "id": "chatcmpl-CspGfBSEI7umvGbclbA4o1OsXZsil", + "model": "gpt-4o-2024-08-06", + "object": "chat.completion", + "service_tier": "default", + "system_fingerprint": "fp_deacdd5f6f", + "usage": 
{ + "completion_tokens": 30, + "completion_tokens_details": { + "accepted_prediction_tokens": 0, + "audio_tokens": 0, + "image_tokens": null, + "reasoning_tokens": 0, + "rejected_prediction_tokens": 0, + "text_tokens": null + }, + "prompt_tokens": 567, + "prompt_tokens_details": { + "audio_tokens": 0, + "cached_tokens": 0, + "image_tokens": null, + "text_tokens": null + }, + "total_tokens": 597 + } +} \ No newline at end of file diff --git a/tests/test_handlers_llm_provider.py b/tests/test_handlers_llm_provider.py index ac51bebd..7c460905 100644 --- a/tests/test_handlers_llm_provider.py +++ b/tests/test_handlers_llm_provider.py @@ -5,12 +5,15 @@ """ import functools +import json import logging import os from collections.abc import Callable from enum import Enum +from pathlib import Path import pytest +from litellm.files.main import ModelResponse from PIL import Image from pydantic import BaseModel, Field from pydantic.dataclasses import dataclass @@ -26,6 +29,8 @@ from effectful.ops.syntax import ObjectInterpretation, implements from effectful.ops.types import NotHandled +FIXTURE_DIR = Path(__file__).resolve().parent / "fixtures" + # Check for API keys HAS_OPENAI_KEY = "OPENAI_API_KEY" in os.environ and os.environ["OPENAI_API_KEY"] HAS_ANTHROPIC_KEY = ( @@ -40,8 +45,11 @@ not HAS_ANTHROPIC_KEY, reason="ANTHROPIC_API_KEY environment variable not set" ) +REBUILD_FIXTURES = os.getenv("REBUILD_FIXTURES") == "true" # ============================================================================ + + # Test Fixtures and Mock Data # ============================================================================ def retry_on_error(error: type[Exception], n: int): @@ -61,6 +69,30 @@ def wrapper(*args, **kwargs): return decorator +class ReplayLiteLLMProvider(LiteLLMProvider): + test_id: str + + def __init__(self, request: pytest.FixtureRequest, *args, **kwargs): + super().__init__(*args, **kwargs) + self.test_id = request.node.nodeid + self.test_id = self.test_id.replace("/", 
"_").replace(":", "_") + + @implements(completion) + def _completion(self, *args, **kwargs): + path = FIXTURE_DIR / f"{self.test_id}.json" + if not REBUILD_FIXTURES: + if not path.exists(): + raise RuntimeError(f"Missing replay fixture: {path}") + with path.open() as f: + result = ModelResponse.model_validate(json.load(f)) + return result + result = fwd(self.model_name, *args, **(self.config | kwargs)) + path.parent.mkdir(exist_ok=True, parents=True) + with path.open("w") as f: + json.dump(result.model_dump(), f, indent=2, sort_keys=True) + return result + + class LimitLLMCallsHandler(ObjectInterpretation): max_calls: int no_calls: int = 0 @@ -132,10 +164,10 @@ class TestLiteLLMProvider: @requires_openai @pytest.mark.parametrize("model_name", ["gpt-4o-mini", "gpt-5-nano"]) - def test_simple_prompt_multiple_models(self, model_name): + def test_simple_prompt_multiple_models(self, request, model_name): """Test that LiteLLMProvider works with different model configurations.""" with ( - handler(LiteLLMProvider(model_name=model_name)), + handler(ReplayLiteLLMProvider(request, model_name=model_name)), handler(LimitLLMCallsHandler(max_calls=1)), ): result = simple_prompt("testing") @@ -149,10 +181,10 @@ def test_simple_prompt_multiple_models(self, model_name): pytest.param("claude-haiku-4-5", marks=requires_anthropic), ], ) - def test_simple_prompt_cross_endpoint(self, model_name): - """Test that LiteLLMProvider works across different API endpoints.""" + def test_simple_prompt_cross_endpoint(self, request, model_name): + """Test that ReplayLiteLLMProvider works across different API endpoints.""" with ( - handler(LiteLLMProvider(model_name=model_name)), + handler(ReplayLiteLLMProvider(request, model_name=model_name)), handler(LimitLLMCallsHandler(max_calls=1)), ): result = simple_prompt("testing") @@ -160,12 +192,12 @@ def test_simple_prompt_cross_endpoint(self, model_name): assert len(result) > 0 @requires_openai - def test_structured_output(self): + def 
test_structured_output(self, request): """Test LiteLLMProvider with structured Pydantic output.""" plot = "A rogue cop must stop a evil group from taking over a skyscraper." with ( - handler(LiteLLMProvider(model_name="gpt-5-nano")), + handler(ReplayLiteLLMProvider(request, model_name="gpt-5-nano")), handler(LimitLLMCallsHandler(max_calls=1)), ): classification = classify_genre(plot) @@ -177,10 +209,10 @@ def test_structured_output(self): assert len(classification.explanation) > 0 @requires_openai - def test_integer_return_type(self): + def test_integer_return_type(self, request): """Test LiteLLMProvider with integer return type.""" with ( - handler(LiteLLMProvider(model_name="gpt-5-nano")), + handler(ReplayLiteLLMProvider(request, model_name="gpt-5-nano")), handler(LimitLLMCallsHandler(max_calls=1)), ): result = generate_number(100) @@ -189,11 +221,15 @@ def test_integer_return_type(self): assert 1 <= result <= 100 @requires_openai - def test_with_config_params(self): + def test_with_config_params(self, request): """Test LiteLLMProvider accepts and uses additional configuration parameters.""" # Test with temperature parameter with ( - handler(LiteLLMProvider(model_name="gpt-4o-mini", temperature=0.1)), + handler( + ReplayLiteLLMProvider( + request, model_name="gpt-4o-mini", temperature=0.1 + ) + ), handler(LimitLLMCallsHandler(max_calls=1)), ): result = simple_prompt("deterministic test") @@ -204,12 +240,12 @@ class TestLLMLoggingHandler: """Tests for LLMLoggingHandler functionality.""" @requires_openai - def test_logs_requests(self, caplog): + def test_logs_requests(self, request, caplog): """Test that LLMLoggingHandler properly logs LLM requests.""" with caplog.at_level(logging.INFO): with ( + handler(ReplayLiteLLMProvider(request, model_name="gpt-4o-mini")), handler(LLMLoggingHandler()), - handler(LiteLLMProvider(model_name="gpt-4o-mini")), handler(LimitLLMCallsHandler(max_calls=1)), ): result = simple_prompt("testing") @@ -219,14 +255,14 @@ def 
test_logs_requests(self, caplog): assert any("llm.request" in record.message for record in caplog.records) @requires_openai - def test_custom_logger(self, caplog): + def test_custom_logger(self, request, caplog): """Test LLMLoggingHandler with a custom logger.""" custom_logger = logging.getLogger("test_custom_logger") with caplog.at_level(logging.INFO, logger="test_custom_logger"): with ( + handler(ReplayLiteLLMProvider(request, model_name="gpt-4o-mini")), handler(LLMLoggingHandler(logger=custom_logger)), - handler(LiteLLMProvider(model_name="gpt-4o-mini")), handler(LimitLLMCallsHandler(max_calls=1)), ): result = simple_prompt("testing") @@ -244,10 +280,10 @@ class TestProgramSynthesis: @requires_openai @retry_on_error(error=SynthesisError, n=3) - def test_generates_callable(self): + def test_generates_callable(self, request): """Test ProgramSynthesis handler generates executable code.""" with ( - handler(LiteLLMProvider(model_name="gpt-4o-mini")), + handler(ReplayLiteLLMProvider(request, model_name="gpt-4o-mini")), handler(ProgramSynthesis()), handler(LimitLLMCallsHandler(max_calls=1)), ): @@ -287,9 +323,9 @@ def categorise_image(image: Image.Image) -> str: @requires_openai -def test_image_input(): +def test_image_input(request): with ( - handler(LiteLLMProvider(model_name="gpt-4o")), + handler(ReplayLiteLLMProvider(request, model_name="gpt-4o")), handler(LimitLLMCallsHandler(max_calls=3)), ): assert any("smile" in categorise_image(smiley_face()) for _ in range(3)) @@ -311,11 +347,11 @@ def review_book(plot: str) -> BookReview: class TestPydanticBaseModelReturn: @requires_openai - def test_pydantic_basemodel_return(self): + def test_pydantic_basemodel_return(self, request): plot = "A young wizard discovers he has magical powers and goes to a school for wizards." 
with ( - handler(LiteLLMProvider(model_name="gpt-5-nano")), + handler(ReplayLiteLLMProvider(request, model_name="gpt-5-nano")), handler(LimitLLMCallsHandler(max_calls=1)), ): review = review_book(plot) diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..0f896036 --- /dev/null +++ b/uv.lock @@ -0,0 +1,3047 @@ +version = 1 +revision = 2 +requires-python = ">=3.12, <3.14" +resolution-markers = [ + "python_full_version >= '3.13'", + "python_full_version < '3.13'", +] + +[[package]] +name = "absl-py" +version = "2.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/10/2a/c93173ffa1b39c1d0395b7e842bbdc62e556ca9d8d3b5572926f3e4ca752/absl_py-2.3.1.tar.gz", hash = "sha256:a97820526f7fbfd2ec1bce83f3f25e3a14840dac0d8e02a0b71cd75db3f77fc9", size = 116588, upload-time = "2025-07-03T09:31:44.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/aa/ba0014cc4659328dc818a28827be78e6d97312ab0cb98105a770924dc11e/absl_py-2.3.1-py3-none-any.whl", hash = "sha256:eeecf07f0c2a93ace0772c92e596ace6d3d3996c042b2128459aaae2a76de11d", size = 135811, upload-time = "2025-07-03T09:31:42.253Z" }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.2" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623, upload-time = "2025-10-28T20:56:30.797Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664, upload-time = "2025-10-28T20:56:32.708Z" }, + { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808, upload-time = "2025-10-28T20:56:34.57Z" }, + { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863, upload-time = "2025-10-28T20:56:36.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586, upload-time = "2025-10-28T20:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625, upload-time = "2025-10-28T20:56:39.75Z" }, + { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281, upload-time = "2025-10-28T20:56:41.471Z" }, + { url = "https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431, upload-time = "2025-10-28T20:56:43.162Z" }, + { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846, upload-time = "2025-10-28T20:56:44.85Z" }, + { url = "https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606, upload-time = "2025-10-28T20:56:46.519Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663, upload-time = "2025-10-28T20:56:48.528Z" }, + { url = "https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939, upload-time = "2025-10-28T20:56:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132, upload-time = "2025-10-28T20:56:52.568Z" }, + { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802, upload-time = "2025-10-28T20:56:54.292Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512, upload-time = "2025-10-28T20:56:56.428Z" }, + { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690, upload-time = "2025-10-28T20:56:58.736Z" 
}, + { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465, upload-time = "2025-10-28T20:57:00.795Z" }, + { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" }, + { url = "https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" }, + { url = "https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387, upload-time = "2025-10-28T20:57:08.685Z" }, + { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314, upload-time = "2025-10-28T20:57:10.693Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317, upload-time = "2025-10-28T20:57:12.563Z" }, + { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539, upload-time = "2025-10-28T20:57:14.623Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597, upload-time = "2025-10-28T20:57:16.399Z" }, + { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006, upload-time = "2025-10-28T20:57:18.288Z" }, + { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220, upload-time = "2025-10-28T20:57:20.241Z" }, + { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570, upload-time 
= "2025-10-28T20:57:22.253Z" }, + { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407, upload-time = "2025-10-28T20:57:24.37Z" }, + { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" }, + { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 1758084, upload-time = "2025-10-28T20:57:28.349Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = "2025-10-28T20:57:30.233Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" }, + { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", 
hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "appnope" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" }, +] + +[[package]] +name = "asttokens" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = 
"sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, upload-time = "2025-11-15T16:43:48.578Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "autopep8" +version = "2.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycodestyle" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/d8/30873d2b7b57dee9263e53d142da044c4600a46f2d28374b3e38b023df16/autopep8-2.3.2.tar.gz", hash = "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758", size = 92210, upload-time = "2025-01-14T14:46:18.454Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/43/53afb8ba17218f19b77c7834128566c5bbb100a0ad9ba2e8e89d089d7079/autopep8-2.3.2-py2.py3-none-any.whl", hash = "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128", size = 45807, upload-time = "2025-01-14T14:46:15.466Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.14.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/e9/df2358efd7659577435e2177bfa69cba6c33216681af51a707193dec162a/beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e", size = 625822, upload-time = "2025-09-29T10:05:42.613Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/fe/3aed5d0be4d404d12d36ab97e2f1791424d9ca39c2f754a6285d59a3b01d/beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515", size = 106392, upload-time = "2025-09-29T10:05:43.771Z" }, +] + +[[package]] +name = "bleach" +version = "6.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/18/3c8523962314be6bf4c8989c79ad9531c825210dd13a8669f6b84336e8bd/bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22", size = 203533, upload-time = "2025-10-27T17:57:39.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/3a/577b549de0cc09d95f11087ee63c739bba856cd3952697eec4c4bb91350a/bleach-6.3.0-py3-none-any.whl", hash = 
"sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6", size = 164437, upload-time = "2025-10-27T17:57:37.538Z" }, +] + +[package.optional-dependencies] +css = [ + { name = "tinycss2" }, +] + +[[package]] +name = "certifi" +version = "2025.11.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = 
"2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = 
"2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", 
size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "colorful" +version = "0.5.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/31/109ef4bedeb32b4202e02ddb133162457adc4eb890a9ed9c05c9dd126ed0/colorful-0.5.8.tar.gz", hash = "sha256:bb16502b198be2f1c42ba3c52c703d5f651d826076817185f0294c1a549a7445", size = 209361, upload-time = "2025-10-29T11:53:21.663Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/11/25cdf9d5fc21efd30134fc74c43702c6f7ef09ebae8ed927f1283403ad8d/colorful-0.5.8-py2.py3-none-any.whl", hash = "sha256:a9381fdda3337fbaba5771991020abc69676afa102646650b759927892875992", size = 201334, upload-time = "2025-10-29T11:53:20.251Z" }, +] + +[[package]] +name = "comm" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319, upload-time = "2025-07-25T14:02:04.452Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size 
= 7294, upload-time = "2025-07-25T14:02:02.896Z" }, +] + +[[package]] +name = "coverage" +version = "7.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/89/26/4a96807b193b011588099c3b5c89fbb05294e5b90e71018e065465f34eb6/coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c", size = 819341, upload-time = "2025-11-18T13:34:20.766Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/bf/638c0427c0f0d47638242e2438127f3c8ee3cfc06c7fdeb16778ed47f836/coverage-7.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29644c928772c78512b48e14156b81255000dcfd4817574ff69def189bcb3647", size = 217704, upload-time = "2025-11-18T13:32:28.906Z" }, + { url = "https://files.pythonhosted.org/packages/08/e1/706fae6692a66c2d6b871a608bbde0da6281903fa0e9f53a39ed441da36a/coverage-7.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8638cbb002eaa5d7c8d04da667813ce1067080b9a91099801a0053086e52b736", size = 218064, upload-time = "2025-11-18T13:32:30.161Z" }, + { url = "https://files.pythonhosted.org/packages/a9/8b/eb0231d0540f8af3ffda39720ff43cb91926489d01524e68f60e961366e4/coverage-7.12.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083631eeff5eb9992c923e14b810a179798bb598e6a0dd60586819fc23be6e60", size = 249560, upload-time = "2025-11-18T13:32:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a1/67fb52af642e974d159b5b379e4d4c59d0ebe1288677fbd04bbffe665a82/coverage-7.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:99d5415c73ca12d558e07776bd957c4222c687b9f1d26fa0e1b57e3598bdcde8", size = 252318, upload-time = "2025-11-18T13:32:33.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/e5/38228f31b2c7665ebf9bdfdddd7a184d56450755c7e43ac721c11a4b8dab/coverage-7.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e949ebf60c717c3df63adb4a1a366c096c8d7fd8472608cd09359e1bd48ef59f", size = 253403, upload-time = "2025-11-18T13:32:34.45Z" }, + { url = "https://files.pythonhosted.org/packages/ec/4b/df78e4c8188f9960684267c5a4897836f3f0f20a20c51606ee778a1d9749/coverage-7.12.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d907ddccbca819afa2cd014bc69983b146cca2735a0b1e6259b2a6c10be1e70", size = 249984, upload-time = "2025-11-18T13:32:35.747Z" }, + { url = "https://files.pythonhosted.org/packages/ba/51/bb163933d195a345c6f63eab9e55743413d064c291b6220df754075c2769/coverage-7.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1518ecbad4e6173f4c6e6c4a46e49555ea5679bf3feda5edb1b935c7c44e8a0", size = 251339, upload-time = "2025-11-18T13:32:37.352Z" }, + { url = "https://files.pythonhosted.org/packages/15/40/c9b29cdb8412c837cdcbc2cfa054547dd83affe6cbbd4ce4fdb92b6ba7d1/coverage-7.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51777647a749abdf6f6fd8c7cffab12de68ab93aab15efc72fbbb83036c2a068", size = 249489, upload-time = "2025-11-18T13:32:39.212Z" }, + { url = "https://files.pythonhosted.org/packages/c8/da/b3131e20ba07a0de4437a50ef3b47840dfabf9293675b0cd5c2c7f66dd61/coverage-7.12.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:42435d46d6461a3b305cdfcad7cdd3248787771f53fe18305548cba474e6523b", size = 249070, upload-time = "2025-11-18T13:32:40.598Z" }, + { url = "https://files.pythonhosted.org/packages/70/81/b653329b5f6302c08d683ceff6785bc60a34be9ae92a5c7b63ee7ee7acec/coverage-7.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bcead88c8423e1855e64b8057d0544e33e4080b95b240c2a355334bb7ced937", size = 250929, upload-time = "2025-11-18T13:32:42.915Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/00/250ac3bca9f252a5fb1338b5ad01331ebb7b40223f72bef5b1b2cb03aa64/coverage-7.12.0-cp312-cp312-win32.whl", hash = "sha256:dcbb630ab034e86d2a0f79aefd2be07e583202f41e037602d438c80044957baa", size = 220241, upload-time = "2025-11-18T13:32:44.665Z" }, + { url = "https://files.pythonhosted.org/packages/64/1c/77e79e76d37ce83302f6c21980b45e09f8aa4551965213a10e62d71ce0ab/coverage-7.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:2fd8354ed5d69775ac42986a691fbf68b4084278710cee9d7c3eaa0c28fa982a", size = 221051, upload-time = "2025-11-18T13:32:46.008Z" }, + { url = "https://files.pythonhosted.org/packages/31/f5/641b8a25baae564f9e52cac0e2667b123de961985709a004e287ee7663cc/coverage-7.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:737c3814903be30695b2de20d22bcc5428fdae305c61ba44cdc8b3252984c49c", size = 219692, upload-time = "2025-11-18T13:32:47.372Z" }, + { url = "https://files.pythonhosted.org/packages/b8/14/771700b4048774e48d2c54ed0c674273702713c9ee7acdfede40c2666747/coverage-7.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47324fffca8d8eae7e185b5bb20c14645f23350f870c1649003618ea91a78941", size = 217725, upload-time = "2025-11-18T13:32:49.22Z" }, + { url = "https://files.pythonhosted.org/packages/17/a7/3aa4144d3bcb719bf67b22d2d51c2d577bf801498c13cb08f64173e80497/coverage-7.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ccf3b2ede91decd2fb53ec73c1f949c3e034129d1e0b07798ff1d02ea0c8fa4a", size = 218098, upload-time = "2025-11-18T13:32:50.78Z" }, + { url = "https://files.pythonhosted.org/packages/fc/9c/b846bbc774ff81091a12a10203e70562c91ae71badda00c5ae5b613527b1/coverage-7.12.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b365adc70a6936c6b0582dc38746b33b2454148c02349345412c6e743efb646d", size = 249093, upload-time = "2025-11-18T13:32:52.554Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/b6/67d7c0e1f400b32c883e9342de4a8c2ae7c1a0b57c5de87622b7262e2309/coverage-7.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc13baf85cd8a4cfcf4a35c7bc9d795837ad809775f782f697bf630b7e200211", size = 251686, upload-time = "2025-11-18T13:32:54.862Z" }, + { url = "https://files.pythonhosted.org/packages/cc/75/b095bd4b39d49c3be4bffbb3135fea18a99a431c52dd7513637c0762fecb/coverage-7.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:099d11698385d572ceafb3288a5b80fe1fc58bf665b3f9d362389de488361d3d", size = 252930, upload-time = "2025-11-18T13:32:56.417Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f3/466f63015c7c80550bead3093aacabf5380c1220a2a93c35d374cae8f762/coverage-7.12.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:473dc45d69694069adb7680c405fb1e81f60b2aff42c81e2f2c3feaf544d878c", size = 249296, upload-time = "2025-11-18T13:32:58.074Z" }, + { url = "https://files.pythonhosted.org/packages/27/86/eba2209bf2b7e28c68698fc13437519a295b2d228ba9e0ec91673e09fa92/coverage-7.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:583f9adbefd278e9de33c33d6846aa8f5d164fa49b47144180a0e037f0688bb9", size = 251068, upload-time = "2025-11-18T13:32:59.646Z" }, + { url = "https://files.pythonhosted.org/packages/ec/55/ca8ae7dbba962a3351f18940b359b94c6bafdd7757945fdc79ec9e452dc7/coverage-7.12.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2089cc445f2dc0af6f801f0d1355c025b76c24481935303cf1af28f636688f0", size = 249034, upload-time = "2025-11-18T13:33:01.481Z" }, + { url = "https://files.pythonhosted.org/packages/7a/d7/39136149325cad92d420b023b5fd900dabdd1c3a0d1d5f148ef4a8cedef5/coverage-7.12.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:950411f1eb5d579999c5f66c62a40961f126fc71e5e14419f004471957b51508", size = 248853, upload-time = "2025-11-18T13:33:02.935Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/b6/76e1add8b87ef60e00643b0b7f8f7bb73d4bf5249a3be19ebefc5793dd25/coverage-7.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1aab7302a87bafebfe76b12af681b56ff446dc6f32ed178ff9c092ca776e6bc", size = 250619, upload-time = "2025-11-18T13:33:04.336Z" }, + { url = "https://files.pythonhosted.org/packages/95/87/924c6dc64f9203f7a3c1832a6a0eee5a8335dbe5f1bdadcc278d6f1b4d74/coverage-7.12.0-cp313-cp313-win32.whl", hash = "sha256:d7e0d0303c13b54db495eb636bc2465b2fb8475d4c8bcec8fe4b5ca454dfbae8", size = 220261, upload-time = "2025-11-18T13:33:06.493Z" }, + { url = "https://files.pythonhosted.org/packages/91/77/dd4aff9af16ff776bf355a24d87eeb48fc6acde54c907cc1ea89b14a8804/coverage-7.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:ce61969812d6a98a981d147d9ac583a36ac7db7766f2e64a9d4d059c2fe29d07", size = 221072, upload-time = "2025-11-18T13:33:07.926Z" }, + { url = "https://files.pythonhosted.org/packages/70/49/5c9dc46205fef31b1b226a6e16513193715290584317fd4df91cdaf28b22/coverage-7.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bcec6f47e4cb8a4c2dc91ce507f6eefc6a1b10f58df32cdc61dff65455031dfc", size = 219702, upload-time = "2025-11-18T13:33:09.631Z" }, + { url = "https://files.pythonhosted.org/packages/9b/62/f87922641c7198667994dd472a91e1d9b829c95d6c29529ceb52132436ad/coverage-7.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:459443346509476170d553035e4a3eed7b860f4fe5242f02de1010501956ce87", size = 218420, upload-time = "2025-11-18T13:33:11.153Z" }, + { url = "https://files.pythonhosted.org/packages/85/dd/1cc13b2395ef15dbb27d7370a2509b4aee77890a464fb35d72d428f84871/coverage-7.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04a79245ab2b7a61688958f7a855275997134bc84f4a03bc240cf64ff132abf6", size = 218773, upload-time = "2025-11-18T13:33:12.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/40/35773cc4bb1e9d4658d4fb669eb4195b3151bef3bbd6f866aba5cd5dac82/coverage-7.12.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:09a86acaaa8455f13d6a99221d9654df249b33937b4e212b4e5a822065f12aa7", size = 260078, upload-time = "2025-11-18T13:33:14.037Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ee/231bb1a6ffc2905e396557585ebc6bdc559e7c66708376d245a1f1d330fc/coverage-7.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:907e0df1b71ba77463687a74149c6122c3f6aac56c2510a5d906b2f368208560", size = 262144, upload-time = "2025-11-18T13:33:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/28/be/32f4aa9f3bf0b56f3971001b56508352c7753915345d45fab4296a986f01/coverage-7.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b57e2d0ddd5f0582bae5437c04ee71c46cd908e7bc5d4d0391f9a41e812dd12", size = 264574, upload-time = "2025-11-18T13:33:17.354Z" }, + { url = "https://files.pythonhosted.org/packages/68/7c/00489fcbc2245d13ab12189b977e0cf06ff3351cb98bc6beba8bd68c5902/coverage-7.12.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:58c1c6aa677f3a1411fe6fb28ec3a942e4f665df036a3608816e0847fad23296", size = 259298, upload-time = "2025-11-18T13:33:18.958Z" }, + { url = "https://files.pythonhosted.org/packages/96/b4/f0760d65d56c3bea95b449e02570d4abd2549dc784bf39a2d4721a2d8ceb/coverage-7.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c589361263ab2953e3c4cd2a94db94c4ad4a8e572776ecfbad2389c626e4507", size = 262150, upload-time = "2025-11-18T13:33:20.644Z" }, + { url = "https://files.pythonhosted.org/packages/c5/71/9a9314df00f9326d78c1e5a910f520d599205907432d90d1c1b7a97aa4b1/coverage-7.12.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:91b810a163ccad2e43b1faa11d70d3cf4b6f3d83f9fd5f2df82a32d47b648e0d", size = 259763, upload-time = 
"2025-11-18T13:33:22.189Z" }, + { url = "https://files.pythonhosted.org/packages/10/34/01a0aceed13fbdf925876b9a15d50862eb8845454301fe3cdd1df08b2182/coverage-7.12.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:40c867af715f22592e0d0fb533a33a71ec9e0f73a6945f722a0c85c8c1cbe3a2", size = 258653, upload-time = "2025-11-18T13:33:24.239Z" }, + { url = "https://files.pythonhosted.org/packages/8d/04/81d8fd64928acf1574bbb0181f66901c6c1c6279c8ccf5f84259d2c68ae9/coverage-7.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:68b0d0a2d84f333de875666259dadf28cc67858bc8fd8b3f1eae84d3c2bec455", size = 260856, upload-time = "2025-11-18T13:33:26.365Z" }, + { url = "https://files.pythonhosted.org/packages/f2/76/fa2a37bfaeaf1f766a2d2360a25a5297d4fb567098112f6517475eee120b/coverage-7.12.0-cp313-cp313t-win32.whl", hash = "sha256:73f9e7fbd51a221818fd11b7090eaa835a353ddd59c236c57b2199486b116c6d", size = 220936, upload-time = "2025-11-18T13:33:28.165Z" }, + { url = "https://files.pythonhosted.org/packages/f9/52/60f64d932d555102611c366afb0eb434b34266b1d9266fc2fe18ab641c47/coverage-7.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:24cff9d1f5743f67db7ba46ff284018a6e9aeb649b67aa1e70c396aa1b7cb23c", size = 222001, upload-time = "2025-11-18T13:33:29.656Z" }, + { url = "https://files.pythonhosted.org/packages/77/df/c303164154a5a3aea7472bf323b7c857fed93b26618ed9fc5c2955566bb0/coverage-7.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c87395744f5c77c866d0f5a43d97cc39e17c7f1cb0115e54a2fe67ca75c5d14d", size = 220273, upload-time = "2025-11-18T13:33:31.415Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a3/43b749004e3c09452e39bb56347a008f0a0668aad37324a99b5c8ca91d9e/coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a", size = 209503, upload-time = "2025-11-18T13:34:18.892Z" }, +] + +[[package]] +name = "debugpy" +version = "1.8.17" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/15/ad/71e708ff4ca377c4230530d6a7aa7992592648c122a2cd2b321cf8b35a76/debugpy-1.8.17.tar.gz", hash = "sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e", size = 1644129, upload-time = "2025-09-17T16:33:20.633Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/2b/9d8e65beb2751876c82e1aceb32f328c43ec872711fa80257c7674f45650/debugpy-1.8.17-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d", size = 2549522, upload-time = "2025-09-17T16:33:38.466Z" }, + { url = "https://files.pythonhosted.org/packages/b4/78/eb0d77f02971c05fca0eb7465b18058ba84bd957062f5eec82f941ac792a/debugpy-1.8.17-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc", size = 4309417, upload-time = "2025-09-17T16:33:41.299Z" }, + { url = "https://files.pythonhosted.org/packages/37/42/c40f1d8cc1fed1e75ea54298a382395b8b937d923fcf41ab0797a554f555/debugpy-1.8.17-cp312-cp312-win32.whl", hash = "sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf", size = 5277130, upload-time = "2025-09-17T16:33:43.554Z" }, + { url = "https://files.pythonhosted.org/packages/72/22/84263b205baad32b81b36eac076de0cdbe09fe2d0637f5b32243dc7c925b/debugpy-1.8.17-cp312-cp312-win_amd64.whl", hash = "sha256:e8f8f61c518952fb15f74a302e068b48d9c4691768ade433e4adeea961993464", size = 5319053, upload-time = "2025-09-17T16:33:53.033Z" }, + { url = "https://files.pythonhosted.org/packages/50/76/597e5cb97d026274ba297af8d89138dfd9e695767ba0e0895edb20963f40/debugpy-1.8.17-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:857c1dd5d70042502aef1c6d1c2801211f3ea7e56f75e9c335f434afb403e464", size = 2538386, upload-time = "2025-09-17T16:33:54.594Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/60/ce5c34fcdfec493701f9d1532dba95b21b2f6394147234dce21160bd923f/debugpy-1.8.17-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:3bea3b0b12f3946e098cce9b43c3c46e317b567f79570c3f43f0b96d00788088", size = 4292100, upload-time = "2025-09-17T16:33:56.353Z" }, + { url = "https://files.pythonhosted.org/packages/e8/95/7873cf2146577ef71d2a20bf553f12df865922a6f87b9e8ee1df04f01785/debugpy-1.8.17-cp313-cp313-win32.whl", hash = "sha256:e34ee844c2f17b18556b5bbe59e1e2ff4e86a00282d2a46edab73fd7f18f4a83", size = 5277002, upload-time = "2025-09-17T16:33:58.231Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/18c79a1cee5ff539a94ec4aa290c1c069a5580fd5cfd2fb2e282f8e905da/debugpy-1.8.17-cp313-cp313-win_amd64.whl", hash = "sha256:6c5cd6f009ad4fca8e33e5238210dc1e5f42db07d4b6ab21ac7ffa904a196420", size = 5319047, upload-time = "2025-09-17T16:34:00.586Z" }, + { url = "https://files.pythonhosted.org/packages/b0/d0/89247ec250369fc76db477720a26b2fce7ba079ff1380e4ab4529d2fe233/debugpy-1.8.17-py2.py3-none-any.whl", hash = "sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef", size = 5283210, upload-time = "2025-09-17T16:34:25.835Z" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "dm-tree" +version = "0.1.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "absl-py" }, + { name = "attrs" }, + { name = "numpy" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/83/ce29720ccf934c6cfa9b9c95ebbe96558386e66886626066632b5e44afed/dm_tree-0.1.9.tar.gz", hash = "sha256:a4c7db3d3935a5a2d5e4b383fc26c6b0cd6f78c6d4605d3e7b518800ecd5342b", size = 35623, upload-time = "2025-01-30T20:45:37.13Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ee/02/61aa90ab695918b4389d75c99bf0ec3cd0abacf1cadbef4053626f23ce34/dm_tree-0.1.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a8d20eeab7fde77a3ed71f07716021eb0edfb4812a128eb381d108af3a310257", size = 175012, upload-time = "2025-03-31T08:35:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/120cd40556407879c1069941bd8b0d1a75754128c1a5bf0e27dbcf2a49fc/dm_tree-0.1.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80c43417814b1181d3367b335460bfdd30b79ee187a64220e11f6ddd093a4b15", size = 147204, upload-time = "2025-01-30T20:45:25.541Z" }, + { url = "https://files.pythonhosted.org/packages/86/52/27607a275c12858b979b8e943d2bd3bd0f9028503bb7079d5830a8b3cac0/dm_tree-0.1.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2334cfe9d2ed4293f9f1c7aefba0657deaab9ea74b5fadd966f6d01d9b6b42d9", size = 153013, upload-time = "2025-01-30T20:45:26.886Z" }, + { url = "https://files.pythonhosted.org/packages/ea/97/4f78412f73a9350bc8f934441bae5b68b102c8f4240a7f06b4114b51d6de/dm_tree-0.1.9-cp312-cp312-win_amd64.whl", hash = "sha256:9020a5ce256fcc83aa4bc190cc96dd66e87685db0a6e501b0c06aa492c2e38fc", size = 102022, upload-time = "2025-01-30T20:45:28.701Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/823788cd0f7964cadcfa56d1e0f9e5e987ee73b5db6273bc00168f524f1a/dm_tree-0.1.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cfa33c2e028155810ad1b4e11928707bf47489516763a86e79cab2954d23bf68", size = 175000, upload-time = "2025-03-31T08:35:42.483Z" }, + { url = "https://files.pythonhosted.org/packages/37/6a/512abdf7f20acc6cd6fce77f7663014d129aa313b5953aa2603d58fdb0c9/dm_tree-0.1.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05622d074353cf434049206e53c12147903a048c4bd7d77f2800d427413ad78", size = 147210, upload-time = "2025-01-30T20:45:29.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/0a/f4d72ffb64ab3edc1fa66261f81ee3b4142ab14cd8aa1dfc7bbeca5ee4ba/dm_tree-0.1.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68b0efad76703dd4648586c75618a48cdd671b68c3266fe980e323c15423607", size = 153043, upload-time = "2025-01-30T20:45:30.834Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ee/529ce999770b4d621a64af86c60cfee52f0cdd7294752105179ebf1c07c6/dm_tree-0.1.9-cp313-cp313-win_amd64.whl", hash = "sha256:e97c34fcb44941c36b7ee81dcdbceba0fbe728bddcc77e5837ab2eb665bcbff8", size = 102043, upload-time = "2025-01-30T20:45:32.004Z" }, + { url = "https://files.pythonhosted.org/packages/ee/3c/5b40f8862390e9172e776cf610f3791c1af01f140a5698799fbe4a97206f/dm_tree-0.1.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b06e7a5da1c31a82521a60060573527e8d24b9920fdd20b2ec86f08412737598", size = 180821, upload-time = "2025-03-31T08:35:44.474Z" }, + { url = "https://files.pythonhosted.org/packages/84/1d/3cdbeeb3f6937a47a26cee502bffeccc2e55b97dfcce8a1d1135ea1b5b47/dm_tree-0.1.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6893fcdc5cf1a4f459cfc383526d35d42e7c671ae565d7e429a2f2cb2cb93e89", size = 147282, upload-time = "2025-01-30T20:45:33.896Z" }, + { url = "https://files.pythonhosted.org/packages/c5/37/15603079854394f16e3833a7b50696c1f3cbf30a2243a119f64f18a16f36/dm_tree-0.1.9-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1f5d1e96b3a7de22b25b13a5eb30f41f8cf9c02dd4479a24920de99e780903c", size = 153052, upload-time = "2025-01-30T20:45:35.907Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = 
"2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "effectful" +version = "0.2.3" +source = { editable = "." } + +[package.optional-dependencies] +docs = [ + { name = "dm-tree" }, + { name = "jax" }, + { name = "myst-parser" }, + { name = "nbsphinx" }, + { name = "numpyro" }, + { name = "prettyprinter" }, + { name = "pypandoc-binary" }, + { name = "pyro-ppl" }, + { name = "sphinx" }, + { name = "sphinx-autodoc-typehints" }, + { name = "sphinx-rtd-theme" }, + { name = "sphinxcontrib-bibtex" }, + { name = "torch" }, +] +jax = [ + { name = "dm-tree" }, + { name = "jax" }, +] +llm = [ + { name = "litellm" }, + { name = "pillow" }, + { name = "pydantic" }, +] +numpyro = [ + { name = "dm-tree" }, + { name = "numpyro" }, +] +prettyprinter = [ + { name = "prettyprinter" }, +] +pyro = [ + { name = "dm-tree" }, + { name = "pyro-ppl" }, +] +test = [ + { name = "dm-tree" }, + { name = "jax" }, + { name = "mypy" }, + { name = "myst-parser" }, + { name = "nbqa" }, + { name = "nbsphinx" }, + { name = "nbval" }, + { name = "numpyro" }, + { name = "prettyprinter" }, + { name = "pypandoc-binary" }, + { name = "pyro-ppl" }, + { name = "pytest" }, + { name = "pytest-benchmark" }, + { name = "pytest-cov" }, + { name = "pytest-xdist" }, + { name = "ruff" }, + { name = "sphinx" }, + { name = "sphinx-autodoc-typehints" }, + { name = "sphinx-rtd-theme" }, + { name = "sphinxcontrib-bibtex" }, + { name = "torch" }, +] +torch = [ + { name = "dm-tree" }, + { name = "torch" }, +] + +[package.dev-dependencies] +dev = [ + { name = "effectful", extra = ["docs", "jax", "llm", "numpyro", "pyro", "test", "torch"] }, +] + +[package.metadata] +requires-dist = [ + { name = "dm-tree", marker = "extra 
== 'jax'" }, + { name = "dm-tree", marker = "extra == 'numpyro'" }, + { name = "dm-tree", marker = "extra == 'pyro'" }, + { name = "dm-tree", marker = "extra == 'torch'" }, + { name = "effectful", extras = ["torch", "pyro", "jax", "numpyro", "docs", "prettyprinter"], marker = "extra == 'test'" }, + { name = "effectful", extras = ["torch", "pyro", "jax", "numpyro", "prettyprinter"], marker = "extra == 'docs'" }, + { name = "jax", marker = "extra == 'jax'" }, + { name = "litellm", marker = "extra == 'llm'" }, + { name = "mypy", marker = "extra == 'test'" }, + { name = "myst-parser", marker = "extra == 'docs'" }, + { name = "nbqa", marker = "extra == 'test'" }, + { name = "nbsphinx", marker = "extra == 'docs'" }, + { name = "nbval", marker = "extra == 'test'" }, + { name = "numpyro", marker = "extra == 'numpyro'", specifier = ">=0.19" }, + { name = "pillow", marker = "extra == 'llm'" }, + { name = "prettyprinter", marker = "extra == 'prettyprinter'" }, + { name = "pydantic", marker = "extra == 'llm'" }, + { name = "pypandoc-binary", marker = "extra == 'docs'", specifier = "<1.16" }, + { name = "pyro-ppl", marker = "extra == 'pyro'", specifier = ">=1.9.1" }, + { name = "pytest", marker = "extra == 'test'" }, + { name = "pytest-benchmark", marker = "extra == 'test'" }, + { name = "pytest-cov", marker = "extra == 'test'" }, + { name = "pytest-xdist", marker = "extra == 'test'" }, + { name = "ruff", marker = "extra == 'test'" }, + { name = "sphinx", marker = "extra == 'docs'" }, + { name = "sphinx-autodoc-typehints", marker = "extra == 'docs'" }, + { name = "sphinx-rtd-theme", marker = "extra == 'docs'" }, + { name = "sphinxcontrib-bibtex", marker = "extra == 'docs'" }, + { name = "torch", marker = "extra == 'torch'" }, +] +provides-extras = ["torch", "pyro", "jax", "numpyro", "llm", "prettyprinter", "docs", "test"] + +[package.metadata.requires-dev] +dev = [{ name = "effectful", extras = ["torch", "pyro", "jax", "numpyro", "llm", "docs", "test"] }] + +[[package]] +name = 
"execnet" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, +] + +[[package]] +name = "executing" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, +] + +[[package]] +name = "fastjsonschema" +version = "2.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/b5/23b216d9d985a956623b6bd12d4086b60f0059b27799f23016af04a74ea1/fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de", size = 374130, upload-time = "2025-08-14T18:49:36.666Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/a8/20d0723294217e47de6d9e2e40fd4a9d2f7c4b6ef974babd482a59743694/fastjsonschema-2.21.2-py3-none-any.whl", hash = 
"sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463", size = 24024, upload-time = "2025-08-14T18:49:34.776Z" }, +] + +[[package]] +name = "fastuuid" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232, upload-time = "2025-10-19T22:19:22.402Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164, upload-time = "2025-10-19T22:31:45.635Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837, upload-time = "2025-10-19T22:38:38.53Z" }, + { url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370, upload-time = "2025-10-19T22:40:26.07Z" }, + { url = "https://files.pythonhosted.org/packages/14/dd/5927f0a523d8e6a76b70968e6004966ee7df30322f5fc9b6cdfb0276646a/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c9ec605ace243b6dbe3bd27ebdd5d33b00d8d1d3f580b39fdd15cd96fd71796", size = 277766, upload-time = "2025-10-19T22:37:23.779Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/6e/c0fb547eef61293153348f12e0f75a06abb322664b34a1573a7760501336/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:808527f2407f58a76c916d6aa15d58692a4a019fdf8d4c32ac7ff303b7d7af09", size = 278105, upload-time = "2025-10-19T22:26:56.821Z" }, + { url = "https://files.pythonhosted.org/packages/2d/b1/b9c75e03b768f61cf2e84ee193dc18601aeaf89a4684b20f2f0e9f52b62c/fastuuid-0.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fb3c0d7fef6674bbeacdd6dbd386924a7b60b26de849266d1ff6602937675c8", size = 301564, upload-time = "2025-10-19T22:30:31.604Z" }, + { url = "https://files.pythonhosted.org/packages/fc/fa/f7395fdac07c7a54f18f801744573707321ca0cee082e638e36452355a9d/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab3f5d36e4393e628a4df337c2c039069344db5f4b9d2a3c9cea48284f1dd741", size = 459659, upload-time = "2025-10-19T22:31:32.341Z" }, + { url = "https://files.pythonhosted.org/packages/66/49/c9fd06a4a0b1f0f048aacb6599e7d96e5d6bc6fa680ed0d46bf111929d1b/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b9a0ca4f03b7e0b01425281ffd44e99d360e15c895f1907ca105854ed85e2057", size = 478430, upload-time = "2025-10-19T22:26:22.962Z" }, + { url = "https://files.pythonhosted.org/packages/be/9c/909e8c95b494e8e140e8be6165d5fc3f61fdc46198c1554df7b3e1764471/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3acdf655684cc09e60fb7e4cf524e8f42ea760031945aa8086c7eae2eeeabeb8", size = 450894, upload-time = "2025-10-19T22:27:01.647Z" }, + { url = "https://files.pythonhosted.org/packages/90/eb/d29d17521976e673c55ef7f210d4cdd72091a9ec6755d0fd4710d9b3c871/fastuuid-0.14.0-cp312-cp312-win32.whl", hash = "sha256:9579618be6280700ae36ac42c3efd157049fe4dd40ca49b021280481c78c3176", size = 154374, upload-time = "2025-10-19T22:29:19.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/fc/f5c799a6ea6d877faec0472d0b27c079b47c86b1cdc577720a5386483b36/fastuuid-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:d9e4332dc4ba054434a9594cbfaf7823b57993d7d8e7267831c3e059857cf397", size = 156550, upload-time = "2025-10-19T22:27:49.658Z" }, + { url = "https://files.pythonhosted.org/packages/a5/83/ae12dd39b9a39b55d7f90abb8971f1a5f3c321fd72d5aa83f90dc67fe9ed/fastuuid-0.14.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77a09cb7427e7af74c594e409f7731a0cf887221de2f698e1ca0ebf0f3139021", size = 510720, upload-time = "2025-10-19T22:42:34.633Z" }, + { url = "https://files.pythonhosted.org/packages/53/b0/a4b03ff5d00f563cc7546b933c28cb3f2a07344b2aec5834e874f7d44143/fastuuid-0.14.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9bd57289daf7b153bfa3e8013446aa144ce5e8c825e9e366d455155ede5ea2dc", size = 262024, upload-time = "2025-10-19T22:30:25.482Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6d/64aee0a0f6a58eeabadd582e55d0d7d70258ffdd01d093b30c53d668303b/fastuuid-0.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ac60fc860cdf3c3f327374db87ab8e064c86566ca8c49d2e30df15eda1b0c2d5", size = 251679, upload-time = "2025-10-19T22:36:14.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/a7e9cda8369e4f7919d36552db9b2ae21db7915083bc6336f1b0082c8b2e/fastuuid-0.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab32f74bd56565b186f036e33129da77db8be09178cd2f5206a5d4035fb2a23f", size = 277862, upload-time = "2025-10-19T22:36:23.302Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d3/8ce11827c783affffd5bd4d6378b28eb6cc6d2ddf41474006b8d62e7448e/fastuuid-0.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e678459cf4addaedd9936bbb038e35b3f6b2061330fd8f2f6a1d80414c0f87", size = 278278, upload-time = "2025-10-19T22:29:43.809Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/51/680fb6352d0bbade04036da46264a8001f74b7484e2fd1f4da9e3db1c666/fastuuid-0.14.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1e3cc56742f76cd25ecb98e4b82a25f978ccffba02e4bdce8aba857b6d85d87b", size = 301788, upload-time = "2025-10-19T22:36:06.825Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7c/2014b5785bd8ebdab04ec857635ebd84d5ee4950186a577db9eff0fb8ff6/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:cb9a030f609194b679e1660f7e32733b7a0f332d519c5d5a6a0a580991290022", size = 459819, upload-time = "2025-10-19T22:35:31.623Z" }, + { url = "https://files.pythonhosted.org/packages/01/d2/524d4ceeba9160e7a9bc2ea3e8f4ccf1ad78f3bde34090ca0c51f09a5e91/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:09098762aad4f8da3a888eb9ae01c84430c907a297b97166b8abc07b640f2995", size = 478546, upload-time = "2025-10-19T22:26:03.023Z" }, + { url = "https://files.pythonhosted.org/packages/bc/17/354d04951ce114bf4afc78e27a18cfbd6ee319ab1829c2d5fb5e94063ac6/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1383fff584fa249b16329a059c68ad45d030d5a4b70fb7c73a08d98fd53bcdab", size = 450921, upload-time = "2025-10-19T22:31:02.151Z" }, + { url = "https://files.pythonhosted.org/packages/fb/be/d7be8670151d16d88f15bb121c5b66cdb5ea6a0c2a362d0dcf30276ade53/fastuuid-0.14.0-cp313-cp313-win32.whl", hash = "sha256:a0809f8cc5731c066c909047f9a314d5f536c871a7a22e815cc4967c110ac9ad", size = 154559, upload-time = "2025-10-19T22:36:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/22/1d/5573ef3624ceb7abf4a46073d3554e37191c868abc3aecd5289a72f9810a/fastuuid-0.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:0df14e92e7ad3276327631c9e7cec09e32572ce82089c55cb1bb8df71cf394ed", size = 156539, upload-time = "2025-10-19T22:33:35.898Z" }, +] + +[[package]] +name = "filelock" +version = "3.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { 
url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = 
"2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", 
size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "fsspec" +version = "2025.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/7f/2747c0d332b9acfa75dc84447a066fdf812b5a6b8d30472b74d309bfe8cb/fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59", size = 309285, upload-time = "2025-10-30T14:58:44.036Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/02/a6b21098b1d5d6249b7c5ab69dde30108a71e4e819d4a9778f1de1d5b70d/fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d", size = 200966, upload-time = "2025-10-30T14:58:42.53Z" }, +] + +[[package]] +name = "grpcio" +version = "1.67.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/53/d9282a66a5db45981499190b77790570617a604a38f3d103d0400974aeb5/grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732", size = 12580022, upload-time = "2024-10-29T06:30:07.787Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/25/6f95bd18d5f506364379eabc0d5874873cc7dbdaf0757df8d1e82bc07a88/grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953", size = 5089809, upload-time = "2024-10-29T06:24:31.24Z" }, + { url = "https://files.pythonhosted.org/packages/10/3f/d79e32e5d0354be33a12db2267c66d3cfeff700dd5ccdd09fd44a3ff4fb6/grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb", size = 10981985, upload-time = "2024-10-29T06:24:34.942Z" }, + { url = "https://files.pythonhosted.org/packages/21/f2/36fbc14b3542e3a1c20fb98bd60c4732c55a44e374a4eb68f91f28f14aab/grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0", size = 5588770, upload-time = "2024-10-29T06:24:38.145Z" }, + { url = "https://files.pythonhosted.org/packages/0d/af/bbc1305df60c4e65de8c12820a942b5e37f9cf684ef5e49a63fbb1476a73/grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af", size = 6214476, upload-time = "2024-10-29T06:24:41.006Z" }, + { url = "https://files.pythonhosted.org/packages/92/cf/1d4c3e93efa93223e06a5c83ac27e32935f998bc368e276ef858b8883154/grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e", size = 5850129, upload-time = "2024-10-29T06:24:43.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/ca/26195b66cb253ac4d5ef59846e354d335c9581dba891624011da0e95d67b/grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75", size = 6568489, upload-time = "2024-10-29T06:24:46.453Z" }, + { url = "https://files.pythonhosted.org/packages/d1/94/16550ad6b3f13b96f0856ee5dfc2554efac28539ee84a51d7b14526da985/grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38", size = 6149369, upload-time = "2024-10-29T06:24:49.112Z" }, + { url = "https://files.pythonhosted.org/packages/33/0d/4c3b2587e8ad7f121b597329e6c2620374fccbc2e4e1aa3c73ccc670fde4/grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78", size = 3599176, upload-time = "2024-10-29T06:24:51.443Z" }, + { url = "https://files.pythonhosted.org/packages/7d/36/0c03e2d80db69e2472cf81c6123aa7d14741de7cf790117291a703ae6ae1/grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc", size = 4346574, upload-time = "2024-10-29T06:24:54.587Z" }, + { url = "https://files.pythonhosted.org/packages/12/d2/2f032b7a153c7723ea3dea08bffa4bcaca9e0e5bdf643ce565b76da87461/grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b", size = 5091487, upload-time = "2024-10-29T06:24:57.416Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ae/ea2ff6bd2475a082eb97db1104a903cf5fc57c88c87c10b3c3f41a184fc0/grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1", size = 10943530, upload-time = "2024-10-29T06:25:01.062Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/62/646be83d1a78edf8d69b56647327c9afc223e3140a744c59b25fbb279c3b/grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af", size = 5589079, upload-time = "2024-10-29T06:25:04.254Z" }, + { url = "https://files.pythonhosted.org/packages/d0/25/71513d0a1b2072ce80d7f5909a93596b7ed10348b2ea4fdcbad23f6017bf/grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955", size = 6213542, upload-time = "2024-10-29T06:25:06.824Z" }, + { url = "https://files.pythonhosted.org/packages/76/9a/d21236297111052dcb5dc85cd77dc7bf25ba67a0f55ae028b2af19a704bc/grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8", size = 5850211, upload-time = "2024-10-29T06:25:10.149Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fe/70b1da9037f5055be14f359026c238821b9bcf6ca38a8d760f59a589aacd/grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62", size = 6572129, upload-time = "2024-10-29T06:25:12.853Z" }, + { url = "https://files.pythonhosted.org/packages/74/0d/7df509a2cd2a54814598caf2fb759f3e0b93764431ff410f2175a6efb9e4/grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb", size = 6149819, upload-time = "2024-10-29T06:25:15.803Z" }, + { url = "https://files.pythonhosted.org/packages/0a/08/bc3b0155600898fd10f16b79054e1cca6cb644fa3c250c0fe59385df5e6f/grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121", size = 3596561, upload-time = "2024-10-29T06:25:19.348Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/96/44759eca966720d0f3e1b105c43f8ad4590c97bf8eb3cd489656e9590baa/grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba", size = 4346042, upload-time = "2024-10-29T06:25:21.939Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "hf-xet" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/6e/0f11bacf08a67f7fb5ee09740f2ca54163863b07b70d579356e9222ce5d8/hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f", size = 506020, upload-time = "2025-10-24T19:04:32.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/a5/85ef910a0aa034a2abcfadc360ab5ac6f6bc4e9112349bd40ca97551cff0/hf_xet-1.2.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:ceeefcd1b7aed4956ae8499e2199607765fbd1c60510752003b6cc0b8413b649", size = 2861870, upload-time = "2025-10-24T19:04:11.422Z" }, + { url = "https://files.pythonhosted.org/packages/ea/40/e2e0a7eb9a51fe8828ba2d47fe22a7e74914ea8a0db68a18c3aa7449c767/hf_xet-1.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b70218dd548e9840224df5638fdc94bd033552963cfa97f9170829381179c813", size = 2717584, upload-time = 
"2025-10-24T19:04:09.586Z" }, + { url = "https://files.pythonhosted.org/packages/a5/7d/daf7f8bc4594fdd59a8a596f9e3886133fdc68e675292218a5e4c1b7e834/hf_xet-1.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d40b18769bb9a8bc82a9ede575ce1a44c75eb80e7375a01d76259089529b5dc", size = 3315004, upload-time = "2025-10-24T19:04:00.314Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ba/45ea2f605fbf6d81c8b21e4d970b168b18a53515923010c312c06cd83164/hf_xet-1.2.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:cd3a6027d59cfb60177c12d6424e31f4b5ff13d8e3a1247b3a584bf8977e6df5", size = 3222636, upload-time = "2025-10-24T19:03:58.111Z" }, + { url = "https://files.pythonhosted.org/packages/4a/1d/04513e3cab8f29ab8c109d309ddd21a2705afab9d52f2ba1151e0c14f086/hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6de1fc44f58f6dd937956c8d304d8c2dea264c80680bcfa61ca4a15e7b76780f", size = 3408448, upload-time = "2025-10-24T19:04:20.951Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7c/60a2756d7feec7387db3a1176c632357632fbe7849fce576c5559d4520c7/hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f182f264ed2acd566c514e45da9f2119110e48a87a327ca271027904c70c5832", size = 3503401, upload-time = "2025-10-24T19:04:22.549Z" }, + { url = "https://files.pythonhosted.org/packages/4e/64/48fffbd67fb418ab07451e4ce641a70de1c40c10a13e25325e24858ebe5a/hf_xet-1.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:293a7a3787e5c95d7be1857358a9130694a9c6021de3f27fa233f37267174382", size = 2900866, upload-time = "2025-10-24T19:04:33.461Z" }, + { url = "https://files.pythonhosted.org/packages/96/2d/22338486473df5923a9ab7107d375dbef9173c338ebef5098ef593d2b560/hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848", size = 2866099, upload-time = "2025-10-24T19:04:15.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/8c/c5becfa53234299bc2210ba314eaaae36c2875e0045809b82e40a9544f0c/hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4", size = 2722178, upload-time = "2025-10-24T19:04:13.695Z" }, + { url = "https://files.pythonhosted.org/packages/9a/92/cf3ab0b652b082e66876d08da57fcc6fa2f0e6c70dfbbafbd470bb73eb47/hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd", size = 3320214, upload-time = "2025-10-24T19:04:03.596Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/3f7ec4a1b6a65bf45b059b6d4a5d38988f63e193056de2f420137e3c3244/hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c", size = 3229054, upload-time = "2025-10-24T19:04:01.949Z" }, + { url = "https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812, upload-time = "2025-10-24T19:04:24.585Z" }, + { url = "https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920, upload-time = "2025-10-24T19:04:26.927Z" }, + { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735, upload-time = "2025-10-24T19:04:35.928Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "huggingface-hub" +version = "1.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" }, + { name = "httpx" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "shellingham" }, + { name = "tqdm" }, + { name = "typer-slim" }, + { name = "typing-extensions" }, +] +sdist = { 
url = "https://files.pythonhosted.org/packages/4c/08/dc669fa8c7267752ce2d536683436f0c46661aca45e9450c635a365ca2df/huggingface_hub-1.1.6.tar.gz", hash = "sha256:e1beacb611d74a8189b4c5298e8675fb518256af73b38143171f6efa7d822cf6", size = 607477, upload-time = "2025-11-28T10:23:35.223Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/3c/168062db8c0068315ed3f137db450869eb14d98f00144234c118f294b461/huggingface_hub-1.1.6-py3-none-any.whl", hash = "sha256:09726c4fc4c0dc5d83568234daff1ccb815c39b310784359c9d8b5906f679de2", size = 516110, upload-time = "2025-11-28T10:23:33.63Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + 
+[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "ipykernel" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "appnope", marker = "sys_platform == 'darwin'" }, + { name = "comm" }, + { name = "debugpy" }, + { name = "ipython" }, + { name = "jupyter-client" }, + { name = "jupyter-core" }, + { name = "matplotlib-inline" }, + { name = "nest-asyncio" }, + { name = "packaging" }, + { name = "psutil" }, + { name = "pyzmq" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b9/a4/4948be6eb88628505b83a1f2f40d90254cab66abf2043b3c40fa07dfce0f/ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db", size = 174579, upload-time = "2025-10-27T09:46:39.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/17/20c2552266728ceba271967b87919664ecc0e33efca29c3efc6baf88c5f9/ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c", size = 117968, upload-time = "2025-10-27T09:46:37.805Z" }, +] + +[[package]] +name = "ipython" +version = "9.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "decorator" }, + { name = "ipython-pygments-lexers" }, + { name = "jedi" }, + { name = "matplotlib-inline" }, + { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit" }, + { name = "pygments" }, + { name = "stack-data" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/e6/48c74d54039241a456add616464ea28c6ebf782e4110d419411b83dae06f/ipython-9.7.0.tar.gz", hash = "sha256:5f6de88c905a566c6a9d6c400a8fed54a638e1f7543d17aae2551133216b1e4e", size = 4422115, upload-time = "2025-11-05T12:18:54.646Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/aa/62893d6a591d337aa59dcc4c6f6c842f1fe20cd72c8c5c1f980255243252/ipython-9.7.0-py3-none-any.whl", hash = "sha256:bce8ac85eb9521adc94e1845b4c03d88365fd6ac2f4908ec4ed1eb1b0a065f9f", size = 618911, upload-time = "2025-11-05T12:18:52.484Z" }, +] + +[[package]] +name = "ipython-pygments-lexers" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pygments" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, +] + +[[package]] +name = "jax" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jaxlib" }, + { name = "ml-dtypes" }, + { name = "numpy" }, + { name = "opt-einsum" }, + { name = "scipy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/32/82/84fd2c662e4d410a34b0402de9b56bb69d7f72d1b875c3ae0edc07df18cc/jax-0.8.1.tar.gz", hash = "sha256:e53f67b15315f5e154851a7fd77a192b59c6c75b3f7ac56e214296765391cca7", size = 2509320, upload-time = "2025-11-18T19:50:02.609Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/e7/19b8cfc8963b2e10a01a4db7bb27ec5fa39ecd024bc62f8e2d1de5625a9d/jax-0.8.1-py3-none-any.whl", hash = "sha256:4cbdc5548f3095cdd69d38e4337950b2fc1f250a740a0234d190e4a319077564", size = 2922137, upload-time = "2025-11-18T19:47:43.693Z" }, +] + +[[package]] +name = "jaxlib" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ml-dtypes" }, + { name = "numpy" }, + { name = "scipy" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/9d/59b36e2f348e599d5812743f263ca54aa03be1a4c9dfc11504d19864b72d/jaxlib-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88bde0f535eeea6689e0cd57d40b7660d5206ac95c7d42e09562a109b963a49f", size = 55728156, upload-time = "2025-11-18T19:48:56.254Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/73/2aa891de9f5f4c60ba3c63bda97ec4ace50ffb900ff3bf750ce42c514a3b/jaxlib-0.8.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:bed1e94ae8c7c16bca4476d8d7f582f0d1a102a4e69c3a9bd2069a0dc42274a9", size = 74209108, upload-time = "2025-11-18T19:48:59.572Z" }, + { url = "https://files.pythonhosted.org/packages/eb/4b/3c7e373d81219ee7493c1581c85a926c413ddeb3794cff87a37023a337e4/jaxlib-0.8.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:af4924189fc53b69237715b56ebcbfc71bb91ca16184143dcef0d430c8173de6", size = 80256943, upload-time = "2025-11-18T19:49:02.92Z" }, + { url = "https://files.pythonhosted.org/packages/07/6c/a6f449a7d1c7f91d73c3b8e00ceba92dff9dfd642508bbe1ddba9cb9ea57/jaxlib-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:24ec3f3a9c45d6de060020dc94c444d69e18099fab927ea3979ff8cedf0ed2c9", size = 59787068, upload-time = "2025-11-18T19:49:06.275Z" }, + { url = "https://files.pythonhosted.org/packages/f8/67/97c62849b5d8fc075f902201ff136ad224a2ef113d1fa655ece0ffe8b2a4/jaxlib-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a0349f6e8179dc897d33aeb90ec66b4a8041330fbbba8d071dc6167cd2271539", size = 55726611, upload-time = "2025-11-18T19:49:09.162Z" }, + { url = "https://files.pythonhosted.org/packages/fd/2a/9fb7599e43d66958b6a9859e045b605afea31f7fd96cfa35a7a8e978b0f8/jaxlib-0.8.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:bd697c171ace1e2e9d6ed910a78f385b3c4095cee290b0255aa58848f2acdeab", size = 74207596, upload-time = "2025-11-18T19:49:12.39Z" }, + { url = "https://files.pythonhosted.org/packages/7d/61/ab5c98641e15f9844dd49efbf6f22c6a9c5d17304319e5be8c51a1dfd088/jaxlib-0.8.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:d245bd6a279c72ca5f796df84cdd64d7c9c8abc4b8d89adf4acf45898dab958b", size = 80254560, upload-time = "2025-11-18T19:49:16.172Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/71/82a04ce93baeca5b3d10340f574e0668d327b7d0d18e32d9080917c507f6/jaxlib-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:8e118e1fbe714f37a94ba26777c17faab7dca4a33646a3d98cd1d99673bbd6b1", size = 59786828, upload-time = "2025-11-18T19:49:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/97/65/e7c625f1fdb54d45ac248d8398a28d6c02528c31feaa6e1c146a08192d77/jaxlib-0.8.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4933298fcfb07a5aa2d1fed21c111d07cea50e6f180dba2cdb5463c13fb98f2f", size = 55835933, upload-time = "2025-11-18T19:49:27.362Z" }, + { url = "https://files.pythonhosted.org/packages/1f/04/e09ff7b5ba0af93501cb196c65103a30e5050083203c1ff581f18718a356/jaxlib-0.8.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:f2f11491b077d05249d63813e811401194a41edc8e9cc60af8f4b554057cfad0", size = 74323389, upload-time = "2025-11-18T19:49:30.457Z" }, + { url = "https://files.pythonhosted.org/packages/44/9f/8b7f6ad9eebf8946e73049dae85f86544f5743bc8b2190898415646fa7ec/jaxlib-0.8.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:7a5d381fad89622750fae29fab83c0847e2931ad8d6a34dc13b28fc4d67f75a3", size = 80358249, upload-time = "2025-11-18T19:49:33.682Z" }, +] + +[[package]] +name = "jedi" +version = "0.19.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "parso" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, +] + +[[package]] +name 
= "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jiter" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, + { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, + { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, + { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, + { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, + { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, + { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, + { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, + { url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, + { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a6/97209693b177716e22576ee1161674d1d58029eb178e01866a0422b69224/jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e", size = 313658, upload-time = "2025-11-09T20:47:44.424Z" }, + { url = "https://files.pythonhosted.org/packages/06/4d/125c5c1537c7d8ee73ad3d530a442d6c619714b95027143f1b61c0b4dfe0/jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1", size = 318605, upload-time = "2025-11-09T20:47:45.973Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/bf/a840b89847885064c41a5f52de6e312e91fa84a520848ee56c97e4fa0205/jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf", size = 349803, upload-time = "2025-11-09T20:47:47.535Z" }, + { url = "https://files.pythonhosted.org/packages/8a/88/e63441c28e0db50e305ae23e19c1d8fae012d78ed55365da392c1f34b09c/jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44", size = 365120, upload-time = "2025-11-09T20:47:49.284Z" }, + { url = "https://files.pythonhosted.org/packages/0a/7c/49b02714af4343970eb8aca63396bc1c82fa01197dbb1e9b0d274b550d4e/jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45", size = 479918, upload-time = "2025-11-09T20:47:50.807Z" }, + { url = "https://files.pythonhosted.org/packages/69/ba/0a809817fdd5a1db80490b9150645f3aae16afad166960bcd562be194f3b/jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87", size = 379008, upload-time = "2025-11-09T20:47:52.211Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c3/c9fc0232e736c8877d9e6d83d6eeb0ba4e90c6c073835cc2e8f73fdeef51/jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed", size = 361785, upload-time = "2025-11-09T20:47:53.512Z" }, + { url = "https://files.pythonhosted.org/packages/96/61/61f69b7e442e97ca6cd53086ddc1cf59fb830549bc72c0a293713a60c525/jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9", size = 386108, upload-time = "2025-11-09T20:47:54.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/2e/76bb3332f28550c8f1eba3bf6e5efe211efda0ddbbaf24976bc7078d42a5/jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626", size = 519937, upload-time = "2025-11-09T20:47:56.253Z" }, + { url = "https://files.pythonhosted.org/packages/84/d6/fa96efa87dc8bff2094fb947f51f66368fa56d8d4fc9e77b25d7fbb23375/jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c", size = 510853, upload-time = "2025-11-09T20:47:58.32Z" }, + { url = "https://files.pythonhosted.org/packages/8a/28/93f67fdb4d5904a708119a6ab58a8f1ec226ff10a94a282e0215402a8462/jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de", size = 204699, upload-time = "2025-11-09T20:47:59.686Z" }, + { url = "https://files.pythonhosted.org/packages/c4/1f/30b0eb087045a0abe2a5c9c0c0c8da110875a1d3be83afd4a9a4e548be3c/jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a", size = 204258, upload-time = "2025-11-09T20:48:01.01Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f4/2b4daf99b96bce6fc47971890b14b2a36aef88d7beb9f057fafa032c6141/jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60", size = 185503, upload-time = "2025-11-09T20:48:02.35Z" }, + { url = "https://files.pythonhosted.org/packages/39/ca/67bb15a7061d6fe20b9b2a2fd783e296a1e0f93468252c093481a2f00efa/jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6", size = 317965, upload-time = "2025-11-09T20:48:03.783Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/af/1788031cd22e29c3b14bc6ca80b16a39a0b10e611367ffd480c06a259831/jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4", size = 345831, upload-time = "2025-11-09T20:48:05.55Z" }, + { url = "https://files.pythonhosted.org/packages/05/17/710bf8472d1dff0d3caf4ced6031060091c1320f84ee7d5dcbed1f352417/jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb", size = 361272, upload-time = "2025-11-09T20:48:06.951Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/1dcc4618b59761fef92d10bcbb0b038b5160be653b003651566a185f1a5c/jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7", size = 204604, upload-time = "2025-11-09T20:48:08.328Z" }, + { url = "https://files.pythonhosted.org/packages/d9/32/63cb1d9f1c5c6632a783c0052cde9ef7ba82688f7065e2f0d5f10a7e3edb/jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3", size = 185628, upload-time = "2025-11-09T20:48:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, + { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" }, + { url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time 
= "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "jupyter-client" +version = "8.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-core" }, + { name = "python-dateutil" }, + { name = "pyzmq" }, + { name = "tornado" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019, upload-time = "2024-09-17T10:44:17.613Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105, upload-time = "2024-09-17T10:44:15.218Z" }, +] + +[[package]] +name = "jupyter-core" +version = "5.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "platformdirs" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/49/9d1284d0dc65e2c757b74c6687b6d319b02f822ad039e5c512df9194d9dd/jupyter_core-5.9.1.tar.gz", hash = "sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508", size = 89814, upload-time = "2025-10-16T19:19:18.444Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/e7/80988e32bf6f73919a113473a604f5a8f09094de312b9d52b79c2df7612b/jupyter_core-5.9.1-py3-none-any.whl", hash = "sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407", size = 29032, upload-time = 
"2025-10-16T19:19:16.783Z" }, +] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900, upload-time = "2023-11-23T09:26:37.44Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884, upload-time = "2023-11-23T09:26:34.325Z" }, +] + +[[package]] +name = "latexcodec" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/27/dd/4270b2c5e2ee49316c3859e62293bd2ea8e382339d63ab7bbe9f39c0ec3b/latexcodec-3.0.1.tar.gz", hash = "sha256:e78a6911cd72f9dec35031c6ec23584de6842bfbc4610a9678868d14cdfb0357", size = 31222, upload-time = "2025-06-17T18:47:34.051Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b5/40/23569737873cc9637fd488606347e9dd92b9fa37ba4fcda1f98ee5219a97/latexcodec-3.0.1-py3-none-any.whl", hash = "sha256:a9eb8200bff693f0437a69581f7579eb6bca25c4193515c09900ce76451e452e", size = 18532, upload-time = "2025-06-17T18:47:30.726Z" }, +] + +[[package]] +name = "librt" +version = "0.6.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/c3/86e94f888f65ba1731f97c33ef10016c7286e0fa70d4a309eab41937183a/librt-0.6.2.tar.gz", hash = "sha256:3898faf00cada0bf2a97106936e92fe107ee4fbdf4e5ebd922cfd5ee9f052884", size = 53420, upload-time = "2025-11-18T16:51:17.097Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/36/0c/825aece0e99f1f948e1e423ac443913d753ddbcbc0e48e649f46dd3e6adc/librt-0.6.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29f4e8888de87eb637c1b1c3ca9e97f3d8828e481f5ef0b86bb90ae026215d4c", size = 27842, upload-time = "2025-11-18T16:50:13.751Z" }, + { url = "https://files.pythonhosted.org/packages/2f/64/74190707875d3db4c6e2655dd804577e85bdbb437fdf32206003dda0bb83/librt-0.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5cdacbe18f91741a5f45bb169a92ab5299e0c6a7245798d075885480706c4e5", size = 27841, upload-time = "2025-11-18T16:50:14.74Z" }, + { url = "https://files.pythonhosted.org/packages/db/0c/b783a58fc741cf30872a9947f3c777c57c2845e5e805d78c5147bc2c6c06/librt-0.6.2-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:de0461670334c16b76885d8a93a3c1f1b0259fb7d817cec326193325c24898e0", size = 84136, upload-time = "2025-11-18T16:50:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/87/5ad8119cc2128cce01a07198daaff02114b0dffc0951a5577f1980756d22/librt-0.6.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fcddd735029802e9ab56d482f977ca08920c432382c9382334e7cfa9ad0bb0de", size = 88004, upload-time = "2025-11-18T16:50:17.052Z" }, + { url = "https://files.pythonhosted.org/packages/46/96/9f7a25150c54614b756c1e6ae3898a798e665e938df4d5b054299082c5e6/librt-0.6.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:06c82cf56b3c2fab8e19e7415b6eb1b958356f6e6ee082b0077a582356801185", size = 88934, upload-time = "2025-11-18T16:50:18.485Z" }, + { url = "https://files.pythonhosted.org/packages/40/ed/e7da561b2169f02f4281ad806f800f94afa69eaeb994e65b0f178f2be52b/librt-0.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a426287d679aebd6dd3000192d054cdd2d90ae7612b51d0f4931b2f37dd1d13", size = 90599, upload-time = "2025-11-18T16:50:19.587Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/ba/aa06f14eba3d6f19f34ef73d5c0b17b1cdf7543661912a9b9e2e991f4b13/librt-0.6.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:75fa4126883da85600f4763930e8791949f50ab323fa8fc17fb31185b4fd16af", size = 88603, upload-time = "2025-11-18T16:50:20.901Z" }, + { url = "https://files.pythonhosted.org/packages/08/52/56c449119dc3b942d3ff2e985969571819db123f655e3744a08819d1f013/librt-0.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:73cf76b5814d268d777eca17db45a2bdd9c80f50eab01cf8b642f8bf18497358", size = 92112, upload-time = "2025-11-18T16:50:22.064Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/fe6faf84b5cc0ae3001adfe4f23aaa06cf9881965c7d9decce6180605244/librt-0.6.2-cp312-cp312-win32.whl", hash = "sha256:93cd69497046d67f35e1d00cef099bf32f97c277ff950c406e7e062ccf86852e", size = 20128, upload-time = "2025-11-18T16:50:23.182Z" }, + { url = "https://files.pythonhosted.org/packages/08/58/96086add1333d0ca6607b768bbb5633bc7a6265d11fa953be9392e789c46/librt-0.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:2ada7182335b25120ec960fbbf22d8f534bb9bb101f248f849bc977bc51165c8", size = 21547, upload-time = "2025-11-18T16:50:24.157Z" }, + { url = "https://files.pythonhosted.org/packages/71/e6/7e533225c4f05ba03c15e4f1788617539a19a47182cc677bc8b9feaeacf8/librt-0.6.2-cp312-cp312-win_arm64.whl", hash = "sha256:e2deaac245f6ce54caf6ccb5dabeadd35950e669f4ed31addd300ff4eaee981c", size = 20945, upload-time = "2025-11-18T16:50:25.915Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e7/e4ff31452298cda5008dede6d5805921a75f95aaaa2bfd1ac9d547efd47d/librt-0.6.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ad4014a959de1b4c020e0de0b92b637463e80d54fc6f12b8c0a357ef7289190f", size = 27875, upload-time = "2025-11-18T16:50:27.22Z" }, + { url = "https://files.pythonhosted.org/packages/a4/6b/fcbfc8243ff2f207f51566604b7a538ba2ee7c10222a82a827adacdaa9ad/librt-0.6.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:1eea7c6633cdd6ee3fd8d1677949c278bd2db9f6f39d2b34affe2d70c8dc0258", size = 27854, upload-time = "2025-11-18T16:50:28.475Z" }, + { url = "https://files.pythonhosted.org/packages/04/32/ff7041ff7d513e195bef955b4d7313ccd41436c539c481e2d28e78fd1581/librt-0.6.2-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:28d159adc310be1aba21480d56a6ebc06b98948fb60e15ccc77a77c6a037cd5f", size = 84321, upload-time = "2025-11-18T16:50:29.463Z" }, + { url = "https://files.pythonhosted.org/packages/8f/04/c0935cd6dcad97789d6bf9ae87bb1c98f56c4f237dc3e0cbd0062b893717/librt-0.6.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd85a818a58871a7d3fe3e9821423c06c1d2b5ac6d7ad21f62c28243b858c920", size = 88232, upload-time = "2025-11-18T16:50:30.481Z" }, + { url = "https://files.pythonhosted.org/packages/cb/68/14f2641852fafbeb62a72bd113ad71adc616b961238f96a41c8b6d4b2f39/librt-0.6.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3d58f22191217c6474d1a26269db2347c3862ef9fa379bd0c86bca659fe84145", size = 89113, upload-time = "2025-11-18T16:50:31.613Z" }, + { url = "https://files.pythonhosted.org/packages/5d/84/ebdb7ecfe7f3035dd8dec57c01086f089e255dac828c77535dd90dee3065/librt-0.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6408501b01add8913cfdf795ba57bce7095ac2a2ee170de660d4bff8ad589074", size = 90808, upload-time = "2025-11-18T16:50:32.753Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fc/4445de50cb1445fe2cd013f81cd5b102e9a5d4ae573e567a12de50d5ea89/librt-0.6.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fd1d5b3867feeecf3b627178f43b7bb940e0390e81bafab6b681b17112591198", size = 88891, upload-time = "2025-11-18T16:50:33.812Z" }, + { url = "https://files.pythonhosted.org/packages/c0/dc/ff70e69a9f1001d33ae377bf715b3ca8df0566bdd36317a79e1a8d922793/librt-0.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:c2920f525b54cd00adbb0e727d5d3ba6292a2d038788529ad8810a3d77acdf0f", size = 92300, upload-time = "2025-11-18T16:50:34.988Z" }, + { url = "https://files.pythonhosted.org/packages/07/3f/0b7e34d90cf76c617b90811905f4c2d0f46e7f8037817cd9c83279bc5e4a/librt-0.6.2-cp313-cp313-win32.whl", hash = "sha256:74213ad49b127da47a22f2c877be216820215880c527f28df726ad5d505f1239", size = 20162, upload-time = "2025-11-18T16:50:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/14/c0/c81266c308e1449ed9197b059feea91205832a1cd37e12443c0f7d3e0743/librt-0.6.2-cp313-cp313-win_amd64.whl", hash = "sha256:778667b8688bbacba06739eb5b0b78d99d2c65a99262dac5ab25eba473b34d5f", size = 21483, upload-time = "2025-11-18T16:50:36.923Z" }, + { url = "https://files.pythonhosted.org/packages/35/8e/9ba1d7e4aedec42bb5384ac68d65745f59a91944c2af16fb264cfd2fe42e/librt-0.6.2-cp313-cp313-win_arm64.whl", hash = "sha256:e787bfcccdf0f25e02310d7f1e2b9bfea714f594cda37a6ce6da84502f14acbf", size = 20937, upload-time = "2025-11-18T16:50:37.905Z" }, +] + +[[package]] +name = "litellm" +version = "1.80.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "click" }, + { name = "fastuuid" }, + { name = "grpcio" }, + { name = "httpx" }, + { name = "importlib-metadata" }, + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "tiktoken" }, + { name = "tokenizers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a5/3f/af532014449c3931ae6cad2d97d267dd43d0de006060a8cbf0962e004024/litellm-1.80.7.tar.gz", hash = "sha256:3977a8d195aef842d01c18bf9e22984829363c6a4b54daf9a43c9dd9f190b42c", size = 12023127, upload-time = "2025-11-27T23:03:52.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/e0/2e60a0c09235fd7b55297390c557923f3c35a9cf001914222c26a7857d2b/litellm-1.80.7-py3-none-any.whl", hash = 
"sha256:f7d993f78c1e0e4e1202b2a925cc6540b55b6e5fb055dd342d88b145ab3102ed", size = 10848321, upload-time = "2025-11-27T23:03:50.002Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url 
= "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, +] + +[[package]] +name = "matplotlib-inline" +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c7/74/97e72a36efd4ae2bccb3463284300f8953f199b5ffbc04cbbb0ec78f74b1/matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe", size = 8110, upload-time = "2025-10-23T09:00:22.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "mistune" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/02/a7fb8b21d4d55ac93cdcde9d3638da5dd0ebdd3a4fed76c7725e10b81cbe/mistune-3.1.4.tar.gz", hash = "sha256:b5a7f801d389f724ec702840c11d8fc48f2b33519102fc7ee739e8177b672164", size = 94588, upload-time = "2025-08-29T07:20:43.594Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/f0/8282d9641415e9e33df173516226b404d367a0fc55e1a60424a152913abc/mistune-3.1.4-py3-none-any.whl", hash = "sha256:93691da911e5d9d2e23bc54472892aff676df27a75274962ff9edc210364266d", size = 53481, upload-time = "2025-08-29T07:20:42.218Z" }, +] + +[[package]] +name = "ml-dtypes" +version = "0.5.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/4a/c27b42ed9b1c7d13d9ba8b6905dece787d6259152f2309338aed29b2447b/ml_dtypes-0.5.4.tar.gz", hash = "sha256:8ab06a50fb9bf9666dd0fe5dfb4676fa2b0ac0f31ecff72a6c3af8e22c063453", size = 692314, upload-time = "2025-11-17T22:32:31.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/b8/3c70881695e056f8a32f8b941126cf78775d9a4d7feba8abcb52cb7b04f2/ml_dtypes-0.5.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a174837a64f5b16cab6f368171a1a03a27936b31699d167684073ff1c4237dac", size = 676927, 
upload-time = "2025-11-17T22:31:48.182Z" }, + { url = "https://files.pythonhosted.org/packages/54/0f/428ef6881782e5ebb7eca459689448c0394fa0a80bea3aa9262cba5445ea/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7f7c643e8b1320fd958bf098aa7ecf70623a42ec5154e3be3be673f4c34d900", size = 5028464, upload-time = "2025-11-17T22:31:50.135Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cb/28ce52eb94390dda42599c98ea0204d74799e4d8047a0eb559b6fd648056/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ad459e99793fa6e13bd5b7e6792c8f9190b4e5a1b45c63aba14a4d0a7f1d5ff", size = 5009002, upload-time = "2025-11-17T22:31:52.001Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f0/0cfadd537c5470378b1b32bd859cf2824972174b51b873c9d95cfd7475a5/ml_dtypes-0.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:c1a953995cccb9e25a4ae19e34316671e4e2edaebe4cf538229b1fc7109087b7", size = 212222, upload-time = "2025-11-17T22:31:53.742Z" }, + { url = "https://files.pythonhosted.org/packages/16/2e/9acc86985bfad8f2c2d30291b27cd2bb4c74cea08695bd540906ed744249/ml_dtypes-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:9bad06436568442575beb2d03389aa7456c690a5b05892c471215bfd8cf39460", size = 160793, upload-time = "2025-11-17T22:31:55.358Z" }, + { url = "https://files.pythonhosted.org/packages/d9/a1/4008f14bbc616cfb1ac5b39ea485f9c63031c4634ab3f4cf72e7541f816a/ml_dtypes-0.5.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c760d85a2f82e2bed75867079188c9d18dae2ee77c25a54d60e9cc79be1bc48", size = 676888, upload-time = "2025-11-17T22:31:56.907Z" }, + { url = "https://files.pythonhosted.org/packages/d3/b7/dff378afc2b0d5a7d6cd9d3209b60474d9819d1189d347521e1688a60a53/ml_dtypes-0.5.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce756d3a10d0c4067172804c9cc276ba9cc0ff47af9078ad439b075d1abdc29b", size = 5036993, upload-time = "2025-11-17T22:31:58.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/33/40cd74219417e78b97c47802037cf2d87b91973e18bb968a7da48a96ea44/ml_dtypes-0.5.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:533ce891ba774eabf607172254f2e7260ba5f57bdd64030c9a4fcfbd99815d0d", size = 5010956, upload-time = "2025-11-17T22:31:59.931Z" }, + { url = "https://files.pythonhosted.org/packages/e1/8b/200088c6859d8221454825959df35b5244fa9bdf263fd0249ac5fb75e281/ml_dtypes-0.5.4-cp313-cp313-win_amd64.whl", hash = "sha256:f21c9219ef48ca5ee78402d5cc831bd58ea27ce89beda894428bc67a52da5328", size = 212224, upload-time = "2025-11-17T22:32:01.349Z" }, + { url = "https://files.pythonhosted.org/packages/8f/75/dfc3775cb36367816e678f69a7843f6f03bd4e2bcd79941e01ea960a068e/ml_dtypes-0.5.4-cp313-cp313-win_arm64.whl", hash = "sha256:35f29491a3e478407f7047b8a4834e4640a77d2737e0b294d049746507af5175", size = 160798, upload-time = "2025-11-17T22:32:02.864Z" }, + { url = "https://files.pythonhosted.org/packages/4f/74/e9ddb35fd1dd43b1106c20ced3f53c2e8e7fc7598c15638e9f80677f81d4/ml_dtypes-0.5.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:304ad47faa395415b9ccbcc06a0350800bc50eda70f0e45326796e27c62f18b6", size = 702083, upload-time = "2025-11-17T22:32:04.08Z" }, + { url = "https://files.pythonhosted.org/packages/74/f5/667060b0aed1aa63166b22897fdf16dca9eb704e6b4bbf86848d5a181aa7/ml_dtypes-0.5.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a0df4223b514d799b8a1629c65ddc351b3efa833ccf7f8ea0cf654a61d1e35d", size = 5354111, upload-time = "2025-11-17T22:32:05.546Z" }, + { url = "https://files.pythonhosted.org/packages/40/49/0f8c498a28c0efa5f5c95a9e374c83ec1385ca41d0e85e7cf40e5d519a21/ml_dtypes-0.5.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531eff30e4d368cb6255bc2328d070e35836aa4f282a0fb5f3a0cd7260257298", size = 5366453, upload-time = "2025-11-17T22:32:07.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/27/12607423d0a9c6bbbcc780ad19f1f6baa2b68b18ce4bddcdc122c4c68dc9/ml_dtypes-0.5.4-cp313-cp313t-win_amd64.whl", hash = "sha256:cb73dccfc991691c444acc8c0012bee8f2470da826a92e3a20bb333b1a7894e6", size = 225612, upload-time = "2025-11-17T22:32:08.615Z" }, + { url = "https://files.pythonhosted.org/packages/e5/80/5a5929e92c72936d5b19872c5fb8fc09327c1da67b3b68c6a13139e77e20/ml_dtypes-0.5.4-cp313-cp313t-win_arm64.whl", hash = "sha256:3bbbe120b915090d9dd1375e4684dd17a20a2491ef25d640a908281da85e73f1", size = 164145, upload-time = "2025-11-17T22:32:09.782Z" }, +] + +[[package]] +name = "mpmath" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = 
"2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, + { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, + { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, 
upload-time = "2025-10-06T14:50:02.955Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, + { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, + { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = "2025-10-06T14:50:10.714Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = 
"2025-10-06T14:50:12.28Z" }, + { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, + { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, + { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, + { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, + { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, + { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, + { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, + { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, + { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, + { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 
257539, upload-time = "2025-10-06T14:50:31.731Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, + { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, + { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, + { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = "2025-10-06T14:50:39.574Z" }, + { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = 
"2025-10-06T14:50:41.612Z" }, + { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, + { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, + { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, +] + +[[package]] +name = "multipledispatch" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fe/3e/a62c3b824c7dec33c4a1578bcc842e6c30300051033a4e5975ed86cc2536/multipledispatch-1.0.0.tar.gz", hash = "sha256:5c839915465c68206c3e9c473357908216c28383b425361e5d144594bf85a7e0", size = 12385, upload-time = "2023-06-27T16:45:11.074Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/c0/00c9809d8b9346eb238a6bbd5f83e846a4ce4503da94a4c08cb7284c325b/multipledispatch-1.0.0-py3-none-any.whl", hash = "sha256:0c53cd8b077546da4e48869f49b13164bebafd0c2a5afceb6bb6a316e7fb46e4", size = 12818, upload-time = "2023-06-27T16:45:09.418Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f9/b5/b58cdc25fadd424552804bf410855d52324183112aa004f0732c5f6324cf/mypy-1.19.0.tar.gz", hash = "sha256:f6b874ca77f733222641e5c46e4711648c4037ea13646fd0cdc814c2eaec2528", size = 3579025, upload-time = "2025-11-28T15:49:01.26Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/7e/1afa8fb188b876abeaa14460dc4983f909aaacaa4bf5718c00b2c7e0b3d5/mypy-1.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fb3115cb8fa7c5f887c8a8d81ccdcb94cff334684980d847e5a62e926910e1d", size = 13207728, upload-time = "2025-11-28T15:46:26.463Z" }, + { url = "https://files.pythonhosted.org/packages/b2/13/f103d04962bcbefb1644f5ccb235998b32c337d6c13145ea390b9da47f3e/mypy-1.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3e19e3b897562276bb331074d64c076dbdd3e79213f36eed4e592272dabd760", size = 12202945, upload-time = "2025-11-28T15:48:49.143Z" }, + { url = "https://files.pythonhosted.org/packages/e4/93/a86a5608f74a22284a8ccea8592f6e270b61f95b8588951110ad797c2ddd/mypy-1.19.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b9d491295825182fba01b6ffe2c6fe4e5a49dbf4e2bb4d1217b6ced3b4797bc6", size = 12718673, upload-time = "2025-11-28T15:47:37.193Z" }, + { url = "https://files.pythonhosted.org/packages/3d/58/cf08fff9ced0423b858f2a7495001fda28dc058136818ee9dffc31534ea9/mypy-1.19.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6016c52ab209919b46169651b362068f632efcd5eb8ef9d1735f6f86da7853b2", size = 13608336, upload-time = "2025-11-28T15:48:32.625Z" }, + { url = "https://files.pythonhosted.org/packages/64/ed/9c509105c5a6d4b73bb08733102a3ea62c25bc02c51bca85e3134bf912d3/mypy-1.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f188dcf16483b3e59f9278c4ed939ec0254aa8a60e8fc100648d9ab5ee95a431", size = 13833174, upload-time = "2025-11-28T15:45:48.091Z" }, + { url = "https://files.pythonhosted.org/packages/cd/71/01939b66e35c6f8cb3e6fdf0b657f0fd24de2f8ba5e523625c8e72328208/mypy-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:0e3c3d1e1d62e678c339e7ade72746a9e0325de42cd2cccc51616c7b2ed1a018", size = 10112208, upload-time = "2025-11-28T15:46:41.702Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0d/a1357e6bb49e37ce26fcf7e3cc55679ce9f4ebee0cd8b6ee3a0e301a9210/mypy-1.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7686ed65dbabd24d20066f3115018d2dce030d8fa9db01aa9f0a59b6813e9f9e", size = 13191993, upload-time = "2025-11-28T15:47:22.336Z" }, + { url = "https://files.pythonhosted.org/packages/5d/75/8e5d492a879ec4490e6ba664b5154e48c46c85b5ac9785792a5ec6a4d58f/mypy-1.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd4a985b2e32f23bead72e2fb4bbe5d6aceee176be471243bd831d5b2644672d", size = 12174411, upload-time = "2025-11-28T15:44:55.492Z" }, + { url = "https://files.pythonhosted.org/packages/71/31/ad5dcee9bfe226e8eaba777e9d9d251c292650130f0450a280aec3485370/mypy-1.19.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:fc51a5b864f73a3a182584b1ac75c404396a17eced54341629d8bdcb644a5bba", size = 12727751, upload-time = "2025-11-28T15:44:14.169Z" }, + { url = "https://files.pythonhosted.org/packages/77/06/b6b8994ce07405f6039701f4b66e9d23f499d0b41c6dd46ec28f96d57ec3/mypy-1.19.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37af5166f9475872034b56c5efdcf65ee25394e9e1d172907b84577120714364", size = 13593323, upload-time = "2025-11-28T15:46:34.699Z" }, + { url = "https://files.pythonhosted.org/packages/68/b1/126e274484cccdf099a8e328d4fda1c7bdb98a5e888fa6010b00e1bbf330/mypy-1.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:510c014b722308c9bd377993bcbf9a07d7e0692e5fa8fc70e639c1eb19fc6bee", size = 13818032, upload-time = "2025-11-28T15:46:18.286Z" }, + { url = "https://files.pythonhosted.org/packages/f8/56/53a8f70f562dfc466c766469133a8a4909f6c0012d83993143f2a9d48d2d/mypy-1.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:cabbee74f29aa9cd3b444ec2f1e4fa5a9d0d746ce7567a6a609e224429781f53", size = 10120644, upload-time = "2025-11-28T15:47:43.99Z" }, + { url = "https://files.pythonhosted.org/packages/09/0e/fe228ed5aeab470c6f4eb82481837fadb642a5aa95cc8215fd2214822c10/mypy-1.19.0-py3-none-any.whl", hash = "sha256:0c01c99d626380752e527d5ce8e69ffbba2046eb8a060db0329690849cf9b6f9", size = 2469714, upload-time = "2025-11-28T15:45:33.22Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = 
"sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "myst-parser" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "jinja2" }, + { name = "markdown-it-py" }, + { name = "mdit-py-plugins" }, + { name = "pyyaml" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985, upload-time = "2025-02-12T10:53:03.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579, upload-time = "2025-02-12T10:53:02.078Z" }, +] + +[[package]] +name = "nbclient" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jupyter-client" }, + { name = "jupyter-core" }, + { name = "nbformat" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/87/66/7ffd18d58eae90d5721f9f39212327695b749e23ad44b3881744eaf4d9e8/nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193", size = 62424, upload-time = "2024-12-19T10:32:27.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/6d/e7fa07f03a4a7b221d94b4d586edb754a9b0dc3c9e2c93353e9fa4e0d117/nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d", size = 25434, upload-time = "2024-12-19T10:32:24.139Z" }, +] + +[[package]] +name = "nbconvert" +version = "7.16.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"beautifulsoup4" }, + { name = "bleach", extra = ["css"] }, + { name = "defusedxml" }, + { name = "jinja2" }, + { name = "jupyter-core" }, + { name = "jupyterlab-pygments" }, + { name = "markupsafe" }, + { name = "mistune" }, + { name = "nbclient" }, + { name = "nbformat" }, + { name = "packaging" }, + { name = "pandocfilters" }, + { name = "pygments" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" }, +] + +[[package]] +name = "nbformat" +version = "5.10.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastjsonschema" }, + { name = "jsonschema" }, + { name = "jupyter-core" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" }, +] + +[[package]] +name = "nbqa" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "autopep8" }, + { name = "ipython" }, + { name 
= "tokenize-rt" }, + { name = "tomli" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/76/62d2609924cf34445148cd6b5de694cf64c179cc416cac93182579620e57/nbqa-1.9.1.tar.gz", hash = "sha256:a1f4bcf587c597302fed295951001fc4e1be4ce0e77e1ab1b25ac2fbe3db0cdd", size = 38348, upload-time = "2024-11-10T12:21:58.333Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/88/4789719fbbe166d12d345b3ac66b96105f10001b16e00a9765ba29261a21/nbqa-1.9.1-py3-none-any.whl", hash = "sha256:95552d2f6c2c038136252a805aa78d85018aef922586270c3a074332737282e5", size = 35259, upload-time = "2024-11-10T12:21:56.731Z" }, +] + +[[package]] +name = "nbsphinx" +version = "0.9.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "jinja2" }, + { name = "nbconvert" }, + { name = "nbformat" }, + { name = "sphinx" }, + { name = "traitlets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/d1/82081750f8a78ad0399c6ed831d42623b891904e8e7b8a75878225cf1dce/nbsphinx-0.9.8.tar.gz", hash = "sha256:d0765908399a8ee2b57be7ae881cf2ea58d66db3af7bbf33e6eb48f83bea5495", size = 417469, upload-time = "2025-11-28T17:41:02.336Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/78/843bcf0cf31f88d2f8a9a063d2d80817b1901657d83d65b89b3aa835732e/nbsphinx-0.9.8-py3-none-any.whl", hash = "sha256:92d95ee91784e56bc633b60b767a6b6f23a0445f891e24641ce3c3f004759ccf", size = 31961, upload-time = "2025-11-28T17:41:00.796Z" }, +] + +[[package]] +name = "nbval" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "ipykernel" }, + { name = "jupyter-client" }, + { name = "nbformat" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/be/22bd64d09e0cb53258f83b6fc455f05f18a78e3e5c109ccb6af42f1f49a2/nbval-0.11.0.tar.gz", hash = "sha256:77c95797607b0a968babd2597ee3494102d25c3ad37435debbdac0e46e379094", size = 62718, 
upload-time = "2024-03-04T14:36:58.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/5c/eb1e3ce54c4e94c7734b3831756c63f21badb3de91a98d77b9e23c0ca76a/nbval-0.11.0-py2.py3-none-any.whl", hash = "sha256:307aecc866c9a1e8a13bb5bbb008a702bacfda2394dff6fe504a3108a58042a0", size = 24013, upload-time = "2024-03-04T14:36:57.126Z" }, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, +] + +[[package]] +name = "networkx" +version = "3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/fc/7b6fd4d22c8c4dc5704430140d8b3f520531d4fe7328b8f8d03f5a7950e8/networkx-3.6.tar.gz", hash = "sha256:285276002ad1f7f7da0f7b42f004bcba70d381e936559166363707fdad3d72ad", size = 2511464, upload-time = "2025-11-24T03:03:47.158Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c7/d64168da60332c17d24c0d2f08bdf3987e8d1ae9d84b5bbd0eec2eb26a55/networkx-3.6-py3-none-any.whl", hash = "sha256:cdb395b105806062473d3be36458d8f1459a4e4b98e236a66c3a48996e07684f", size = 2063713, upload-time = "2025-11-24T03:03:45.21Z" }, +] + +[[package]] +name = "numpy" +version = "2.3.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873, upload-time = "2025-11-16T22:49:49.84Z" }, + { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838, upload-time = "2025-11-16T22:49:52.863Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378, upload-time = "2025-11-16T22:49:55.055Z" }, + { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559, upload-time = "2025-11-16T22:49:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702, upload-time = "2025-11-16T22:49:59.632Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size = 16606086, upload-time = "2025-11-16T22:50:02.127Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985, upload-time = "2025-11-16T22:50:04.536Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976, upload-time = "2025-11-16T22:50:07.557Z" }, + { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274, upload-time = "2025-11-16T22:50:10.746Z" }, + { url = "https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922, upload-time = "2025-11-16T22:50:12.811Z" }, + { url = "https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667, upload-time = "2025-11-16T22:50:16.16Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251, upload-time = "2025-11-16T22:50:19.013Z" }, + { url = "https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652, upload-time = "2025-11-16T22:50:21.487Z" }, + { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172, upload-time = "2025-11-16T22:50:24.562Z" }, + { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990, upload-time = "2025-11-16T22:50:26.47Z" }, + { url = "https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902, upload-time = "2025-11-16T22:50:28.861Z" }, + { url = "https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430, upload-time = "2025-11-16T22:50:31.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551, upload-time = "2025-11-16T22:50:34.242Z" }, + { url = "https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275, upload-time = "2025-11-16T22:50:37.651Z" }, + { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637, upload-time = "2025-11-16T22:50:40.11Z" }, + { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090, upload-time = "2025-11-16T22:50:42.503Z" }, + { url = "https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710, upload-time = "2025-11-16T22:50:44.971Z" }, + { url = "https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292, upload-time = "2025-11-16T22:50:47.715Z" }, + { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897, upload-time = "2025-11-16T22:50:51.327Z" }, + { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391, upload-time = "2025-11-16T22:50:54.542Z" }, + { url = "https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275, upload-time = "2025-11-16T22:50:56.794Z" }, + { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855, upload-time = "2025-11-16T22:50:59.208Z" }, + { url = "https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359, upload-time = "2025-11-16T22:51:01.991Z" }, + { url = "https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374, upload-time = "2025-11-16T22:51:05.291Z" }, + { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size 
= 18594587, upload-time = "2025-11-16T22:51:08.585Z" }, + { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940, upload-time = "2025-11-16T22:51:11.541Z" }, + { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341, upload-time = "2025-11-16T22:51:14.312Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507, upload-time = "2025-11-16T22:51:16.846Z" }, +] + +[[package]] +name = "numpyro" +version = "0.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jax" }, + { name = "jaxlib" }, + { name = "multipledispatch" }, + { name = "numpy" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/7c/5d1b55401b023b58f792483c71690315d4d5cd1653fd3631fa5bcbd68601/numpyro-0.19.0.tar.gz", hash = "sha256:bbf5b772a6ba8b7a79448fa6787afb069e5eb2dff8295078c3ec04d3e6276742", size = 404421, upload-time = "2025-08-05T10:26:33.055Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/31/9b5da5995988437756bc3f1eead2e314d8916259875c6924cb41692f2b41/numpyro-0.19.0-py3-none-any.whl", hash = "sha256:1063a2c131a0785719e13c8e55f1b82e41850d814df149418097531f4dbdeda8", size = 370906, upload-time = "2025-08-05T10:26:31.35Z" }, +] + +[[package]] +name = "nvidia-cublas-cu12" +version = "12.8.4.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921, upload-time = "2025-03-07T01:44:31.254Z" }, +] + +[[package]] +name = "nvidia-cuda-cupti-cu12" +version = "12.8.90" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/02/2adcaa145158bf1a8295d83591d22e4103dbfd821bcaf6f3f53151ca4ffa/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182", size = 10248621, upload-time = "2025-03-07T01:40:21.213Z" }, +] + +[[package]] +name = "nvidia-cuda-nvrtc-cu12" +version = "12.8.93" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/6b/32f747947df2da6994e999492ab306a903659555dddc0fbdeb9d71f75e52/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994", size = 88040029, upload-time = "2025-03-07T01:42:13.562Z" }, +] + +[[package]] +name = "nvidia-cuda-runtime-cu12" +version = "12.8.90" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765, upload-time = "2025-03-07T01:40:01.615Z" }, +] + +[[package]] +name = "nvidia-cudnn-cu12" +version = "9.10.2.21" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12" }, +] +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ba/51/e123d997aa098c61d029f76663dedbfb9bc8dcf8c60cbd6adbe42f76d049/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8", size = 706758467, upload-time = "2025-06-06T21:54:08.597Z" }, +] + +[[package]] +name = "nvidia-cufft-cu12" +version = "11.3.3.83" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695, upload-time = "2025-03-07T01:45:27.821Z" }, +] + +[[package]] +name = "nvidia-cufile-cu12" +version = "1.13.1.3" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bb/fe/1bcba1dfbfb8d01be8d93f07bfc502c93fa23afa6fd5ab3fc7c1df71038a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc", size = 1197834, upload-time = "2025-03-07T01:45:50.723Z" }, +] + +[[package]] +name = "nvidia-curand-cu12" +version = "10.3.9.90" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/aa/6584b56dc84ebe9cf93226a5cde4d99080c8e90ab40f0c27bda7a0f29aa1/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9", size = 63619976, upload-time = "2025-03-07T01:46:23.323Z" }, +] + +[[package]] +name = "nvidia-cusolver-cu12" +version = "11.7.3.90" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-cublas-cu12" }, + { name = 
"nvidia-cusparse-cu12" }, + { name = "nvidia-nvjitlink-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905, upload-time = "2025-03-07T01:47:16.273Z" }, +] + +[[package]] +name = "nvidia-cusparse-cu12" +version = "12.5.8.93" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nvidia-nvjitlink-cu12" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466, upload-time = "2025-03-07T01:48:13.779Z" }, +] + +[[package]] +name = "nvidia-cusparselt-cu12" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size = 287193691, upload-time = "2025-02-26T00:15:44.104Z" }, +] + +[[package]] +name = "nvidia-nccl-cu12" +version = "2.27.5" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/89/f7a07dc961b60645dbbf42e80f2bc85ade7feb9a491b11a1e973aa00071f/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ad730cf15cb5d25fe849c6e6ca9eb5b76db16a80f13f425ac68d8e2e55624457", size = 322348229, upload-time = "2025-06-26T04:11:28.385Z" }, +] + +[[package]] +name = "nvidia-nvjitlink-cu12" +version = "12.8.93" +source = { registry = 
"https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836, upload-time = "2025-03-07T01:49:55.661Z" }, +] + +[[package]] +name = "nvidia-nvshmem-cu12" +version = "3.3.20" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/6c/99acb2f9eb85c29fc6f3a7ac4dccfd992e22666dd08a642b303311326a97/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d00f26d3f9b2e3c3065be895e3059d6479ea5c638a3f38c9fec49b1b9dd7c1e5", size = 124657145, upload-time = "2025-08-04T20:25:19.995Z" }, +] + +[[package]] +name = "nvidia-nvtx-cu12" +version = "12.8.90" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954, upload-time = "2025-03-07T01:42:44.131Z" }, +] + +[[package]] +name = "openai" +version = "2.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d5/e4/42591e356f1d53c568418dc7e30dcda7be31dd5a4d570bca22acb0525862/openai-2.8.1.tar.gz", hash = "sha256:cb1b79eef6e809f6da326a7ef6038719e35aa944c42d081807bfa1be8060f15f", size = 602490, upload-time = "2025-11-17T22:39:59.549Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/55/4f/dbc0c124c40cb390508a82770fb9f6e3ed162560181a85089191a851c59a/openai-2.8.1-py3-none-any.whl", hash = "sha256:c6c3b5a04994734386e8dad3c00a393f56d3b68a27cd2e8acae91a59e4122463", size = 1022688, upload-time = "2025-11-17T22:39:57.675Z" }, +] + +[[package]] +name = "opt-einsum" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/b9/2ac072041e899a52f20cf9510850ff58295003aa75525e58343591b0cbfb/opt_einsum-3.4.0.tar.gz", hash = "sha256:96ca72f1b886d148241348783498194c577fa30a8faac108586b14f1ba4473ac", size = 63004, upload-time = "2024-09-26T14:33:24.483Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/23/cd/066e86230ae37ed0be70aae89aabf03ca8d9f39c8aea0dec8029455b5540/opt_einsum-3.4.0-py3-none-any.whl", hash = "sha256:69bb92469f86a1565195ece4ac0323943e83477171b91d24c35afe028a90d7cd", size = 71932, upload-time = "2024-09-26T14:33:23.039Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pandocfilters" +version = "1.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = 
"sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454, upload-time = "2024-01-18T20:08:13.726Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663, upload-time = "2024-01-18T20:08:11.28Z" }, +] + +[[package]] +name = "parso" +version = "0.8.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ptyprocess" }, +] +sdist = { 
url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, +] + +[[package]] +name = "pillow" +version = "12.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" }, + { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" }, + { url = "https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" }, + { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" }, + { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334, upload-time = "2025-10-15T18:22:16.375Z" }, + { url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162, upload-time = "2025-10-15T18:22:17.996Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769, upload-time = "2025-10-15T18:22:19.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107, upload-time = "2025-10-15T18:22:21.644Z" }, + { url = "https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012, upload-time = "2025-10-15T18:22:23.621Z" }, + { url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493, upload-time = "2025-10-15T18:22:25.758Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461, upload-time = "2025-10-15T18:22:27.286Z" }, + { url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912, upload-time = "2025-10-15T18:22:28.751Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132, upload-time = "2025-10-15T18:22:30.641Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099, upload-time = "2025-10-15T18:22:32.73Z" }, + { url = "https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808, upload-time = "2025-10-15T18:22:34.337Z" }, + { url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804, upload-time = "2025-10-15T18:22:36.402Z" }, + { url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553, upload-time = "2025-10-15T18:22:38.066Z" }, + { url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729, upload-time = "2025-10-15T18:22:39.769Z" }, + { url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789, upload-time = "2025-10-15T18:22:41.437Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917, upload-time = "2025-10-15T18:22:43.152Z" }, + { url = "https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391, upload-time = "2025-10-15T18:22:44.753Z" }, + { url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477, upload-time = "2025-10-15T18:22:46.838Z" }, + { url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918, upload-time = "2025-10-15T18:22:48.399Z" }, + { url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406, upload-time = "2025-10-15T18:22:49.905Z" }, + { url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218, upload-time = "2025-10-15T18:22:51.587Z" }, + { url = 
"https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564, upload-time = "2025-10-15T18:22:53.215Z" }, + { url = "https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260, upload-time = "2025-10-15T18:22:54.933Z" }, + { url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248, upload-time = "2025-10-15T18:22:56.605Z" }, + { url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043, upload-time = "2025-10-15T18:22:58.53Z" }, + { url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915, upload-time = "2025-10-15T18:23:00.582Z" }, + { url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998, upload-time = "2025-10-15T18:23:02.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201, upload-time = "2025-10-15T18:23:04.709Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165, upload-time = "2025-10-15T18:23:06.46Z" }, + { url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834, upload-time = "2025-10-15T18:23:08.194Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "prettyprinter" +version = "0.18.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorful" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/41/967b5e033b5b50eebe0b8154a9e9827c517e244b9b612ec3357c40a4a33c/prettyprinter-0.18.0.tar.gz", hash = "sha256:9fe5da7ec53510881dd35d7a5c677ba45f34cfe6a8e78d1abd20652cf82139a8", size = 651884, upload-time = "2019-06-22T07:04:40.337Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/d0/9effbeca8f1b8df9d33154de3477a51e55a9c46cb15612dd7791a1624397/prettyprinter-0.18.0-py2.py3-none-any.whl", hash = "sha256:358a58f276cb312e3ca29d7a7f244c91e4e0bda7848249d30e4f36d2eb58b67c", size = 48013, upload-time = "2019-06-22T07:04:43.916Z" }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 
199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "psutil" +version = "7.1.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" }, + { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" }, + { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" }, + { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" }, + { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" }, + { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" }, + { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" }, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, +] + +[[package]] +name = "py-cpuinfo" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716, upload-time = "2022-10-25T20:38:06.303Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335, 
upload-time = "2022-10-25T20:38:27.636Z" }, +] + +[[package]] +name = "pybtex" +version = "0.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "latexcodec" }, + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/bc/c2be05ca72f8c103670e983df8be26d1e288bc6556f487fa8cccaa27779f/pybtex-0.25.1.tar.gz", hash = "sha256:9eaf90267c7e83e225af89fea65c370afbf65f458220d3946a9e3049e1eca491", size = 406157, upload-time = "2025-06-26T13:27:41.903Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/68/ceb5d6679baa326261f5d3e5113d9cfed6efef2810afd9f18bffb8ed312b/pybtex-0.25.1-py2.py3-none-any.whl", hash = "sha256:9053b0d619409a0a83f38abad5d9921de5f7b3ede00742beafcd9f10ad0d8c5c", size = 127437, upload-time = "2025-06-26T13:27:43.585Z" }, +] + +[[package]] +name = "pybtex-docutils" +version = "1.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "pybtex" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7e/84/796ea94d26188a853660f81bded39f8de4cfe595130aef0dea1088705a11/pybtex-docutils-1.0.3.tar.gz", hash = "sha256:3a7ebdf92b593e00e8c1c538aa9a20bca5d92d84231124715acc964d51d93c6b", size = 18348, upload-time = "2023-08-22T18:47:54.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/b1/ce1f4596211efb5410e178a803f08e59b20bedb66837dcf41e21c54f9ec1/pybtex_docutils-1.0.3-py3-none-any.whl", hash = "sha256:8fd290d2ae48e32fcb54d86b0efb8d573198653c7e2447d5bec5847095f430b9", size = 6385, upload-time = "2023-08-22T06:43:20.513Z" }, +] + +[[package]] +name = "pycodestyle" +version = "2.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/e0/abfd2a0d2efe47670df87f3e3a0e2edda42f055053c85361f19c0e2c1ca8/pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783", size = 39472, upload-time = 
"2025-06-20T18:49:48.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/27/a58ddaf8c588a3ef080db9d0b7e0b97215cee3a45df74f3a94dbbf5c893a/pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d", size = 31594, upload-time = "2025-06-20T18:49:47.491Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", 
size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, 
upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = 
"2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pypandoc-binary" +version = "1.15" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a3/59/ad3a065f512c3fc3bdf8d97d30e82fb40a292b3f38b06ef13f9c2f69df29/pypandoc_binary-1.15-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:12a43df66f59cb489a9a2659b6f049ec103132e43e4f284b7e33e7ae4dd9cadb", size = 22145734, upload-time = "2025-01-08T17:39:13.98Z" }, + { url = "https://files.pythonhosted.org/packages/59/d6/ef3b4927797bbf4146fdcb54b12a8ce766d077b44f37bcef0581a931a8a6/pypandoc_binary-1.15-py3-none-macosx_11_0_arm64.whl", hash = "sha256:79b215279a6470d3e2a24135826b446f44259f6abce361e31d713ac1b0b5aa69", size = 22145735, upload-time = "2025-01-08T17:39:21.562Z" }, + { url = "https://files.pythonhosted.org/packages/e1/74/970c50ef0f62b69afbae957d7be92072748b1f2d9b690c763c1afc81e326/pypandoc_binary-1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa29176f4eb9eb93f555dac6926a8d53c6ab4fd2deba3a1edb002e04899b237d", size = 35362526, upload-time = "2025-01-08T17:39:28.48Z" }, + { url = "https://files.pythonhosted.org/packages/cb/00/dc9377725392fa3c7d5f04dc42ab012d1cd40a0a0705b6eb794444559072/pypandoc_binary-1.15-py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df54edb00331a8fc80516c71cd81de69954512163037c3d4577c315091a0d6f6", size = 32649183, upload-time = "2025-01-08T17:39:34.414Z" }, + { url = "https://files.pythonhosted.org/packages/80/8b/341d6a298a97ba66b6a5f93583f03e027cb0e3322ce6d46082eca0e95c93/pypandoc_binary-1.15-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:e465a09e8aac18e8fda37516a6d1a891e51cdd1d066ab585f85a81620de575e2", size = 35362516, upload-time = "2025-01-08T17:39:42.438Z" }, + { url = "https://files.pythonhosted.org/packages/d9/a9/5c3c2fba01a008e7aa784268fa88b612c3ef94b89d60c1e838d96b5a1735/pypandoc_binary-1.15-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f9ebdf92059e9c1ae5231ee193d1fdafbc0188d5ec9d5f53e95fa21a42339481", size = 32649164, upload-time = "2025-01-08T17:39:49.381Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/a7/2295d4f1036cedbd27b4d6c220fe3bc40601b618245bfd5837623ecee4cb/pypandoc_binary-1.15-py3-none-win_amd64.whl", hash = "sha256:de7a234ffb674a4e650490acc7a5986161e2fd8b5bb106f1c9ffc30d76d2cf23", size = 38577212, upload-time = "2025-01-08T17:39:54.85Z" }, +] + +[[package]] +name = "pyro-api" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/25/d7/a0812f5c16b0d4464f80a64a44626c5fe200098070be0f32436dbb662775/pyro-api-0.1.2.tar.gz", hash = "sha256:a1b900d9580aa1c2fab3b123ab7ff33413744da7c5f440bd4aadc4d40d14d920", size = 7349, upload-time = "2020-05-15T16:17:41.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/81/957ae78e6398460a7230b0eb9b8f1cb954c5e913e868e48d89324c68cec7/pyro_api-0.1.2-py3-none-any.whl", hash = "sha256:10e0e42e9e4401ce464dab79c870e50dfb4f413d326fa777f3582928ef9caf8f", size = 11981, upload-time = "2020-05-15T16:17:40.492Z" }, +] + +[[package]] +name = "pyro-ppl" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "opt-einsum" }, + { name = "pyro-api" }, + { name = "torch" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4c/2e/3bcba8688d58f8dc954cef6831c19d52b6017b035d783685d67cd99fa351/pyro_ppl-1.9.1.tar.gz", hash = "sha256:5e1596de276c038a3f77d2580a90d0a97126e0104900444a088eee620bb0d65e", size = 570861, upload-time = "2024-06-02T00:37:39.688Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/37/def183a2a2c8619d92649d62fe0622c4c6c62f60e4151e8fbaa409e7d5ab/pyro_ppl-1.9.1-py3-none-any.whl", hash = "sha256:91fb2c8740d9d3bd548180ac5ecfa04552ed8c471a1ab66870180663b8f09852", size = 755956, upload-time = "2024-06-02T00:37:37.486Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = 
"sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" }, +] + +[[package]] +name = "pytest-benchmark" +version = "5.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "py-cpuinfo" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/34/9f732b76456d64faffbef6232f1f9dbec7a7c4999ff46282fa418bd1af66/pytest_benchmark-5.2.3.tar.gz", hash = "sha256:deb7317998a23c650fd4ff76e1230066a76cb45dcece0aca5607143c619e7779", size = 341340, upload-time = "2025-11-09T18:48:43.215Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/29/e756e715a48959f1c0045342088d7ca9762a2f509b945f362a316e9412b7/pytest_benchmark-5.2.3-py3-none-any.whl", hash = "sha256:bc839726ad20e99aaa0d11a127445457b4219bdb9e80a1afc4b51da7f96b0803", size = 45255, upload-time = "2025-11-09T18:48:39.765Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = 
"2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = 
"2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, +] + +[[package]] +name = "pyzmq" +version = "27.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "implementation_name == 'pypy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9b/c108cdb55560eaf253f0cbdb61b29971e9fb34d9c3499b0e96e4e60ed8a5/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31", size = 840995, upload-time = "2025-09-08T23:08:08.396Z" }, + { url = "https://files.pythonhosted.org/packages/c2/bb/b79798ca177b9eb0825b4c9998c6af8cd2a7f15a6a1a4272c1d1a21d382f/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28", size = 1642070, upload-time = "2025-09-08T23:08:09.989Z" }, + { url = "https://files.pythonhosted.org/packages/9c/80/2df2e7977c4ede24c79ae39dcef3899bfc5f34d1ca7a5b24f182c9b7a9ca/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856", size = 2021121, upload-time = "2025-09-08T23:08:11.907Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/bd/2d45ad24f5f5ae7e8d01525eb76786fa7557136555cac7d929880519e33a/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496", size = 1878550, upload-time = "2025-09-08T23:08:13.513Z" }, + { url = "https://files.pythonhosted.org/packages/e6/2f/104c0a3c778d7c2ab8190e9db4f62f0b6957b53c9d87db77c284b69f33ea/pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd", size = 559184, upload-time = "2025-09-08T23:08:15.163Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7f/a21b20d577e4100c6a41795842028235998a643b1ad406a6d4163ea8f53e/pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf", size = 619480, upload-time = "2025-09-08T23:08:17.192Z" }, + { url = "https://files.pythonhosted.org/packages/78/c2/c012beae5f76b72f007a9e91ee9401cb88c51d0f83c6257a03e785c81cc2/pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f", size = 552993, upload-time = "2025-09-08T23:08:18.926Z" }, + { url = "https://files.pythonhosted.org/packages/60/cb/84a13459c51da6cec1b7b1dc1a47e6db6da50b77ad7fd9c145842750a011/pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5", size = 1122436, upload-time = "2025-09-08T23:08:20.801Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b6/94414759a69a26c3dd674570a81813c46a078767d931a6c70ad29fc585cb/pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6", size = 1156301, upload-time = "2025-09-08T23:08:22.47Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ad/15906493fd40c316377fd8a8f6b1f93104f97a752667763c9b9c1b71d42d/pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = 
"sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7", size = 1341197, upload-time = "2025-09-08T23:08:24.286Z" }, + { url = "https://files.pythonhosted.org/packages/14/1d/d343f3ce13db53a54cb8946594e567410b2125394dafcc0268d8dda027e0/pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05", size = 897275, upload-time = "2025-09-08T23:08:26.063Z" }, + { url = "https://files.pythonhosted.org/packages/69/2d/d83dd6d7ca929a2fc67d2c3005415cdf322af7751d773524809f9e585129/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9", size = 660469, upload-time = "2025-09-08T23:08:27.623Z" }, + { url = "https://files.pythonhosted.org/packages/3e/cd/9822a7af117f4bc0f1952dbe9ef8358eb50a24928efd5edf54210b850259/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128", size = 847961, upload-time = "2025-09-08T23:08:29.672Z" }, + { url = "https://files.pythonhosted.org/packages/9a/12/f003e824a19ed73be15542f172fd0ec4ad0b60cf37436652c93b9df7c585/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39", size = 1650282, upload-time = "2025-09-08T23:08:31.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4a/e82d788ed58e9a23995cee70dbc20c9aded3d13a92d30d57ec2291f1e8a3/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97", size = 2024468, upload-time = "2025-09-08T23:08:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/d9/94/2da0a60841f757481e402b34bf4c8bf57fa54a5466b965de791b1e6f747d/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db", size = 1885394, upload-time = "2025-09-08T23:08:35.51Z" }, + { url = "https://files.pythonhosted.org/packages/4f/6f/55c10e2e49ad52d080dc24e37adb215e5b0d64990b57598abc2e3f01725b/pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c", size = 574964, upload-time = "2025-09-08T23:08:37.178Z" }, + { url = "https://files.pythonhosted.org/packages/87/4d/2534970ba63dd7c522d8ca80fb92777f362c0f321900667c615e2067cb29/pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2", size = 641029, upload-time = "2025-09-08T23:08:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/f6/fa/f8aea7a28b0641f31d40dea42d7ef003fded31e184ef47db696bc74cd610/pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e", size = 561541, upload-time = "2025-09-08T23:08:42.668Z" }, +] + +[[package]] +name = "referencing" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, +] + +[[package]] +name = "regex" +version = "2025.11.3" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" }, + { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" }, + { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" }, + { url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" }, + { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" }, + { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" }, + { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842, upload-time = "2025-11-03T21:31:45.353Z" }, + { url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519, upload-time = "2025-11-03T21:31:46.814Z" }, + { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" }, + { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" }, + { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time = "2025-11-03T21:31:54.514Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" }, + { url = "https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" }, + { url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" }, + { url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = "2025-11-03T21:32:06.148Z" }, + { url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497, upload-time = "2025-11-03T21:32:08.162Z" }, + { url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" }, + { url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", 
size = 858462, upload-time = "2025-11-03T21:32:11.769Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = "2025-11-03T21:32:13.906Z" }, + { url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" }, + { url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" }, + { url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" }, + { url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" }, + { url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" }, + { url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" }, + { url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" }, + { url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size 
= 812708, upload-time = "2025-11-03T21:32:34.305Z" }, + { url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472, upload-time = "2025-11-03T21:32:36.364Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" }, + { url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = "2025-11-03T21:32:40.079Z" }, + { url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" }, + { url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" }, + { url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, upload-time = "2025-11-03T21:32:48.026Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.29.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/98/33/23b3b3419b6a3e0f559c7c0d2ca8fc1b9448382b25245033788785921332/rpds_py-0.29.0.tar.gz", hash = "sha256:fe55fe686908f50154d1dc599232016e50c243b438c3b7432f24e2895b0e5359", size = 69359, upload-time = "2025-11-16T14:50:39.532Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/50/bc0e6e736d94e420df79be4deb5c9476b63165c87bb8f19ef75d100d21b3/rpds_py-0.29.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a0891cfd8db43e085c0ab93ab7e9b0c8fee84780d436d3b266b113e51e79f954", size = 376000, upload-time = "2025-11-16T14:48:19.141Z" }, + { url = "https://files.pythonhosted.org/packages/3e/3a/46676277160f014ae95f24de53bed0e3b7ea66c235e7de0b9df7bd5d68ba/rpds_py-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3897924d3f9a0361472d884051f9a2460358f9a45b1d85a39a158d2f8f1ad71c", size = 360575, upload-time = "2025-11-16T14:48:20.443Z" }, + { url = "https://files.pythonhosted.org/packages/75/ba/411d414ed99ea1afdd185bbabeeaac00624bd1e4b22840b5e9967ade6337/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21deb8e0d1571508c6491ce5ea5e25669b1dd4adf1c9d64b6314842f708b5d", size = 392159, upload-time = "2025-11-16T14:48:22.12Z" }, + { url = "https://files.pythonhosted.org/packages/8f/b1/e18aa3a331f705467a48d0296778dc1fea9d7f6cf675bd261f9a846c7e90/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9efe71687d6427737a0a2de9ca1c0a216510e6cd08925c44162be23ed7bed2d5", size = 410602, upload-time = "2025-11-16T14:48:23.563Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6c/04f27f0c9f2299274c76612ac9d2c36c5048bb2c6c2e52c38c60bf3868d9/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f65470919dc189c833e86b2c4bd21bd355f98436a2cef9e0a9a92aebc8e57e", size = 515808, upload-time = "2025-11-16T14:48:24.949Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/56/a8412aa464fb151f8bc0d91fb0bb888adc9039bd41c1c6ba8d94990d8cf8/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:def48ff59f181130f1a2cb7c517d16328efac3ec03951cca40c1dc2049747e83", size = 416015, upload-time = "2025-11-16T14:48:26.782Z" }, + { url = "https://files.pythonhosted.org/packages/04/4c/f9b8a05faca3d9e0a6397c90d13acb9307c9792b2bff621430c58b1d6e76/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7bd570be92695d89285a4b373006930715b78d96449f686af422debb4d3949", size = 395325, upload-time = "2025-11-16T14:48:28.055Z" }, + { url = "https://files.pythonhosted.org/packages/34/60/869f3bfbf8ed7b54f1ad9a5543e0fdffdd40b5a8f587fe300ee7b4f19340/rpds_py-0.29.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:5a572911cd053137bbff8e3a52d31c5d2dba51d3a67ad902629c70185f3f2181", size = 410160, upload-time = "2025-11-16T14:48:29.338Z" }, + { url = "https://files.pythonhosted.org/packages/91/aa/e5b496334e3aba4fe4c8a80187b89f3c1294c5c36f2a926da74338fa5a73/rpds_py-0.29.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d583d4403bcbf10cffc3ab5cee23d7643fcc960dff85973fd3c2d6c86e8dbb0c", size = 425309, upload-time = "2025-11-16T14:48:30.691Z" }, + { url = "https://files.pythonhosted.org/packages/85/68/4e24a34189751ceb6d66b28f18159922828dd84155876551f7ca5b25f14f/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:070befbb868f257d24c3bb350dbd6e2f645e83731f31264b19d7231dd5c396c7", size = 574644, upload-time = "2025-11-16T14:48:31.964Z" }, + { url = "https://files.pythonhosted.org/packages/8c/cf/474a005ea4ea9c3b4f17b6108b6b13cebfc98ebaff11d6e1b193204b3a93/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fc935f6b20b0c9f919a8ff024739174522abd331978f750a74bb68abd117bd19", size = 601605, upload-time = "2025-11-16T14:48:33.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/b1/c56f6a9ab8c5f6bb5c65c4b5f8229167a3a525245b0773f2c0896686b64e/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c5a8ecaa44ce2d8d9d20a68a2483a74c07f05d72e94a4dff88906c8807e77b0", size = 564593, upload-time = "2025-11-16T14:48:34.643Z" }, + { url = "https://files.pythonhosted.org/packages/b3/13/0494cecce4848f68501e0a229432620b4b57022388b071eeff95f3e1e75b/rpds_py-0.29.0-cp312-cp312-win32.whl", hash = "sha256:ba5e1aeaf8dd6d8f6caba1f5539cddda87d511331714b7b5fc908b6cfc3636b7", size = 223853, upload-time = "2025-11-16T14:48:36.419Z" }, + { url = "https://files.pythonhosted.org/packages/1f/6a/51e9aeb444a00cdc520b032a28b07e5f8dc7bc328b57760c53e7f96997b4/rpds_py-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f6134faf54b3cb83375db0f113506f8b7770785be1f95a631e7e2892101977", size = 239895, upload-time = "2025-11-16T14:48:37.956Z" }, + { url = "https://files.pythonhosted.org/packages/d1/d4/8bce56cdad1ab873e3f27cb31c6a51d8f384d66b022b820525b879f8bed1/rpds_py-0.29.0-cp312-cp312-win_arm64.whl", hash = "sha256:b016eddf00dca7944721bf0cd85b6af7f6c4efaf83ee0b37c4133bd39757a8c7", size = 230321, upload-time = "2025-11-16T14:48:39.71Z" }, + { url = "https://files.pythonhosted.org/packages/fd/d9/c5de60d9d371bbb186c3e9bf75f4fc5665e11117a25a06a6b2e0afb7380e/rpds_py-0.29.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1585648d0760b88292eecab5181f5651111a69d90eff35d6b78aa32998886a61", size = 375710, upload-time = "2025-11-16T14:48:41.063Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b3/0860cdd012291dc21272895ce107f1e98e335509ba986dd83d72658b82b9/rpds_py-0.29.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:521807963971a23996ddaf764c682b3e46459b3c58ccd79fefbe16718db43154", size = 360582, upload-time = "2025-11-16T14:48:42.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/8a/a18c2f4a61b3407e56175f6aab6deacdf9d360191a3d6f38566e1eaf7266/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8896986efaa243ab713c69e6491a4138410f0fe36f2f4c71e18bd5501e8014", size = 391172, upload-time = "2025-11-16T14:48:43.75Z" }, + { url = "https://files.pythonhosted.org/packages/fd/49/e93354258508c50abc15cdcd5fcf7ac4117f67bb6233ad7859f75e7372a0/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d24564a700ef41480a984c5ebed62b74e6ce5860429b98b1fede76049e953e6", size = 409586, upload-time = "2025-11-16T14:48:45.498Z" }, + { url = "https://files.pythonhosted.org/packages/5a/8d/a27860dae1c19a6bdc901f90c81f0d581df1943355802961a57cdb5b6cd1/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6596b93c010d386ae46c9fba9bfc9fc5965fa8228edeac51576299182c2e31c", size = 516339, upload-time = "2025-11-16T14:48:47.308Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ad/a75e603161e79b7110c647163d130872b271c6b28712c803c65d492100f7/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5cc58aac218826d054c7da7f95821eba94125d88be673ff44267bb89d12a5866", size = 416201, upload-time = "2025-11-16T14:48:48.615Z" }, + { url = "https://files.pythonhosted.org/packages/b9/42/555b4ee17508beafac135c8b450816ace5a96194ce97fefc49d58e5652ea/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de73e40ebc04dd5d9556f50180395322193a78ec247e637e741c1b954810f295", size = 395095, upload-time = "2025-11-16T14:48:50.027Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f0/c90b671b9031e800ec45112be42ea9f027f94f9ac25faaac8770596a16a1/rpds_py-0.29.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:295ce5ac7f0cf69a651ea75c8f76d02a31f98e5698e82a50a5f4d4982fbbae3b", size = 410077, upload-time = "2025-11-16T14:48:51.515Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/80/9af8b640b81fe21e6f718e9dec36c0b5f670332747243130a5490f292245/rpds_py-0.29.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ea59b23ea931d494459c8338056fe7d93458c0bf3ecc061cd03916505369d55", size = 424548, upload-time = "2025-11-16T14:48:53.237Z" }, + { url = "https://files.pythonhosted.org/packages/e4/0b/b5647446e991736e6a495ef510e6710df91e880575a586e763baeb0aa770/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f49d41559cebd608042fdcf54ba597a4a7555b49ad5c1c0c03e0af82692661cd", size = 573661, upload-time = "2025-11-16T14:48:54.769Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b3/1b1c9576839ff583d1428efbf59f9ee70498d8ce6c0b328ac02f1e470879/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:05a2bd42768ea988294ca328206efbcc66e220d2d9b7836ee5712c07ad6340ea", size = 600937, upload-time = "2025-11-16T14:48:56.247Z" }, + { url = "https://files.pythonhosted.org/packages/6c/7b/b6cfca2f9fee4c4494ce54f7fb1b9f578867495a9aa9fc0d44f5f735c8e0/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33ca7bdfedd83339ca55da3a5e1527ee5870d4b8369456b5777b197756f3ca22", size = 564496, upload-time = "2025-11-16T14:48:57.691Z" }, + { url = "https://files.pythonhosted.org/packages/b9/fb/ba29ec7f0f06eb801bac5a23057a9ff7670623b5e8013bd59bec4aa09de8/rpds_py-0.29.0-cp313-cp313-win32.whl", hash = "sha256:20c51ae86a0bb9accc9ad4e6cdeec58d5ebb7f1b09dd4466331fc65e1766aae7", size = 223126, upload-time = "2025-11-16T14:48:59.058Z" }, + { url = "https://files.pythonhosted.org/packages/3c/6b/0229d3bed4ddaa409e6d90b0ae967ed4380e4bdd0dad6e59b92c17d42457/rpds_py-0.29.0-cp313-cp313-win_amd64.whl", hash = "sha256:6410e66f02803600edb0b1889541f4b5cc298a5ccda0ad789cc50ef23b54813e", size = 239771, upload-time = "2025-11-16T14:49:00.872Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/38/d2868f058b164f8efd89754d85d7b1c08b454f5c07ac2e6cc2e9bd4bd05b/rpds_py-0.29.0-cp313-cp313-win_arm64.whl", hash = "sha256:56838e1cd9174dc23c5691ee29f1d1be9eab357f27efef6bded1328b23e1ced2", size = 229994, upload-time = "2025-11-16T14:49:02.673Z" }, + { url = "https://files.pythonhosted.org/packages/52/91/5de91c5ec7d41759beec9b251630824dbb8e32d20c3756da1a9a9d309709/rpds_py-0.29.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:37d94eadf764d16b9a04307f2ab1d7af6dc28774bbe0535c9323101e14877b4c", size = 365886, upload-time = "2025-11-16T14:49:04.133Z" }, + { url = "https://files.pythonhosted.org/packages/85/7c/415d8c1b016d5f47ecec5145d9d6d21002d39dce8761b30f6c88810b455a/rpds_py-0.29.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d472cf73efe5726a067dce63eebe8215b14beabea7c12606fd9994267b3cfe2b", size = 355262, upload-time = "2025-11-16T14:49:05.543Z" }, + { url = "https://files.pythonhosted.org/packages/3d/14/bf83e2daa4f980e4dc848aed9299792a8b84af95e12541d9e7562f84a6ef/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72fdfd5ff8992e4636621826371e3ac5f3e3b8323e9d0e48378e9c13c3dac9d0", size = 384826, upload-time = "2025-11-16T14:49:07.301Z" }, + { url = "https://files.pythonhosted.org/packages/33/b8/53330c50a810ae22b4fbba5e6cf961b68b9d72d9bd6780a7c0a79b070857/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2549d833abdf8275c901313b9e8ff8fba57e50f6a495035a2a4e30621a2f7cc4", size = 394234, upload-time = "2025-11-16T14:49:08.782Z" }, + { url = "https://files.pythonhosted.org/packages/cc/32/01e2e9645cef0e584f518cfde4567563e57db2257244632b603f61b40e50/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4448dad428f28a6a767c3e3b80cde3446a22a0efbddaa2360f4bb4dc836d0688", size = 520008, upload-time = "2025-11-16T14:49:10.253Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/c3/0d1b95a81affae2b10f950782e33a1fd2edd6ce2a479966cac98c9a66f57/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:115f48170fd4296a33938d8c11f697f5f26e0472e43d28f35624764173a60e4d", size = 409569, upload-time = "2025-11-16T14:49:12.478Z" }, + { url = "https://files.pythonhosted.org/packages/fa/60/aa3b8678f3f009f675b99174fa2754302a7fbfe749162e8043d111de2d88/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5bb73ffc029820f4348e9b66b3027493ae00bca6629129cd433fd7a76308ee", size = 385188, upload-time = "2025-11-16T14:49:13.88Z" }, + { url = "https://files.pythonhosted.org/packages/92/02/5546c1c8aa89c18d40c1fcffdcc957ba730dee53fb7c3ca3a46f114761d2/rpds_py-0.29.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:b1581fcde18fcdf42ea2403a16a6b646f8eb1e58d7f90a0ce693da441f76942e", size = 398587, upload-time = "2025-11-16T14:49:15.339Z" }, + { url = "https://files.pythonhosted.org/packages/6c/e0/ad6eeaf47e236eba052fa34c4073078b9e092bd44da6bbb35aaae9580669/rpds_py-0.29.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16e9da2bda9eb17ea318b4c335ec9ac1818e88922cbe03a5743ea0da9ecf74fb", size = 416641, upload-time = "2025-11-16T14:49:16.832Z" }, + { url = "https://files.pythonhosted.org/packages/1a/93/0acedfd50ad9cdd3879c615a6dc8c5f1ce78d2fdf8b87727468bb5bb4077/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:28fd300326dd21198f311534bdb6d7e989dd09b3418b3a91d54a0f384c700967", size = 566683, upload-time = "2025-11-16T14:49:18.342Z" }, + { url = "https://files.pythonhosted.org/packages/62/53/8c64e0f340a9e801459fc6456821abc15b3582cb5dc3932d48705a9d9ac7/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2aba991e041d031c7939e1358f583ae405a7bf04804ca806b97a5c0e0af1ea5e", size = 592730, upload-time = "2025-11-16T14:49:19.767Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/ef/3109b6584f8c4b0d2490747c916df833c127ecfa82be04d9a40a376f2090/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f437026dbbc3f08c99cc41a5b2570c6e1a1ddbe48ab19a9b814254128d4ea7a", size = 557361, upload-time = "2025-11-16T14:49:21.574Z" }, + { url = "https://files.pythonhosted.org/packages/ff/3b/61586475e82d57f01da2c16edb9115a618afe00ce86fe1b58936880b15af/rpds_py-0.29.0-cp313-cp313t-win32.whl", hash = "sha256:6e97846e9800a5d0fe7be4d008f0c93d0feeb2700da7b1f7528dabafb31dfadb", size = 211227, upload-time = "2025-11-16T14:49:23.03Z" }, + { url = "https://files.pythonhosted.org/packages/3b/3a/12dc43f13594a54ea0c9d7e9d43002116557330e3ad45bc56097ddf266e2/rpds_py-0.29.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f49196aec7c4b406495f60e6f947ad71f317a765f956d74bbd83996b9edc0352", size = 225248, upload-time = "2025-11-16T14:49:24.841Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/f0/62b5a1a723fe183650109407fa56abb433b00aa1c0b9ba555f9c4efec2c6/ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc", size = 5669501, upload-time = "2025-11-21T14:26:17.903Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/d2/7dd544116d107fffb24a0064d41a5d2ed1c9d6372d142f9ba108c8e39207/ruff-0.14.6-py3-none-linux_armv6l.whl", hash = "sha256:d724ac2f1c240dbd01a2ae98db5d1d9a5e1d9e96eba999d1c48e30062df578a3", size = 13326119, upload-time = "2025-11-21T14:25:24.2Z" }, + { url = "https://files.pythonhosted.org/packages/36/6a/ad66d0a3315d6327ed6b01f759d83df3c4d5f86c30462121024361137b6a/ruff-0.14.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9f7539ea257aa4d07b7ce87aed580e485c40143f2473ff2f2b75aee003186004", size = 13526007, upload-time = "2025-11-21T14:25:26.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/9d/dae6db96df28e0a15dea8e986ee393af70fc97fd57669808728080529c37/ruff-0.14.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7f6007e55b90a2a7e93083ba48a9f23c3158c433591c33ee2e99a49b889c6332", size = 12676572, upload-time = "2025-11-21T14:25:29.826Z" }, + { url = "https://files.pythonhosted.org/packages/76/a4/f319e87759949062cfee1b26245048e92e2acce900ad3a909285f9db1859/ruff-0.14.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8e7b9d73d8728b68f632aa8e824ef041d068d231d8dbc7808532d3629a6bef", size = 13140745, upload-time = "2025-11-21T14:25:32.788Z" }, + { url = "https://files.pythonhosted.org/packages/95/d3/248c1efc71a0a8ed4e8e10b4b2266845d7dfc7a0ab64354afe049eaa1310/ruff-0.14.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d50d45d4553a3ebcbd33e7c5e0fe6ca4aafd9a9122492de357205c2c48f00775", size = 13076486, upload-time = "2025-11-21T14:25:35.601Z" }, + { url = "https://files.pythonhosted.org/packages/a5/19/b68d4563fe50eba4b8c92aa842149bb56dd24d198389c0ed12e7faff4f7d/ruff-0.14.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:118548dd121f8a21bfa8ab2c5b80e5b4aed67ead4b7567790962554f38e598ce", size = 13727563, upload-time = "2025-11-21T14:25:38.514Z" }, + { url = "https://files.pythonhosted.org/packages/47/ac/943169436832d4b0e867235abbdb57ce3a82367b47e0280fa7b4eabb7593/ruff-0.14.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:57256efafbfefcb8748df9d1d766062f62b20150691021f8ab79e2d919f7c11f", size = 15199755, upload-time = "2025-11-21T14:25:41.516Z" }, + { url = "https://files.pythonhosted.org/packages/c9/b9/288bb2399860a36d4bb0541cb66cce3c0f4156aaff009dc8499be0c24bf2/ruff-0.14.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff18134841e5c68f8e5df1999a64429a02d5549036b394fafbe410f886e1989d", size = 14850608, upload-time = "2025-11-21T14:25:44.428Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/b1/a0d549dd4364e240f37e7d2907e97ee80587480d98c7799d2d8dc7a2f605/ruff-0.14.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c4b7ec1e66a105d5c27bd57fa93203637d66a26d10ca9809dc7fc18ec58440", size = 14118754, upload-time = "2025-11-21T14:25:47.214Z" }, + { url = "https://files.pythonhosted.org/packages/13/ac/9b9fe63716af8bdfddfacd0882bc1586f29985d3b988b3c62ddce2e202c3/ruff-0.14.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167843a6f78680746d7e226f255d920aeed5e4ad9c03258094a2d49d3028b105", size = 13949214, upload-time = "2025-11-21T14:25:50.002Z" }, + { url = "https://files.pythonhosted.org/packages/12/27/4dad6c6a77fede9560b7df6802b1b697e97e49ceabe1f12baf3ea20862e9/ruff-0.14.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:16a33af621c9c523b1ae006b1b99b159bf5ac7e4b1f20b85b2572455018e0821", size = 14106112, upload-time = "2025-11-21T14:25:52.841Z" }, + { url = "https://files.pythonhosted.org/packages/6a/db/23e322d7177873eaedea59a7932ca5084ec5b7e20cb30f341ab594130a71/ruff-0.14.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1432ab6e1ae2dc565a7eea707d3b03a0c234ef401482a6f1621bc1f427c2ff55", size = 13035010, upload-time = "2025-11-21T14:25:55.536Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9c/20e21d4d69dbb35e6a1df7691e02f363423658a20a2afacf2a2c011800dc/ruff-0.14.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c55cfbbe7abb61eb914bfd20683d14cdfb38a6d56c6c66efa55ec6570ee4e71", size = 13054082, upload-time = "2025-11-21T14:25:58.625Z" }, + { url = "https://files.pythonhosted.org/packages/66/25/906ee6a0464c3125c8d673c589771a974965c2be1a1e28b5c3b96cb6ef88/ruff-0.14.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:efea3c0f21901a685fff4befda6d61a1bf4cb43de16da87e8226a281d614350b", size = 13303354, upload-time = "2025-11-21T14:26:01.816Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/58/60577569e198d56922b7ead07b465f559002b7b11d53f40937e95067ca1c/ruff-0.14.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:344d97172576d75dc6afc0e9243376dbe1668559c72de1864439c4fc95f78185", size = 14054487, upload-time = "2025-11-21T14:26:05.058Z" }, + { url = "https://files.pythonhosted.org/packages/67/0b/8e4e0639e4cc12547f41cb771b0b44ec8225b6b6a93393176d75fe6f7d40/ruff-0.14.6-py3-none-win32.whl", hash = "sha256:00169c0c8b85396516fdd9ce3446c7ca20c2a8f90a77aa945ba6b8f2bfe99e85", size = 13013361, upload-time = "2025-11-21T14:26:08.152Z" }, + { url = "https://files.pythonhosted.org/packages/fb/02/82240553b77fd1341f80ebb3eaae43ba011c7a91b4224a9f317d8e6591af/ruff-0.14.6-py3-none-win_amd64.whl", hash = "sha256:390e6480c5e3659f8a4c8d6a0373027820419ac14fa0d2713bd8e6c3e125b8b9", size = 14432087, upload-time = "2025-11-21T14:26:10.891Z" }, + { url = "https://files.pythonhosted.org/packages/a5/1f/93f9b0fad9470e4c829a5bb678da4012f0c710d09331b860ee555216f4ea/ruff-0.14.6-py3-none-win_arm64.whl", hash = "sha256:d43c81fbeae52cfa8728d8766bbf46ee4298c888072105815b392da70ca836b2", size = 13520930, upload-time = "2025-11-21T14:26:13.951Z" }, +] + +[[package]] +name = "scipy" +version = "1.16.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043, upload-time = "2025-10-28T17:32:40.285Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986, upload-time = "2025-10-28T17:32:45.325Z" }, + { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814, upload-time = "2025-10-28T17:32:49.277Z" }, + { url = "https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795, upload-time = "2025-10-28T17:32:53.337Z" }, + { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476, upload-time = "2025-10-28T17:32:58.353Z" }, + { url = "https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692, upload-time = "2025-10-28T17:33:03.88Z" }, + { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345, upload-time = "2025-10-28T17:33:09.773Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 38678975, upload-time = "2025-10-28T17:33:15.809Z" }, + { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926, upload-time = "2025-10-28T17:33:21.388Z" }, + { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014, upload-time = "2025-10-28T17:33:25.975Z" }, + { url = "https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856, upload-time = "2025-10-28T17:33:31.375Z" }, + { url = "https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306, upload-time = "2025-10-28T17:33:36.516Z" }, + { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371, upload-time = "2025-10-28T17:33:42.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877, upload-time = "2025-10-28T17:33:48.483Z" }, + { url = "https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103, upload-time = "2025-10-28T17:33:56.495Z" }, + { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297, upload-time = "2025-10-28T17:34:04.722Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756, upload-time = "2025-10-28T17:34:13.482Z" }, + { url = "https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566, upload-time = "2025-10-28T17:34:22.384Z" }, + { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877, upload-time = "2025-10-28T17:35:51.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366, upload-time = "2025-10-28T17:35:59.014Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931, upload-time = "2025-10-28T17:34:31.451Z" }, + { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081, upload-time = "2025-10-28T17:34:39.087Z" }, + { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244, upload-time = "2025-10-28T17:34:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753, upload-time = "2025-10-28T17:34:51.793Z" }, + { url = "https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912, upload-time = "2025-10-28T17:34:59.8Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371, upload-time = "2025-10-28T17:35:08.173Z" }, + { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477, upload-time = "2025-10-28T17:35:16.7Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678, upload-time = "2025-10-28T17:35:26.354Z" }, + { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178, upload-time = "2025-10-28T17:35:35.304Z" }, + { url = "https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246, upload-time = "2025-10-28T17:35:42.155Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = 
"sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, +] + +[[package]] +name = "sphinx" +version = "8.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "alabaster" 
}, + { name = "babel" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "docutils" }, + { name = "imagesize" }, + { name = "jinja2" }, + { name = "packaging" }, + { name = "pygments" }, + { name = "requests" }, + { name = "roman-numerals-py" }, + { name = "snowballstemmer" }, + { name = "sphinxcontrib-applehelp" }, + { name = "sphinxcontrib-devhelp" }, + { name = "sphinxcontrib-htmlhelp" }, + { name = "sphinxcontrib-jsmath" }, + { name = "sphinxcontrib-qthelp" }, + { name = "sphinxcontrib-serializinghtml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" }, +] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "3.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/34/4f/4fd5583678bb7dc8afa69e9b309e6a99ee8d79ad3a4728f4e52fd7cb37c7/sphinx_autodoc_typehints-3.5.2.tar.gz", hash = "sha256:5fcd4a3eb7aa89424c1e2e32bedca66edc38367569c9169a80f4b3e934171fdb", size = 37839, upload-time = "2025-10-16T00:50:15.743Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/f2/9657c98a66973b7c35bfd48ba65d1922860de9598fbb535cd96e3f58a908/sphinx_autodoc_typehints-3.5.2-py3-none-any.whl", hash = "sha256:0accd043619f53c86705958e323b419e41667917045ac9215d7be1b493648d8c", size = 21184, upload-time = "2025-10-16T00:50:13.973Z" }, +] + +[[package]] +name = "sphinx-rtd-theme" +version = "3.0.2" +source = 
{ registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx" }, + { name = "sphinxcontrib-jquery" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463, upload-time = "2024-11-13T11:06:04.545Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561, upload-time = "2024-11-13T11:06:02.094Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-bibtex" +version = "2.6.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "pybtex" }, + { name = "pybtex-docutils" }, + { name = "sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/83/1488c9879f2fa3c2cbd6f666c7a3a42a1fa9e08462bec73281fa6c092cba/sphinxcontrib_bibtex-2.6.5.tar.gz", hash = "sha256:9b3224dd6fece9268ebd8c905dc0a83ff2f6c54148a9235fe70e9d1e9ff149c0", size = 
118462, upload-time = "2025-06-27T10:40:14.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/a0/3a612da94f828f26cabb247817393e79472c32b12c49222bf85fb6d7b6c8/sphinxcontrib_bibtex-2.6.5-py3-none-any.whl", hash = "sha256:455ea4509642ea0b28ede3721550273626f85af65af01f161bfd8e19dc1edd7d", size = 40410, upload-time = "2025-06-27T10:40:12.274Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"sphinx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = "2023-03-14T15:01:00.356Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = 
"sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asttokens" }, + { name = "executing" }, + { name = "pure-eval" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, +] + +[[package]] +name = "sympy" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mpmath" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash 
= "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = 
"https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, +] + +[[package]] +name = "tinycss2" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ 
+ { name = "webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" }, +] + +[[package]] +name = "tokenize-rt" +version = "6.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/ed/8f07e893132d5051d86a553e749d5c89b2a4776eb3a579b72ed61f8559ca/tokenize_rt-6.2.0.tar.gz", hash = "sha256:8439c042b330c553fdbe1758e4a05c0ed460dbbbb24a606f11f0dee75da4cad6", size = 5476, upload-time = "2025-05-23T23:48:00.035Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/f0/3fe8c6e69135a845f4106f2ff8b6805638d4e85c264e70114e8126689587/tokenize_rt-6.2.0-py2.py3-none-any.whl", hash = "sha256:a152bf4f249c847a66497a4a95f63376ed68ac6abf092a2f7cfb29d044ecff44", size = 6004, upload-time = "2025-05-23T23:47:58.812Z" }, +] + +[[package]] +name = "tokenizers" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = 
"sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, + { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, + { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, + { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, + { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, + { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time 
= "2025-09-19T09:49:27.089Z" }, + { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "torch" +version = "2.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "jinja2" }, + { name = "networkx" }, + { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufile-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvshmem-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = 
"nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "setuptools" }, + { name = "sympy" }, + { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "typing-extensions" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/27/07c645c7673e73e53ded71705045d6cb5bae94c4b021b03aa8d03eee90ab/torch-2.9.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:da5f6f4d7f4940a173e5572791af238cb0b9e21b1aab592bd8b26da4c99f1cd6", size = 104126592, upload-time = "2025-11-12T15:20:41.62Z" }, + { url = "https://files.pythonhosted.org/packages/19/17/e377a460603132b00760511299fceba4102bd95db1a0ee788da21298ccff/torch-2.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:27331cd902fb4322252657f3902adf1c4f6acad9dcad81d8df3ae14c7c4f07c4", size = 899742281, upload-time = "2025-11-12T15:22:17.602Z" }, + { url = "https://files.pythonhosted.org/packages/b1/1a/64f5769025db846a82567fa5b7d21dba4558a7234ee631712ee4771c436c/torch-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083", size = 110940568, upload-time = "2025-11-12T15:21:18.689Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ab/07739fd776618e5882661d04c43f5b5586323e2f6a2d7d84aac20d8f20bd/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e", size = 74479191, upload-time = "2025-11-12T15:21:25.816Z" }, + { url = "https://files.pythonhosted.org/packages/20/60/8fc5e828d050bddfab469b3fe78e5ab9a7e53dda9c3bdc6a43d17ce99e63/torch-2.9.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c29455d2b910b98738131990394da3e50eea8291dfeb4b12de71ecf1fdeb21cb", size = 104135743, upload-time = "2025-11-12T15:21:34.936Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/b7/6d3f80e6918213babddb2a37b46dbb14c15b14c5f473e347869a51f40e1f/torch-2.9.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:524de44cd13931208ba2c4bde9ec7741fd4ae6bfd06409a604fc32f6520c2bc9", size = 899749493, upload-time = "2025-11-12T15:24:36.356Z" }, + { url = "https://files.pythonhosted.org/packages/a6/47/c7843d69d6de8938c1cbb1eba426b1d48ddf375f101473d3e31a5fc52b74/torch-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:545844cc16b3f91e08ce3b40e9c2d77012dd33a48d505aed34b7740ed627a1b2", size = 110944162, upload-time = "2025-11-12T15:21:53.151Z" }, + { url = "https://files.pythonhosted.org/packages/28/0e/2a37247957e72c12151b33a01e4df651d9d155dd74d8cfcbfad15a79b44a/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e", size = 74830751, upload-time = "2025-11-12T15:21:43.792Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f7/7a18745edcd7b9ca2381aa03353647bca8aace91683c4975f19ac233809d/torch-2.9.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:30a3e170a84894f3652434b56d59a64a2c11366b0ed5776fab33c2439396bf9a", size = 104142929, upload-time = "2025-11-12T15:21:48.319Z" }, + { url = "https://files.pythonhosted.org/packages/f4/dd/f1c0d879f2863ef209e18823a988dc7a1bf40470750e3ebe927efdb9407f/torch-2.9.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8301a7b431e51764629208d0edaa4f9e4c33e6df0f2f90b90e261d623df6a4e2", size = 899748978, upload-time = "2025-11-12T15:23:04.568Z" }, + { url = "https://files.pythonhosted.org/packages/1f/9f/6986b83a53b4d043e36f3f898b798ab51f7f20fdf1a9b01a2720f445043d/torch-2.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2e1c42c0ae92bf803a4b2409fdfed85e30f9027a66887f5e7dcdbc014c7531db", size = 111176995, upload-time = "2025-11-12T15:22:01.618Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/60/71c698b466dd01e65d0e9514b5405faae200c52a76901baf6906856f17e4/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587", size = 74480347, upload-time = "2025-11-12T15:21:57.648Z" }, +] + +[[package]] +name = "tornado" +version = "6.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821, upload-time = "2025-08-08T18:27:00.78Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563, upload-time = "2025-08-08T18:26:42.945Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729, upload-time = "2025-08-08T18:26:44.473Z" }, + { url = "https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295, upload-time = "2025-08-08T18:26:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644, upload-time = 
"2025-08-08T18:26:47.625Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878, upload-time = "2025-08-08T18:26:50.599Z" }, + { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549, upload-time = "2025-08-08T18:26:51.864Z" }, + { url = "https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973, upload-time = "2025-08-08T18:26:53.625Z" }, + { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954, upload-time = "2025-08-08T18:26:55.072Z" }, + { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023, upload-time = "2025-08-08T18:26:56.677Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427, upload-time = "2025-08-08T18:26:57.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456, upload-time = "2025-08-08T18:26:59.207Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, +] + +[[package]] +name = "triton" +version = "3.5.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f2/50/9a8358d3ef58162c0a415d173cfb45b67de60176e1024f71fbc4d24c0b6d/triton-3.5.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2c6b915a03888ab931a9fd3e55ba36785e1fe70cbea0b40c6ef93b20fc85232", size = 170470207, upload-time = "2025-11-11T17:41:00.253Z" }, + { url = "https://files.pythonhosted.org/packages/27/46/8c3bbb5b0a19313f50edcaa363b599e5a1a5ac9683ead82b9b80fe497c8d/triton-3.5.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3f4346b6ebbd4fad18773f5ba839114f4826037c9f2f34e0148894cd5dd3dba", size = 170470410, upload-time = "2025-11-11T17:41:06.319Z" }, + { url = "https://files.pythonhosted.org/packages/37/92/e97fcc6b2c27cdb87ce5ee063d77f8f26f19f06916aa680464c8104ef0f6/triton-3.5.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0b4d2c70127fca6a23e247f9348b8adde979d2e7a20391bfbabaac6aebc7e6a8", size = 170579924, upload-time = "2025-11-11T17:41:12.455Z" }, +] + +[[package]] +name = "typer-slim" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8e/45/81b94a52caed434b94da65729c03ad0fb7665fab0f7db9ee54c94e541403/typer_slim-0.20.0.tar.gz", hash = "sha256:9fc6607b3c6c20f5c33ea9590cbeb17848667c51feee27d9e314a579ab07d1a3", size = 106561, upload-time = "2025-10-20T17:03:46.642Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/dd/5cbf31f402f1cc0ab087c94d4669cfa55bd1e818688b910631e131d74e75/typer_slim-0.20.0-py3-none-any.whl", hash = "sha256:f42a9b7571a12b97dddf364745d29f12221865acef7a2680065f9bb29c7dc89d", size = 47087, upload-time = "2025-10-20T17:03:44.546Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = 
"sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "wcwidth" +version = "0.2.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, +] + +[[package]] +name = "wrapt" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/49/2a/6de8a50cb435b7f42c46126cf1a54b2aab81784e74c8595c8e025e8f36d3/wrapt-2.0.1.tar.gz", hash = "sha256:9c9c635e78497cacb81e84f8b11b23e0aacac7a136e73b8e5b2109a1d9fc468f", size = 82040, upload-time = "2025-11-07T00:45:33.312Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cb/73/8cb252858dc8254baa0ce58ce382858e3a1cf616acebc497cb13374c95c6/wrapt-2.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1fdbb34da15450f2b1d735a0e969c24bdb8d8924892380126e2a293d9902078c", size = 78129, upload-time = "2025-11-07T00:43:48.852Z" }, + { url = "https://files.pythonhosted.org/packages/19/42/44a0db2108526ee6e17a5ab72478061158f34b08b793df251d9fbb9a7eb4/wrapt-2.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3d32794fe940b7000f0519904e247f902f0149edbe6316c710a8562fb6738841", size = 61205, upload-time = "2025-11-07T00:43:50.402Z" }, + { url = "https://files.pythonhosted.org/packages/4d/8a/5b4b1e44b791c22046e90d9b175f9a7581a8cc7a0debbb930f81e6ae8e25/wrapt-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:386fb54d9cd903ee0012c09291336469eb7b244f7183d40dc3e86a16a4bace62", size = 61692, upload-time = "2025-11-07T00:43:51.678Z" }, + { url = "https://files.pythonhosted.org/packages/11/53/3e794346c39f462bcf1f58ac0487ff9bdad02f9b6d5ee2dc84c72e0243b2/wrapt-2.0.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7b219cb2182f230676308cdcacd428fa837987b89e4b7c5c9025088b8a6c9faf", size = 121492, upload-time = "2025-11-07T00:43:55.017Z" }, + { url = "https://files.pythonhosted.org/packages/c6/7e/10b7b0e8841e684c8ca76b462a9091c45d62e8f2de9c4b1390b690eadf16/wrapt-2.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:641e94e789b5f6b4822bb8d8ebbdfc10f4e4eae7756d648b717d980f657a9eb9", size = 123064, upload-time = "2025-11-07T00:43:56.323Z" }, + { url = "https://files.pythonhosted.org/packages/0e/d1/3c1e4321fc2f5ee7fd866b2d822aa89b84495f28676fd976c47327c5b6aa/wrapt-2.0.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe21b118b9f58859b5ebaa4b130dee18669df4bd111daad082b7beb8799ad16b", size = 117403, upload-time = "2025-11-07T00:43:53.258Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/b0/d2f0a413cf201c8c2466de08414a15420a25aa83f53e647b7255cc2fab5d/wrapt-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:17fb85fa4abc26a5184d93b3efd2dcc14deb4b09edcdb3535a536ad34f0b4dba", size = 121500, upload-time = "2025-11-07T00:43:57.468Z" }, + { url = "https://files.pythonhosted.org/packages/bd/45/bddb11d28ca39970a41ed48a26d210505120f925918592283369219f83cc/wrapt-2.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:b89ef9223d665ab255ae42cc282d27d69704d94be0deffc8b9d919179a609684", size = 116299, upload-time = "2025-11-07T00:43:58.877Z" }, + { url = "https://files.pythonhosted.org/packages/81/af/34ba6dd570ef7a534e7eec0c25e2615c355602c52aba59413411c025a0cb/wrapt-2.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a453257f19c31b31ba593c30d997d6e5be39e3b5ad9148c2af5a7314061c63eb", size = 120622, upload-time = "2025-11-07T00:43:59.962Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3e/693a13b4146646fb03254636f8bafd20c621955d27d65b15de07ab886187/wrapt-2.0.1-cp312-cp312-win32.whl", hash = "sha256:3e271346f01e9c8b1130a6a3b0e11908049fe5be2d365a5f402778049147e7e9", size = 58246, upload-time = "2025-11-07T00:44:03.169Z" }, + { url = "https://files.pythonhosted.org/packages/a7/36/715ec5076f925a6be95f37917b66ebbeaa1372d1862c2ccd7a751574b068/wrapt-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:2da620b31a90cdefa9cd0c2b661882329e2e19d1d7b9b920189956b76c564d75", size = 60492, upload-time = "2025-11-07T00:44:01.027Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3e/62451cd7d80f65cc125f2b426b25fbb6c514bf6f7011a0c3904fc8c8df90/wrapt-2.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:aea9c7224c302bc8bfc892b908537f56c430802560e827b75ecbde81b604598b", size = 58987, upload-time = "2025-11-07T00:44:02.095Z" }, + { url = "https://files.pythonhosted.org/packages/ad/fe/41af4c46b5e498c90fc87981ab2972fbd9f0bccda597adb99d3d3441b94b/wrapt-2.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:47b0f8bafe90f7736151f61482c583c86b0693d80f075a58701dd1549b0010a9", size = 78132, upload-time = "2025-11-07T00:44:04.628Z" }, + { url = "https://files.pythonhosted.org/packages/1c/92/d68895a984a5ebbbfb175512b0c0aad872354a4a2484fbd5552e9f275316/wrapt-2.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cbeb0971e13b4bd81d34169ed57a6dda017328d1a22b62fda45e1d21dd06148f", size = 61211, upload-time = "2025-11-07T00:44:05.626Z" }, + { url = "https://files.pythonhosted.org/packages/e8/26/ba83dc5ae7cf5aa2b02364a3d9cf74374b86169906a1f3ade9a2d03cf21c/wrapt-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb7cffe572ad0a141a7886a1d2efa5bef0bf7fe021deeea76b3ab334d2c38218", size = 61689, upload-time = "2025-11-07T00:44:06.719Z" }, + { url = "https://files.pythonhosted.org/packages/cf/67/d7a7c276d874e5d26738c22444d466a3a64ed541f6ef35f740dbd865bab4/wrapt-2.0.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8d60527d1ecfc131426b10d93ab5d53e08a09c5fa0175f6b21b3252080c70a9", size = 121502, upload-time = "2025-11-07T00:44:09.557Z" }, + { url = "https://files.pythonhosted.org/packages/0f/6b/806dbf6dd9579556aab22fc92908a876636e250f063f71548a8660382184/wrapt-2.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c654eafb01afac55246053d67a4b9a984a3567c3808bb7df2f8de1c1caba2e1c", size = 123110, upload-time = "2025-11-07T00:44:10.64Z" }, + { url = "https://files.pythonhosted.org/packages/e5/08/cdbb965fbe4c02c5233d185d070cabed2ecc1f1e47662854f95d77613f57/wrapt-2.0.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:98d873ed6c8b4ee2418f7afce666751854d6d03e3c0ec2a399bb039cd2ae89db", size = 117434, upload-time = "2025-11-07T00:44:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/2d/d1/6aae2ce39db4cb5216302fa2e9577ad74424dfbe315bd6669725569e048c/wrapt-2.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:c9e850f5b7fc67af856ff054c71690d54fa940c3ef74209ad9f935b4f66a0233", size = 121533, upload-time = "2025-11-07T00:44:12.142Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/565abf57559fbe0a9155c29879ff43ce8bd28d2ca61033a3a3dd67b70794/wrapt-2.0.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e505629359cb5f751e16e30cf3f91a1d3ddb4552480c205947da415d597f7ac2", size = 116324, upload-time = "2025-11-07T00:44:13.28Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e0/53ff5e76587822ee33e560ad55876d858e384158272cd9947abdd4ad42ca/wrapt-2.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2879af909312d0baf35f08edeea918ee3af7ab57c37fe47cb6a373c9f2749c7b", size = 120627, upload-time = "2025-11-07T00:44:14.431Z" }, + { url = "https://files.pythonhosted.org/packages/7c/7b/38df30fd629fbd7612c407643c63e80e1c60bcc982e30ceeae163a9800e7/wrapt-2.0.1-cp313-cp313-win32.whl", hash = "sha256:d67956c676be5a24102c7407a71f4126d30de2a569a1c7871c9f3cabc94225d7", size = 58252, upload-time = "2025-11-07T00:44:17.814Z" }, + { url = "https://files.pythonhosted.org/packages/85/64/d3954e836ea67c4d3ad5285e5c8fd9d362fd0a189a2db622df457b0f4f6a/wrapt-2.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9ca66b38dd642bf90c59b6738af8070747b610115a39af2498535f62b5cdc1c3", size = 60500, upload-time = "2025-11-07T00:44:15.561Z" }, + { url = "https://files.pythonhosted.org/packages/89/4e/3c8b99ac93527cfab7f116089db120fef16aac96e5f6cdb724ddf286086d/wrapt-2.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:5a4939eae35db6b6cec8e7aa0e833dcca0acad8231672c26c2a9ab7a0f8ac9c8", size = 58993, upload-time = "2025-11-07T00:44:16.65Z" }, + { url = "https://files.pythonhosted.org/packages/f9/f4/eff2b7d711cae20d220780b9300faa05558660afb93f2ff5db61fe725b9a/wrapt-2.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a52f93d95c8d38fed0669da2ebdb0b0376e895d84596a976c15a9eb45e3eccb3", size = 82028, upload-time = "2025-11-07T00:44:18.944Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/67/cb945563f66fd0f61a999339460d950f4735c69f18f0a87ca586319b1778/wrapt-2.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e54bbf554ee29fcceee24fa41c4d091398b911da6e7f5d7bffda963c9aed2e1", size = 62949, upload-time = "2025-11-07T00:44:20.074Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ca/f63e177f0bbe1e5cf5e8d9b74a286537cd709724384ff20860f8f6065904/wrapt-2.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:908f8c6c71557f4deaa280f55d0728c3bca0960e8c3dd5ceeeafb3c19942719d", size = 63681, upload-time = "2025-11-07T00:44:21.345Z" }, + { url = "https://files.pythonhosted.org/packages/39/a1/1b88fcd21fd835dca48b556daef750952e917a2794fa20c025489e2e1f0f/wrapt-2.0.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e2f84e9af2060e3904a32cea9bb6db23ce3f91cfd90c6b426757cf7cc01c45c7", size = 152696, upload-time = "2025-11-07T00:44:24.318Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/d9185500c1960d9f5f77b9c0b890b7fc62282b53af7ad1b6bd779157f714/wrapt-2.0.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3612dc06b436968dfb9142c62e5dfa9eb5924f91120b3c8ff501ad878f90eb3", size = 158859, upload-time = "2025-11-07T00:44:25.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/60/5d796ed0f481ec003220c7878a1d6894652efe089853a208ea0838c13086/wrapt-2.0.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d2d947d266d99a1477cd005b23cbd09465276e302515e122df56bb9511aca1b", size = 146068, upload-time = "2025-11-07T00:44:22.81Z" }, + { url = "https://files.pythonhosted.org/packages/04/f8/75282dd72f102ddbfba137e1e15ecba47b40acff32c08ae97edbf53f469e/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7d539241e87b650cbc4c3ac9f32c8d1ac8a54e510f6dca3f6ab60dcfd48c9b10", size = 155724, upload-time = "2025-11-07T00:44:26.634Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/27/fe39c51d1b344caebb4a6a9372157bdb8d25b194b3561b52c8ffc40ac7d1/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4811e15d88ee62dbf5c77f2c3ff3932b1e3ac92323ba3912f51fc4016ce81ecf", size = 144413, upload-time = "2025-11-07T00:44:27.939Z" }, + { url = "https://files.pythonhosted.org/packages/83/2b/9f6b643fe39d4505c7bf926d7c2595b7cb4b607c8c6b500e56c6b36ac238/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c1c91405fcf1d501fa5d55df21e58ea49e6b879ae829f1039faaf7e5e509b41e", size = 150325, upload-time = "2025-11-07T00:44:29.29Z" }, + { url = "https://files.pythonhosted.org/packages/bb/b6/20ffcf2558596a7f58a2e69c89597128781f0b88e124bf5a4cadc05b8139/wrapt-2.0.1-cp313-cp313t-win32.whl", hash = "sha256:e76e3f91f864e89db8b8d2a8311d57df93f01ad6bb1e9b9976d1f2e83e18315c", size = 59943, upload-time = "2025-11-07T00:44:33.211Z" }, + { url = "https://files.pythonhosted.org/packages/87/6a/0e56111cbb3320151eed5d3821ee1373be13e05b376ea0870711f18810c3/wrapt-2.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:83ce30937f0ba0d28818807b303a412440c4b63e39d3d8fc036a94764b728c92", size = 63240, upload-time = "2025-11-07T00:44:30.935Z" }, + { url = "https://files.pythonhosted.org/packages/1d/54/5ab4c53ea1f7f7e5c3e7c1095db92932cc32fd62359d285486d00c2884c3/wrapt-2.0.1-cp313-cp313t-win_arm64.whl", hash = "sha256:4b55cacc57e1dc2d0991dbe74c6419ffd415fb66474a02335cb10efd1aa3f84f", size = 60416, upload-time = "2025-11-07T00:44:32.002Z" }, + { url = "https://files.pythonhosted.org/packages/15/d1/b51471c11592ff9c012bd3e2f7334a6ff2f42a7aed2caffcf0bdddc9cb89/wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca", size = 44046, upload-time = "2025-11-07T00:45:32.116Z" }, +] + +[[package]] +name = "yarl" +version = "1.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist 
= { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, + { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, + { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, + { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, + { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, + { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, + { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, + { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, + { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, + { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, + { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, + { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, + { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, + { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, + { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, + { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, + { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] From 2094f22f1941264fce6f8f055159ef005f88ba8d Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Wed, 31 Dec 2025 10:49:10 -0500 Subject: [PATCH 28/39] Remove program synthesis code (#475) * remove program synthesis code * lint * mark xfail --- docs/source/llm.ipynb | 47 ---------------- effectful/handlers/llm/synthesis.py | 86 +---------------------------- tests/test_handlers_llm.py | 1 + tests/test_handlers_llm_provider.py | 1 + 4 files changed, 5 insertions(+), 130 deletions(-) diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index 64f342b0..433f9dc8 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -9,10 +9,8 @@ "source": [ "import dataclasses\n", "import functools\n", - "import inspect\n", 
"import logging\n", "import sys\n", - "from collections.abc import Callable\n", "\n", "import pydantic\n", "from pydantic import ValidationError, field_validator\n", @@ -26,7 +24,6 @@ " RetryLLMHandler,\n", " completion,\n", ")\n", - "from effectful.handlers.llm.synthesis import ProgramSynthesis\n", "from effectful.ops.semantics import NotHandled, fwd, handler\n", "\n", "provider = LiteLLMProvider()" @@ -217,50 +214,6 @@ " assert type(primes(6)) is int" ] }, - { - "cell_type": "markdown", - "id": "36d78a71", - "metadata": {}, - "source": [ - "More complex types can be converted by providing handlers for `decode`. `ProgramSynthesisIntp` provides a `decode` handler that parses Python callables." - ] - }, - { - "cell_type": "code", - "execution_count": 38, - "id": "c83bbdc0", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "def count_a(s: str) -> int:\n", - " count = 0\n", - " for character in s:\n", - " if character == 'a':\n", - " count += 1\n", - " return count\n", - "\n" - ] - } - ], - "source": [ - "@Template.define\n", - "def count_char(char: str) -> Callable[[str], int]:\n", - " \"\"\"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\"\"\"\n", - " raise NotHandled\n", - "\n", - "\n", - "with handler(provider), handler(ProgramSynthesis()):\n", - " count_a = count_char(\"a\")\n", - " assert callable(count_a)\n", - " assert count_a(\"banana\") == 3\n", - " assert count_a(\"cherry\") == 0\n", - " # Print the source code of the generated function\n", - " print(inspect.getsource(count_a))" - ] - }, { "cell_type": "markdown", "id": "991ee445", diff --git a/effectful/handlers/llm/synthesis.py b/effectful/handlers/llm/synthesis.py index 6624b72a..3db32fd7 100644 --- a/effectful/handlers/llm/synthesis.py +++ b/effectful/handlers/llm/synthesis.py @@ -1,13 +1,4 @@ -import ast -import collections.abc -import linecache -import re -import textwrap -import typing - -from effectful.handlers.llm import Template -from effectful.ops.semantics import fwd -from effectful.ops.syntax import ObjectInterpretation, implements +from effectful.ops.syntax import ObjectInterpretation class SynthesisError(Exception): @@ -19,76 +10,5 @@ def __init__(self, message, code=None): class ProgramSynthesis(ObjectInterpretation): - """Provides a `template` handler to instruct the LLM to generate code of the - right form and with the right type. 
- - """ - - def _parse_and_eval[T](self, t: type[T], content: str) -> T: - pattern = r"(.*?)" - code_content = re.search(pattern, content, re.DOTALL) - if code_content is None: - raise SynthesisError(" tags not found", content) - code = code_content.group(1) - - try: - module_ast = ast.parse(code) - except SyntaxError as exc: - raise SynthesisError("failed to parse", content) from exc - - if not isinstance(module_ast, ast.Module): - raise SynthesisError("not a module", content) - - last_decl = module_ast.body[-1] - if not isinstance(last_decl, ast.FunctionDef): - raise SynthesisError("last definition not a function", content) - - source_code = textwrap.dedent(code) - lines = code.splitlines(keepends=True) - filename = f"" - - # register into linecache - linecache.cache[filename] = (len(source_code), None, lines, filename) - - # TODO: support injecting lexical context for synthesized code - gs: dict = {} - try: - code_obj = compile(source_code, filename, "exec") - exec(code_obj, gs) - except Exception as exc: - raise SynthesisError("evaluation failed", content) from exc - - return gs[last_decl.name] - - @implements(Template.__apply__) - def _apply[**P, T](self, template: Template[P, T], *args, **kwargs) -> T: - ret_type = template.__signature__.return_annotation - origin = typing.get_origin(ret_type) - ret_type = ret_type if origin is None else origin - - if not (issubclass(ret_type, collections.abc.Callable)): # type: ignore[arg-type] - return fwd() - - prompt_ext = textwrap.dedent(f""" - Generate a Python function satisfying the following specification and type signature. - - {template.__prompt_template__} - {str(ret_type)} - - - 1. Produce one block of Python code. - 2. Do not include usage examples. - 3. Return your response in tags. - 4. Do not return your response in markdown blocks. - 5. Your output function def must be the final statement in the code block. 
- - """).strip() - - fresh_template: Template[P, str] = Template.replace( - template, - prompt_template=prompt_ext, - signature=template.__signature__.replace(return_annotation=str), - ) - response = fresh_template(*args, **kwargs) - functional = self._parse_and_eval(ret_type, response) - return functional + def __init__(self, *args, **kwargs): + raise NotImplementedError diff --git a/tests/test_handlers_llm.py b/tests/test_handlers_llm.py index ad1c9b9c..6a87f15a 100644 --- a/tests/test_handlers_llm.py +++ b/tests/test_handlers_llm.py @@ -118,6 +118,7 @@ def test_primes_decode_int(): assert isinstance(result, int) +@pytest.mark.xfail(reason="Synthesis handler not yet implemented") def test_count_char_with_program_synthesis(): """Test the count_char template with program synthesis.""" mock_code = """ diff --git a/tests/test_handlers_llm_provider.py b/tests/test_handlers_llm_provider.py index 7c460905..e263698a 100644 --- a/tests/test_handlers_llm_provider.py +++ b/tests/test_handlers_llm_provider.py @@ -275,6 +275,7 @@ def test_custom_logger(self, request, caplog): ) +@pytest.mark.xfail(reason="Program synthesis not implemented") class TestProgramSynthesis: """Tests for ProgramSynthesis handler functionality.""" From 05b28ef61b1bce213a903d53a3f41b4155d53c86 Mon Sep 17 00:00:00 2001 From: Kiran Gopinathan <23038502+kiranandcode@users.noreply.github.com> Date: Wed, 31 Dec 2025 15:59:56 +0000 Subject: [PATCH 29/39] Disables direct recursion on templates by default (#466) * fixed failing test * made templates non-recursive by default + annotation for recursion * added docstring for IsRecursive * call IsRecursive.infer_annotations in tool's init * added check for sig.parameters --- effectful/handlers/llm/template.py | 59 +++++++++++++++++++++++++++-- tests/test_handlers_llm.py | 12 +++--- tests/test_handlers_llm_provider.py | 12 +++--- 3 files changed, 69 insertions(+), 14 deletions(-) diff --git a/effectful/handlers/llm/template.py b/effectful/handlers/llm/template.py 
index 9c999ce2..9a692b6b 100644 --- a/effectful/handlers/llm/template.py +++ b/effectful/handlers/llm/template.py @@ -4,9 +4,59 @@ from collections import ChainMap from collections.abc import Callable, Mapping, MutableMapping from dataclasses import dataclass -from typing import Any +from typing import Annotated, Any -from effectful.ops.types import INSTANCE_OP_PREFIX, NotHandled, Operation +from effectful.ops.types import INSTANCE_OP_PREFIX, Annotation, NotHandled, Operation + + +class _IsRecursiveAnnotation(Annotation): + """ + A special type annotation for return types in the signature of a + :class:`Template` that indicates it may make recursive calls. + + .. warning:: + + :class:`IsRecursive` annotations are only defined to ascribe + return annotations, and if used in a parameter will raise a + :class:`TypeError` at tool construction time. + + + + **Example usage**: + + We illustrate the use of :class:`IsRecursive` below: + + >>> from typing import Annotated + >>> from effectful.handlers.llm import Template + >>> from effectful.handlers.llm.template import IsRecursive + + >>> + @Template.define + def factorial(n: int) -> Annotated[int, IsRecursive]: + \"""Compute the n factorial for n={n}. Can call itself (`factorial`) recursively, but must be on smaller arguments.\""" + raise NotHandled + """ + + @classmethod + def infer_annotations(cls, sig: inspect.Signature) -> inspect.Signature: + for name, ty in sig.parameters.items(): + if not ty or not typing.get_origin(ty) is Annotated: + continue + if any(isinstance(arg, cls) for arg in typing.get_args(ty)): + raise TypeError( + f"Illegal annotation {ty} for parameter {name}, IsRecursive must only be used to annotate return types." 
+ ) + return sig + + +IsRecursive = _IsRecursiveAnnotation() + + +def _is_recursive_signature(sig: inspect.Signature): + if typing.get_origin(sig.return_annotation) is not Annotated: + return False + annotations = typing.get_args(sig.return_annotation) + return any(annotation is IsRecursive for annotation in annotations) class Tool[**P, T](Operation[P, T]): @@ -15,7 +65,7 @@ def __init__( ): if not default.__doc__: raise ValueError("Tools must have docstrings.") - + signature = IsRecursive.infer_annotations(signature) super().__init__(signature, name, default) @classmethod @@ -40,8 +90,11 @@ def __prompt_template__(self) -> str: def tools(self) -> Mapping[str, Tool]: """Operations and Templates available as tools. Auto-capture from lexical context.""" result = {} + is_recursive = _is_recursive_signature(self.__signature__) for name, obj in self.__context__.items(): + if obj is self and not is_recursive: + continue # Collect tools in context if isinstance(obj, Tool): result[name] = obj diff --git a/tests/test_handlers_llm.py b/tests/test_handlers_llm.py index 6a87f15a..2fd0fd42 100644 --- a/tests/test_handlers_llm.py +++ b/tests/test_handlers_llm.py @@ -1,10 +1,12 @@ from collections.abc import Callable +from typing import Annotated import pytest from effectful.handlers.llm import Template from effectful.handlers.llm.providers import RetryLLMHandler from effectful.handlers.llm.synthesis import ProgramSynthesis +from effectful.handlers.llm.template import IsRecursive from effectful.ops.semantics import NotHandled, handler from effectful.ops.syntax import ObjectInterpretation, implements @@ -83,13 +85,13 @@ def count_char(char: str) -> Callable[[str], int]: # Mutually recursive templates (module-level for live globals) @Template.define -def mutual_a() -> str: +def mutual_a() -> Annotated[str, IsRecursive]: """Use mutual_a and mutual_b as tools to do task A.""" raise NotHandled @Template.define -def mutual_b() -> str: +def mutual_b() -> Annotated[str, IsRecursive]: 
"""Use mutual_a and mutual_b as tools to do task B.""" raise NotHandled @@ -253,12 +255,12 @@ def test_template_captures_other_templates_in_lexical_context(): # Define sub-templates first @Template.define def story_with_moral(topic: str) -> str: - """Write a story about {topic} with a moral lesson. Do not use any tools at all for this.""" + """Write a story about {topic} with a moral lesson.""" raise NotHandled @Template.define def story_funny(topic: str) -> str: - """Write a funny story about {topic}. Do not use any tools at all for this.""" + """Write a funny story about {topic}.""" raise NotHandled # Main orchestrator template has access to sub-templates @@ -284,7 +286,7 @@ def test_template_composition_with_chained_calls(): @Template.define def generate_topic() -> str: - """Generate an interesting topic for a story. Do not try to use any tools for this beside from write_story.""" + """Generate an interesting topic for a story.""" raise NotHandled @Template.define diff --git a/tests/test_handlers_llm_provider.py b/tests/test_handlers_llm_provider.py index e263698a..4006913f 100644 --- a/tests/test_handlers_llm_provider.py +++ b/tests/test_handlers_llm_provider.py @@ -133,26 +133,25 @@ class MovieClassification: @Template.define def classify_genre(plot: str) -> MovieClassification: - """Classify the movie genre based on this plot: {plot}. Do not use any tools.""" + """Classify the movie genre based on this plot: {plot}.""" raise NotImplementedError @Template.define def simple_prompt(topic: str) -> str: - """Write a short sentence about {topic}. You MUST respond directly without using any tools.""" + """Write a short sentence about {topic}.""" raise NotImplementedError @Template.define def generate_number(max_value: int) -> int: - """Generate a random number between 1 and {max_value}. Return only the number. 
Do not use any tools.""" + """Generate a random number between 1 and {max_value}.""" raise NotImplementedError @Template.define def create_function(char: str) -> Callable[[str], int]: """Create a function that counts occurrences of the character '{char}' in a string. - Do not use any tools. Return as a code block with the last definition being the function. """ @@ -279,6 +278,7 @@ def test_custom_logger(self, request, caplog): class TestProgramSynthesis: """Tests for ProgramSynthesis handler functionality.""" + @pytest.mark.xfail @requires_openai @retry_on_error(error=SynthesisError, n=3) def test_generates_callable(self, request): @@ -318,7 +318,7 @@ def smiley_face() -> Image.Image: @Template.define def categorise_image(image: Image.Image) -> str: - """Return a description of the following image. Do not use any tools. + """Return a description of the following image. {image}""" raise NotHandled @@ -342,7 +342,7 @@ class BookReview(BaseModel): @Template.define def review_book(plot: str) -> BookReview: - """Review a book based on this plot: {plot}. 
Do not use any tools.""" + """Review a book based on this plot: {plot}.""" raise NotImplementedError From d91d4c956744f8a4a71b726f618329ecb92c8911 Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Wed, 31 Dec 2025 17:07:15 -0500 Subject: [PATCH 30/39] drop k-ahead sampler (#479) --- effectful/handlers/llm/sampling.py | 47 ------------------------------ 1 file changed, 47 deletions(-) delete mode 100644 effectful/handlers/llm/sampling.py diff --git a/effectful/handlers/llm/sampling.py b/effectful/handlers/llm/sampling.py deleted file mode 100644 index 8497d88a..00000000 --- a/effectful/handlers/llm/sampling.py +++ /dev/null @@ -1,47 +0,0 @@ -from collections import Counter -from concurrent import futures -from concurrent.futures.thread import ThreadPoolExecutor - -from effectful.handlers.llm import Template -from effectful.internals.runtime import get_interpretation, interpreter -from effectful.ops.semantics import fwd -from effectful.ops.syntax import ObjectInterpretation, implements - - -class KAheadSampler[**P, T](ObjectInterpretation): - no_voters: int - k: int - """Number of votes ahead before an answer is accepted""" - votes: Counter[T] = Counter() - - def __init__(self, no_voters: int = 6, k: int = 3): - self.no_voters = no_voters - self.k = k - - @implements(Template.__apply__) - def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: - executor = ThreadPoolExecutor() - intp = get_interpretation() - tasks = [ - executor.submit(interpreter(intp)(fwd), *args, **kwargs) - for _ in range(self.no_voters) - ] - - def n_votes_ahead(): - match self.votes.most_common(2): - case [[_, v1], [_, v2]]: - return v1 >= v2 + self.k - case [[_, v1]]: - return v1 >= self.k - case _: - return False - - while not n_votes_ahead(): - done, remain = futures.wait(tasks, return_when=futures.FIRST_COMPLETED) - tasks = list(remain) - for fut in done: - res = fut.result() - self.votes[res] += 1 - tasks.append(executor.submit(interpreter(intp)(fwd), *args, **kwargs)) - 
executor.shutdown() - return self.votes.most_common(1)[0][0] From e3e8c7e06004d76b54caef4b64f097b60a503603 Mon Sep 17 00:00:00 2001 From: Jack Feser Date: Wed, 31 Dec 2025 21:20:12 -0500 Subject: [PATCH 31/39] Document `Template` and `Tool` (#478) * add documentation to template * Add dataclass import --------- Co-authored-by: eb8680 --- docs/source/effectful.rst | 21 +++++++ effectful/handlers/llm/template.py | 99 ++++++++++++++++++++++++++++-- 2 files changed, 116 insertions(+), 4 deletions(-) diff --git a/docs/source/effectful.rst b/docs/source/effectful.rst index 78e3e7e3..0817fd00 100644 --- a/docs/source/effectful.rst +++ b/docs/source/effectful.rst @@ -40,6 +40,27 @@ Handlers :undoc-members: +LLM +^^^ + +.. automodule:: effectful.handlers.llm + :members: + :undoc-members: + +Encoding +"""""""" + +.. automodule:: effectful.handlers.llm.encoding + :members: + :undoc-members: + +Providers +""""""""" + +.. automodule:: effectful.handlers.llm.providers + :members: + :undoc-members: + Jax ^^^ diff --git a/effectful/handlers/llm/template.py b/effectful/handlers/llm/template.py index 9a692b6b..6ae9ac32 100644 --- a/effectful/handlers/llm/template.py +++ b/effectful/handlers/llm/template.py @@ -60,6 +60,33 @@ def _is_recursive_signature(sig: inspect.Signature): class Tool[**P, T](Operation[P, T]): + """A :class:`Tool` is a function that may be called by a :class:`Template`. + + **Example usage:** + + Templates may call any tool that is in their lexical scope. 
+ In the following example, the LLM suggests a vacation destination using the :code:`cities` and :code:`weather` tools.:: + + @Tool.define + def cities() -> list[str]: + \"\"\"Return a list of cities that can be passed to `weather`.\"\"\" + return ["Chicago", "New York", "Barcelona"] + + @Tool.define + def weather(city: str) -> str: + \"\"\"Given a city name, return a description of the weather in that city.\"\"\" + status = {"Chicago": "cold", "New York": "wet", "Barcelona": "sunny"} + return status.get(city, "unknown") + + @Template.define # cities and weather auto-captured from lexical scope + def vacation() -> str: + \"\"\"Use the `cities` and `weather` tools to suggest a city that has good weather.\"\"\" + raise NotHandled + + Class methods may be used as templates, in which case any other methods decorated with :func:`Tool.define` will be provided as tools. + + """ + def __init__( self, signature: inspect.Signature, name: str, default: Callable[P, T] ): @@ -70,6 +97,11 @@ def __init__( @classmethod def define(cls, *args, **kwargs) -> "Tool[P, T]": + """Define a tool. + + See :func:`effectful.ops.types.Operation.define` for more information on the use of :func:`Tool.define`. + + """ return typing.cast("Tool[P, T]", super().define(*args, **kwargs)) @@ -79,6 +111,64 @@ class _BoundInstance[T]: class Template[**P, T](Tool[P, T]): + """A :class:`Template` is a function that is implemented by a large language model. + + **Constructing Templates:** + + Templates are constructed by calling :func:`Template.define`. + `Template.define` should be used as a decorator on a function or method. + The function must be fully type-annotated and have a docstring. + The body of the function must contain only :code:`raise NotHandled`. + See :func:`effectful.ops.types.Operation.define` for more information on the use of :func:`Template.define`. + + The template docstring is a `format string `__, which may refer to the template arguments. 
+ When the template is called, the arguments and docstring are formatted into a prompt for the LLM and the LLM's response is returned. + + The following template writes limericks on a given theme: + + >>> @Template.define + ... def limerick(theme: str) -> str: + ... \"\"\"Write a limerick on the theme of {theme}. Do not use any tools.\"\"\" + ... raise NotHandled + + **Structured output:** + + Templates may return types that are not strings. + The output from the LLM is then decoded before being returned to the user. + + For example, this template returns integers: + + >>> @Template.define + ... def primes(first_digit: int) -> int: + ... \"\"\"Give a prime number with {first_digit} as the first digit. Do not use any tools.\"\"\" + ... raise NotHandled + + Structured generation is used to constrain the LLM to return values that can be decoded without error. + + Templates can return complex data structures, such as dataclasses: + + >>> import dataclasses + >>> @dataclasses.dataclass + ... class KnockKnockJoke: + ... whos_there: str + ... punchline: str + + >>> @Template.define + ... def write_joke(theme: str) -> KnockKnockJoke: + ... \"\"\"Write a knock-knock joke on the theme of {theme}. Do not use any tools.\"\"\" + ... raise NotHandled + + Many common Python data types are decodable without additional effort. + To register a decoder for a custom type, see :func:`effectful.handlers.llm.encoding.type_to_encodable_type`. + + **Using tools:** + + Instances of :class:`Tool` that are in the lexical scope of a :class:`Template` may be called by the LLM during template completion. + Templates are themselves tools which enables the construction of complex agent workflows. + When a method is defined as a template, other methods on the class that are decorated with :func:`Tool.define` or :func:`Template.define` are provided to the template as tools. 
+ + """ + __context__: ChainMap[str, Any] @property @@ -131,10 +221,11 @@ def define[**Q, V]( ) -> "Template[Q, V]": """Define a prompt template. - `define` takes a function, and can be used as a decorator. The - function's docstring should be a prompt, which may be templated in the - function arguments. The prompt will be provided with any instances of - `Tool` that exist in the lexical context as callable tools. + :func:`define` takes a function and can be used as a decorator. + The function's docstring should be a prompt, which may be templated in the function arguments. + The prompt will be provided with any instances of :class:`Tool` that exist in the lexical context as callable tools. + + See :func:`effectful.ops.types.Operation.define` for more information on the use of :func:`Template.define`. """ frame = inspect.currentframe() From 7ffe7f8cb1b1ca3b9a6d5afe81b4712358c85ed3 Mon Sep 17 00:00:00 2001 From: datvo06 Date: Mon, 5 Jan 2026 13:37:56 -0500 Subject: [PATCH 32/39] Encodable Type --- effectful/handlers/llm/encodable_type.py | 193 +++++++++++++++++ tests/test_handlers_llm_encoding.py | 253 +++++++++++++++++++++++ 2 files changed, 446 insertions(+) create mode 100644 effectful/handlers/llm/encodable_type.py diff --git a/effectful/handlers/llm/encodable_type.py b/effectful/handlers/llm/encodable_type.py new file mode 100644 index 00000000..32c6f51e --- /dev/null +++ b/effectful/handlers/llm/encodable_type.py @@ -0,0 +1,193 @@ +"""Encodable type for LLM-synthesized classes.""" + +import ast +import collections +import ctypes +import inspect +import linecache +import sys +import textwrap +import types +import typing +from collections import ChainMap +from typing import Any + +import pydantic +from pydantic import Field + +from effectful.handlers.llm.encoding import EncodableAs, type_to_encodable_type +from effectful.handlers.llm.providers import OpenAIMessageContentListBlock +from effectful.handlers.llm.synthesis import SynthesisError + + +class 
_PyMappingProxyObject(ctypes.Structure): + """Internal ctypes structure to access the underlying dict of a mappingproxy.""" + + _fields_ = [ + ("ob_refcnt", ctypes.c_ssize_t), + ("ob_type", ctypes.py_object), + ("mapping", ctypes.py_object), + ] + + +class SynthesizedType(pydantic.BaseModel): + """Structured output for type/class synthesis. + + Pydantic model representing synthesized class code with type name and module code. + """ + + type_name: str = Field( + ..., + description="The name of the class that satisfies the specification", + ) + module_code: str = Field( + ..., + description="Complete Python module code with the class definition (no imports needed)", + ) + + +@type_to_encodable_type.register(type) +class EncodableSynthesizedType( + EncodableAs[type, SynthesizedType], +): + """Encodes type to SynthesizedType and vice versa.""" + + t = SynthesizedType + + @classmethod + def encode( + cls, vl: type, context: ChainMap[str, Any] | None = None + ) -> SynthesizedType: + """Encode a type to a SynthesizedType. + + Extracts the type name and source code. + """ + type_name = vl.__name__ + try: + source = inspect.getsource(vl) + except (OSError, TypeError): + # If we can't get source, create a minimal representation + source = f"class {type_name}: pass # Source unavailable" + + return SynthesizedType( + type_name=type_name, module_code=textwrap.dedent(source).strip() + ) + + # Counter for unique filenames + _decode_counter: typing.ClassVar[int] = 0 + + @classmethod + def decode(cls, vl: SynthesizedType) -> type: + """Decode a SynthesizedType to a type. + + Executes the module code and returns the named class. + Uses _decode_context attribute on vl if present (set by TypeSynthesis handler). 
+ """ + context: ChainMap[str, Any] | None = getattr(vl, "_decode_context", None) + type_name = vl.type_name + module_code = textwrap.dedent(vl.module_code).strip() + "\n" + + # Create a unique filename and register source with linecache + # This allows inspect.getsource() to work on the generated class + cls._decode_counter += 1 + # NOTE: adding source to class is more tricky + # because for function func.__code__.co_filename (set by compile(..., filename, "exec")) is set automatically + # We have to do this manually for class (set module name) for inspect.getsource() to work + module_name = ( + f"_llm_effectful_synthesized_types.{type_name}.{cls._decode_counter}" + ) + filename = f"" + + # Register source for inspect/linecache + lines = module_code.splitlines(keepends=True) + # Ensure last line has newline for linecache + if lines and not lines[-1].endswith("\n"): + lines[-1] += "\n" + linecache.cache[filename] = ( + len(module_code), + None, + lines, + filename, + ) + + # Create a real module and put it to sys.modules + mod = types.ModuleType(module_name) + mod.__file__ = filename + sys.modules[module_name] = mod + + # globals = module.__dict__ + context + g = mod.__dict__ + g.update({"collections": collections}) + if context is not None: + g.update(context) + g.update({"__name__": module_name, "__file__": filename}) + g.setdefault("__package__", module_name.rpartition(".")[0]) + + try: + # NOTE: Parse and inject __firstlineno__ into class bodies for Python 3.13+ compatibility + # inspect.getsource() looks for __firstlineno__ in vars(cls), which requires it to be in the class's __dict__. + # We inject it via AST before execution. 
+ tree = ast.parse(module_code) + for node in ast.walk(tree): + if isinstance(node, ast.ClassDef): + # Create: __firstlineno__ = + assign = ast.Assign( + targets=[ast.Name(id="__firstlineno__", ctx=ast.Store())], + value=ast.Constant(value=node.lineno), + lineno=node.lineno, + col_offset=0, + ) + ast.fix_missing_locations(assign) + node.body.insert(0, assign) + ast.fix_missing_locations(tree) + code_obj = compile(tree, filename, "exec") + exec(code_obj, g, g) + except SyntaxError as exc: + raise SynthesisError( + f"Syntax error in generated code: {exc}", module_code + ) from exc + except Exception as exc: + raise SynthesisError(f"Evaluation failed: {exc!r}", module_code) from exc + + if type_name not in g: + raise SynthesisError( + f"Type '{type_name}' not found after execution. " + f"Available names: {[k for k in g.keys() if not k.startswith('_')]}", + module_code, + ) + + synthesized_type = g[type_name] + + if not isinstance(synthesized_type, type): + raise SynthesisError( + f"'{type_name}' is not a type, got {type(synthesized_type).__name__}", + module_code, + ) + + # Attach source code and module name + synthesized_type.__source__ = module_code # type: ignore[attr-defined] + synthesized_type.__synthesized__ = vl # type: ignore[attr-defined] + synthesized_type.__module__ = module_name + + # NOTE: Set __firstlineno__ AFTER __module__ assignment! + # In Python 3.13, setting __module__ clears __firstlineno__ from vars(). + # We use ctypes to directly inject it into __dict__ for inspect.getsource(). 
+ if "__firstlineno__" not in vars(synthesized_type): + firstlineno = next( + ( + n.lineno + for n in ast.walk(ast.parse(module_code)) + if isinstance(n, ast.ClassDef) and n.name == type_name + ), + 1, + ) + inner_dict = _PyMappingProxyObject.from_address( + id(vars(synthesized_type)) + ).mapping + inner_dict["__firstlineno__"] = firstlineno + + return synthesized_type + + @classmethod + def serialize(cls, vl: SynthesizedType) -> list[OpenAIMessageContentListBlock]: + return [{"type": "text", "text": vl.model_dump_json()}] diff --git a/tests/test_handlers_llm_encoding.py b/tests/test_handlers_llm_encoding.py index 41999a45..742afd78 100644 --- a/tests/test_handlers_llm_encoding.py +++ b/tests/test_handlers_llm_encoding.py @@ -1,3 +1,5 @@ +import inspect +from collections import ChainMap from dataclasses import asdict, dataclass from typing import NamedTuple, TypedDict @@ -5,7 +7,12 @@ import pytest from PIL import Image +from effectful.handlers.llm.encodable_type import ( + EncodableSynthesizedType, + SynthesizedType, +) from effectful.handlers.llm.encoding import type_to_encodable_type +from effectful.handlers.llm.synthesis import SynthesisError from effectful.ops.types import Operation, Term @@ -718,3 +725,249 @@ class Person(pydantic.BaseModel): assert decoded_from_model == person assert isinstance(decoded_from_model, Person) assert isinstance(decoded_from_model.address, Address) + + +class TestEncodableSynthesizedType: + """Tests for EncodableSynthesizedType encode/decode functionality.""" + + def test_decode_simple_class(self): + """Test decoding a simple class from SynthesizedType.""" + synth = SynthesizedType( + type_name="Greeter", + module_code="""\ +class Greeter: + def greet(self, name: str) -> str: + return f"Hello, {name}!" 
+""", + ) + + result = EncodableSynthesizedType.decode(synth) + + assert isinstance(result, type) + assert result.__name__ == "Greeter" + + # Test instantiation and method call + instance = result() + assert instance.greet("World") == "Hello, World!" + + def test_decode_with_inheritance(self): + """Test decoding a class that inherits from a base class in context.""" + + class Animal: + def speak(self) -> str: + raise NotImplementedError + + synth = SynthesizedType( + type_name="Dog", + module_code="""\ +class Dog(Animal): + def speak(self) -> str: + return "Woof!" +""", + ) + + # Attach context with base class + object.__setattr__(synth, "_decode_context", ChainMap({"Animal": Animal})) + + result = EncodableSynthesizedType.decode(synth) + + assert isinstance(result, type) + assert issubclass(result, Animal) + assert result.__name__ == "Dog" + + instance = result() + assert instance.speak() == "Woof!" + + def test_decode_attaches_source_attribute(self): + """Test that decoded types have __source__ attribute.""" + synth = SynthesizedType( + type_name="Simple", + module_code="class Simple:\n pass", + ) + + result = EncodableSynthesizedType.decode(synth) + + assert hasattr(result, "__source__") + assert "class Simple" in result.__source__ + + def test_decode_attaches_synthesized_attribute(self): + """Test that decoded types have __synthesized__ attribute.""" + synth = SynthesizedType( + type_name="Simple", + module_code="class Simple:\n pass", + ) + + result = EncodableSynthesizedType.decode(synth) + + assert hasattr(result, "__synthesized__") + assert result.__synthesized__ is synth + + def test_decode_inspect_getsource_works(self): + """Test that inspect.getsource() works on synthesized types.""" + synth = SynthesizedType( + type_name="Documented", + module_code='''\ +class Documented: + """A documented class.""" + + def method(self) -> int: + return 42 +''', + ) + + result = EncodableSynthesizedType.decode(synth) + source = inspect.getsource(result) + + assert 
"class Documented" in source + assert "A documented class" in source + assert "def method" in source + assert source == result.__source__ + + def test_decode_with_helper_in_class(self): + """Test decoding a class that uses a helper method.""" + synth = SynthesizedType( + type_name="Counter", + module_code="""\ +class Counter: + def __init__(self): + self.value = 0 + + def _increment(self, x): + return x + 1 + + def increment(self): + self.value = self._increment(self.value) + return self.value +""", + ) + + result = EncodableSynthesizedType.decode(synth) + instance = result() + + assert instance.increment() == 1 + assert instance.increment() == 2 + assert instance.increment() == 3 + + def test_decode_syntax_error_raises_synthesis_error(self): + """Test that syntax errors raise SynthesisError.""" + synth = SynthesizedType( + type_name="Broken", + module_code="class Broken\n pass # missing colon", + ) + + with pytest.raises(SynthesisError, match="Syntax error"): + EncodableSynthesizedType.decode(synth) + + def test_decode_missing_type_raises_synthesis_error(self): + """Test that missing type name raises SynthesisError.""" + synth = SynthesizedType( + type_name="Missing", + module_code="class WrongName:\n pass", + ) + + with pytest.raises(SynthesisError, match="not found after execution"): + EncodableSynthesizedType.decode(synth) + + def test_decode_non_type_raises_synthesis_error(self): + """Test that non-type result raises SynthesisError.""" + synth = SynthesizedType( + type_name="NotAType", + module_code="NotAType = 42", + ) + + with pytest.raises(SynthesisError, match="is not a type"): + EncodableSynthesizedType.decode(synth) + + def test_encode_simple_class(self): + """Test encoding a simple class to SynthesizedType.""" + + class MyClass: + def method(self) -> str: + return "hello" + + result = EncodableSynthesizedType.encode(MyClass) + + assert isinstance(result, SynthesizedType) + assert result.type_name == "MyClass" + assert "class MyClass" in 
result.module_code + assert "def method" in result.module_code + + def test_encode_builtin_class_fallback(self): + """Test encoding a builtin class (source unavailable) uses fallback.""" + # int is a builtin, so inspect.getsource() will fail + result = EncodableSynthesizedType.encode(int) + + assert isinstance(result, SynthesizedType) + assert result.type_name == "int" + assert "class int" in result.module_code + assert "Source unavailable" in result.module_code + + def test_serialize_produces_json(self): + """Test that serialize produces valid JSON content blocks.""" + synth = SynthesizedType( + type_name="TestType", + module_code="class TestType:\n pass", + ) + + result = EncodableSynthesizedType.serialize(synth) + + assert isinstance(result, list) + assert len(result) == 1 + assert result[0]["type"] == "text" + # Verify it's valid JSON + import json + + parsed = json.loads(result[0]["text"]) + assert parsed["type_name"] == "TestType" + assert "class TestType" in parsed["module_code"] + + def test_decode_unique_module_names(self): + """Test that each decoded type gets a unique module name.""" + synth1 = SynthesizedType( + type_name="Unique", + module_code="class Unique:\n value = 1", + ) + synth2 = SynthesizedType( + type_name="Unique", + module_code="class Unique:\n value = 2", + ) + + result1 = EncodableSynthesizedType.decode(synth1) + result2 = EncodableSynthesizedType.decode(synth2) + + # Both should be different types with different module names + assert result1 is not result2 + assert result1.__module__ != result2.__module__ + assert result1.value == 1 + assert result2.value == 2 + + def test_decode_context_with_multiple_items(self): + """Test decoding with context containing multiple items.""" + + class BaseA: + pass + + class BaseB: + pass + + def helper() -> int: + return 100 + + synth = SynthesizedType( + type_name="Combined", + module_code="""\ +class Combined(BaseA, BaseB): + def get_value(self) -> int: + return helper() +""", + ) + + context = 
ChainMap({"BaseA": BaseA, "BaseB": BaseB, "helper": helper}) + object.__setattr__(synth, "_decode_context", context) + + result = EncodableSynthesizedType.decode(synth) + + assert issubclass(result, BaseA) + assert issubclass(result, BaseB) + instance = result() + assert instance.get_value() == 100 From 9a1a3702f0dcb93212cbd4c885c3325f4764afab Mon Sep 17 00:00:00 2001 From: datvo06 Date: Thu, 26 Feb 2026 12:24:07 -0500 Subject: [PATCH 33/39] Clean up --- docs/source/llm.ipynb | 485 --------- effectful/handlers/llm/__init__.py | 6 - effectful/handlers/llm/encoding.py | 241 ----- effectful/handlers/llm/template.py | 103 -- ...LLMProvider__test_integer_return_type.json | 41 - ...ompt_cross_endpoint[claude-haiku-4-5].json | 40 - ...le_prompt_cross_endpoint[gpt-4o-mini].json | 41 - ...e_prompt_multiple_models[gpt-4o-mini].json | 41 - ...le_prompt_multiple_models[gpt-5-nano].json | 41 - ...teLLMProvider__test_structured_output.json | 41 - ...eLLMProvider__test_with_config_params.json | 41 - ...eturn__test_pydantic_basemodel_return.json | 41 - ...ers_llm_provider.py__test_image_input.json | 41 - tests/test_handlers_llm.py | 112 -- tests/test_handlers_llm_encoding.py | 976 ------------------ tests/test_handlers_llm_provider.py | 158 --- tests/test_handlers_llm_template.py | 149 --- tests/test_handlers_llm_tool_calling_book.py | 20 - tests/test_handlers_llm_tool_calling_poem.py | 30 - 19 files changed, 2648 deletions(-) diff --git a/docs/source/llm.ipynb b/docs/source/llm.ipynb index 0a5faa82..c738adca 100644 --- a/docs/source/llm.ipynb +++ b/docs/source/llm.ipynb @@ -1,10 +1,6 @@ { "cells": [ { -<<<<<<< HEAD - "cell_type": "code", - "execution_count": 33, -======= "cell_type": "markdown", "id": "e7fda1b8", "metadata": {}, @@ -21,31 +17,10 @@ { "cell_type": "code", "execution_count": 1, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "5aaf649f", "metadata": {}, "outputs": [], "source": [ -<<<<<<< HEAD - "import dataclasses\n", - "import functools\n", - "import 
logging\n", - "import sys\n", - "\n", - "import pydantic\n", - "from pydantic import ValidationError, field_validator\n", - "from pydantic_core import PydanticCustomError\n", - "\n", - "from effectful.handlers.llm import Template, Tool\n", - "from effectful.handlers.llm.providers import (\n", - " CacheLLMRequestHandler,\n", - " LiteLLMProvider,\n", - " LLMLoggingHandler,\n", - " RetryLLMHandler,\n", - " completion,\n", - ")\n", - "from effectful.ops.semantics import NotHandled, fwd, handler\n", -======= "import base64\n", "import dataclasses\n", "import functools\n", @@ -66,30 +41,16 @@ " RetryLLMHandler,\n", ")\n", "from effectful.ops.semantics import NotHandled, handler\n", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "\n", "provider = LiteLLMProvider()" ] }, { "cell_type": "markdown", -<<<<<<< HEAD - "id": "2f9e861b", - "metadata": {}, - "source": [ - "## Interface\n", - "\n", - "The `robotl.ops.llm` module provides a simplified LLM interface that uses algebraic effects to provide modularity. The module interface consists of:\n", - "\n", - "- A decorator `template` which creates a prompt template from a callable. We should think of the prompt template as an LLM-implemented function with behavior specified by a template string. When a templated function is called, an LLM is invoked to produce the specified behavior. The `__call__` method of a template is a handleable operation.\n", - "- An operation `decode` which parses LLM output. `decode(t: type, c: str)` converts an LLM response `c` to the type `t`. It can be handled to provide decoding logic for particular types.\n", - "- Interpretations for LLM providers `OpenAIIntp` and callable decoding `ProgramSynthesisIntp`. These interpretations can be composed to handle a variety of template behaviors." -======= "id": "093243e0", "metadata": {}, "source": [ "In the following sections, we walk through each of the mentioned components." 
->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] }, { @@ -104,11 +65,7 @@ }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 34, -======= "execution_count": 2, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "1e832675", "metadata": {}, "outputs": [], @@ -131,11 +88,7 @@ }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 35, -======= "execution_count": 3, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "634f6533", "metadata": {}, "outputs": [ @@ -143,19 +96,6 @@ "name": "stdout", "output_type": "stream", "text": [ -<<<<<<< HEAD - "In the ocean so vast and so wide, \n", - "A little fish tried hard to hide. \n", - "With scales shining bright, \n", - "It gave quite a sight, \n", - "And swam with the current and tide.\n", - "----------------------------------------\n", - "In the sea swam a fish with a grin, \n", - "Who loved to flip and to spin, \n", - "He danced through the tide, \n", - "With friends by his side, \n", - "A joyous splash with his slippery fin!\n" -======= "In the sea where the shimmering fish \n", "Dance around like a silvery wish,\n", "They wiggle and glide,\n", @@ -167,7 +107,6 @@ "With scales shining bright,\n", "He'd dance in the light,\n", "And none were as charming as Blue.\n" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] } ], @@ -183,20 +122,12 @@ "id": "2e59acbc", "metadata": {}, "source": [ -<<<<<<< HEAD - "If we want deterministic behavior, we can cache the template call. We can either cache it with the default `@functools.cache` or using `CacheLLMRequestHandler`:" -======= "If we want deterministic behavior, we can cache the template call. 
We can either cache it with the default `@functools.cache` or use LiteLLM's built-in cache by setting a cache backend and passing `caching=True` to the provider:" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 36, -======= "execution_count": 4, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "706ce53b", "metadata": {}, "outputs": [ @@ -205,31 +136,6 @@ "output_type": "stream", "text": [ "\n", -<<<<<<< HEAD - "Glimmering scales shine, \n", - "Dancing in the water's flow, \n", - "Silent whispers swim. \n", - "----------------------------------------\n", - "Glimmering scales shine, \n", - "Dancing in the water's flow, \n", - "Silent whispers swim. \n", - "\n", - "In clear water's dance, \n", - "Silent scales shimmer and glide, \n", - "Fish weave nature's trance. \n", - "----------------------------------------\n", - "In clear water's dance, \n", - "Silent scales shimmer and glide, \n", - "Fish weave nature's trance. \n", - "\n", - "Fish swim in clear stream, \n", - "Scales shimmer in sunlight glow, \n", - "Nature's quiet dance.\n", - "----------------------------------------\n", - "Fish swim in clear stream, \n", - "Scales shimmer in sunlight glow, \n", - "Nature's quiet dance.\n" -======= "Silent stream below,\n", "Gleaming scales in dancing waves—\n", "Fish glide through cool dreams.\n", @@ -269,7 +175,6 @@ "In the whispering stream,\n", "silver scales dance and shimmer—\n", "a fleeting shadow.\n" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] } ], @@ -294,17 +199,6 @@ " print(haiku(\"fish\"))\n", "\n", "print()\n", -<<<<<<< HEAD - "cache_handler1 = CacheLLMRequestHandler()\n", - "with handler(provider), handler(cache_handler1):\n", - " print(haiku_no_cache(\"fish2\"))\n", - " print(\"-\" * 40)\n", - " print(haiku_no_cache(\"fish2\"))\n", - "\n", - "print()\n", - "cache_handler2 = CacheLLMRequestHandler()\n", - "with handler(provider), handler(cache_handler2):\n", -======= "# Enable 
LiteLLM caching by setting a cache backend and enabling caching.\n", "litellm.cache = Cache()\n", "provider_cached = LiteLLMProvider(caching=True)\n", @@ -318,7 +212,6 @@ "\n", "print()\n", "with handler(provider):\n", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 " print(haiku_no_cache(\"fish3\"))\n", " print(\"-\" * 40)\n", " print(haiku_no_cache(\"fish3\"))" @@ -336,11 +229,7 @@ }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 37, -======= "execution_count": 5, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "2c766859", "metadata": {}, "outputs": [], @@ -357,8 +246,6 @@ }, { "cell_type": "markdown", -<<<<<<< HEAD -======= "id": "36d78a71", "metadata": {}, "source": [ @@ -405,7 +292,6 @@ }, { "cell_type": "markdown", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "991ee445", "metadata": {}, "source": [ @@ -418,11 +304,7 @@ }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 39, -======= "execution_count": 7, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "66711301", "metadata": {}, "outputs": [ @@ -430,20 +312,12 @@ "name": "stdout", "output_type": "stream", "text": [ -<<<<<<< HEAD - "Tool call: cities(*(), **{}) -> ['Chicago', 'New York', 'Barcelona']\n", - "Tool call: weather(*(), **{'city': 'Chicago'}) -> cold\n", - "Tool call: weather(*(), **{'city': 'New York'}) -> wet\n", - "Tool call: weather(*(), **{'city': 'Barcelona'}) -> sunny\n", - "Based on the weather conditions, Barcelona has good weather as it is sunny.\n" -======= "Based on the weather descriptions:\n", "- **Chicago**: Cold\n", "- **New York**: Wet\n", "- **Barcelona**: Sunny\n", "\n", "I suggest Barcelona since it has sunny weather, which is generally considered good for most people.\n" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] } ], @@ -467,24 +341,12 @@ " raise NotHandled\n", "\n", "\n", -<<<<<<< HEAD - "def log_tool_call(tool, *args, **kwargs):\n", - " result = fwd()\n", - " print(f\"Tool call: {tool}(*{args}, **{kwargs}) -> 
{result}\")\n", - " return result\n", - "\n", - "\n", - "with handler(provider), handler({Tool.__apply__: log_tool_call}):\n", -======= "with handler(provider):\n", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 " print(vacation())" ] }, { "cell_type": "markdown", -<<<<<<< HEAD -======= "id": "59584a54", "metadata": {}, "source": [ @@ -548,7 +410,6 @@ }, { "cell_type": "markdown", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "3d221feb", "metadata": {}, "source": [ @@ -559,11 +420,7 @@ }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 40, -======= "execution_count": 9, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "17668ac8", "metadata": {}, "outputs": [ @@ -576,13 +433,8 @@ "Who's there?\n", "Lizard.\n", "Lizard who?\n", -<<<<<<< HEAD - "Lizard who? Lizard you! Open the door, it's chilly out here!\n", - "> The crowd stares in stony silence.\n" -======= "Lizard who? Lizard be a joke if I wasn't at your door!\n", "> The crowd laughs politely.\n" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] } ], @@ -623,110 +475,6 @@ }, { "cell_type": "markdown", -<<<<<<< HEAD - "id": "0cab62b5", - "metadata": {}, - "source": [ - "### Logging LLM requests\n", - "To intercept messages being called on the lower-level, we can write a handler for `completion`:" - ] - }, - { - "cell_type": "code", - "execution_count": 41, - "id": "cbf495a2", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "> Write a haiku on the theme of fish2. Do not use any tools.\n", - "In waters serene, \n", - "Gliding scales of silver bright, \n", - "Whispers of the sea.\n", - "> Write a limerick on the theme of fish. Do not use any tools.\n", - "In the ocean where fish love to play, \n", - "A trout took a boat for a day. 
\n", - "He swam with a wail, \n", - "To the tip of his tail, \n", - "And waved to the marlins in May.\n" - ] - } - ], - "source": [ - "def log_llm(*args, **kwargs):\n", - " result = fwd()\n", - "\n", - " print(f\"> {kwargs['messages'][0]['content'][0]['text']}\")\n", - " print(result.choices[0].message.content)\n", - " return result\n", - "\n", - "\n", - "# Avoid cache\n", - "try:\n", - " haiku.cache_clear()\n", - "except Exception:\n", - " pass\n", - "\n", - "# Put completion handler innermost so it has highest precedence during the call\n", - "with handler(provider), handler({completion: log_llm}):\n", - " _ = haiku(\"fish2\")\n", - " _ = limerick(\"fish\") # or use haiku(\"fish-2\") to avoid cache" - ] - }, - { - "cell_type": "markdown", - "id": "8e8e531d", - "metadata": {}, - "source": [ - "### Python logging for LLM requests and tool calls\n", - "We can also uses Python logger through `LLMLoggingHandler` to log both low-level LLM requests (`completion`) and model-initiated tool use (`tool_call`):\n" - ] - }, - { - "cell_type": "code", - "execution_count": 42, - "id": "81a15f00", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. 
Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNvfUe5SAUhZMSz1tNAzVXYgVmK', created=1766441379, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"In waters they glide, \\nSilver scales shimmer and dance, \\nNature's silent grace. 
\", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=643, total_tokens=663, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a haiku on the theme of fish3. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNvfUe5SAUhZMSz1tNAzVXYgVmK', created=1766441379, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"In waters they glide, \\nSilver scales shimmer and dance, \\nNature's silent grace. \", role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=20, prompt_tokens=643, total_tokens=663, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNwNX281AjkWQ1dQ4k6EdUhYLcY', created=1766441380, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"theme\":\"fish\"}', name='limerick'), id='call_saz9unenzuVoXATZ5fCZZ8Bt', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=15, prompt_tokens=644, total_tokens=659, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNwNX281AjkWQ1dQ4k6EdUhYLcY', created=1766441380, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='tool_calls', index=0, message=Message(content=None, role='assistant', tool_calls=[ChatCompletionMessageToolCall(function=Function(arguments='{\"theme\":\"fish\"}', name='limerick'), id='call_saz9unenzuVoXATZ5fCZZ8Bt', type='function')], function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=15, prompt_tokens=644, total_tokens=659, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNxuLZfAwYkaGF3fQh9qG200UyU', created=1766441381, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=45, prompt_tokens=643, total_tokens=688, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish. Do not use any tools.'}], 'role': 'user'}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. 
Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNxuLZfAwYkaGF3fQh9qG200UyU', created=1766441381, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content='In a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=45, prompt_tokens=643, total_tokens=688, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. 
Do not use any tools.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"theme\":\"fish\"}', 'name': 'limerick'}, 'id': 'call_saz9unenzuVoXATZ5fCZZ8Bt', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_saz9unenzuVoXATZ5fCZZ8Bt', 'name': 'limerick', 'content': [{'type': 'text', 'text': 'In a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. '}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNzarHb5Rc5y8Lns7KYZRUs9MKu', created=1766441383, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Here\\'s a limerick on the theme of \"fish\":\\n\\nIn a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=57, prompt_tokens=712, total_tokens=769, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n", - "INFO {'args': (), 'kwargs': {'messages': [{'type': 'message', 'content': [{'type': 'text', 'text': 'Write a limerick on the theme of fish4. 
Do not use any tools.'}], 'role': 'user'}, {'content': None, 'role': 'assistant', 'tool_calls': [{'function': {'arguments': '{\"theme\":\"fish\"}', 'name': 'limerick'}, 'id': 'call_saz9unenzuVoXATZ5fCZZ8Bt', 'type': 'function'}], 'function_call': None, 'provider_specific_fields': {'refusal': None}, 'annotations': []}, {'role': 'tool', 'tool_call_id': 'call_saz9unenzuVoXATZ5fCZZ8Bt', 'name': 'limerick', 'content': [{'type': 'text', 'text': 'In a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. '}]}], 'response_format': None, 'tools': [{'type': 'function', 'function': {'name': 'vacation', 'description': 'Use the provided tools to suggest a city that has good weather. Use only the `cities` and `weather` tools provided.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'count_char', 'description': \"Write a function which takes a string and counts the occurrances of '{char}'. Do not use any tools.\", 'parameters': {'additionalProperties': False, 'properties': {'char': {'title': 'Char', 'type': 'string'}}, 'required': ['char'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_funny', 'description': 'Write a funny, humorous story about {topic}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'primes', 'description': 'Give a prime number with {first_digit} as the first digit. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'first_digit': {'title': 'First Digit', 'type': 'integer'}}, 'required': ['first_digit'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'weather', 'description': 'Given a city name, return a description of the weather in that city.', 'parameters': {'additionalProperties': False, 'properties': {'city': {'title': 'City', 'type': 'string'}}, 'required': ['city'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'story_with_moral', 'description': 'Write a short story about {topic} and end with a moral lesson. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}}, 'required': ['topic'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'haiku_no_cache', 'description': 'Write a haiku on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_joke', 'description': 'Write a knock-knock joke on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'unstable_service', 'description': 'Fetch data from an unstable external service. May require retries.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'rate_joke', 'description': 'Decide if {joke} is funny or not. 
Do not use any tools.', 'parameters': {'$defs': {'KnockKnockJoke': {'properties': {'whos_there': {'title': 'Whos There', 'type': 'string'}, 'punchline': {'title': 'Punchline', 'type': 'string'}}, 'required': ['whos_there', 'punchline'], 'title': 'KnockKnockJoke', 'type': 'object', 'additionalProperties': False}}, 'additionalProperties': False, 'properties': {'joke': {'$ref': '#/$defs/KnockKnockJoke'}}, 'required': ['joke'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'write_story', 'description': \"Write a story about {topic} in the style: {style}.\\n Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\", 'parameters': {'additionalProperties': False, 'properties': {'topic': {'title': 'Topic', 'type': 'string'}, 'style': {'title': 'Style', 'type': 'string'}}, 'required': ['topic', 'style'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'fetch_data', 'description': 'Use the unstable_service tool to fetch data. Do not use the fetch_data tool.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}, {'type': 'function', 'function': {'name': 'limerick', 'description': 'Write a limerick on the theme of {theme}. Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'theme': {'title': 'Theme', 'type': 'string'}}, 'required': ['theme'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'give_rating_for_movie', 'description': 'Give a rating for {movie_name}. The explanation MUST include the numeric score. 
Do not use any tools.', 'parameters': {'additionalProperties': False, 'properties': {'movie_name': {'title': 'Movie Name', 'type': 'string'}}, 'required': ['movie_name'], 'title': 'Params', 'type': 'object'}, 'strict': True}}, {'type': 'function', 'function': {'name': 'cities', 'description': 'Return a list of cities that can be passed to `weather`.', 'parameters': {'additionalProperties': False, 'properties': {}, 'title': 'Params', 'type': 'object', 'required': []}, 'strict': True}}]}, 'response': ModelResponse(id='chatcmpl-CpiNzarHb5Rc5y8Lns7KYZRUs9MKu', created=1766441383, model='gpt-4o-2024-08-06', object='chat.completion', system_fingerprint='fp_deacdd5f6f', choices=[Choices(finish_reason='stop', index=0, message=Message(content='Here\\'s a limerick on the theme of \"fish\":\\n\\nIn a pond where the waters were still, \\nA fish had a wish to fulfill, \\nHe leaped in the air, \\nWith debonair flair, \\nAnd splashed down with plenty of thrill. ', role='assistant', tool_calls=None, function_call=None, provider_specific_fields={'refusal': None}, annotations=[]), provider_specific_fields={})], usage=Usage(completion_tokens=57, prompt_tokens=712, total_tokens=769, completion_tokens_details=CompletionTokensDetailsWrapper(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0, text_tokens=None, image_tokens=None), prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=0, cached_tokens=0, text_tokens=None, image_tokens=None)), service_tier='default')}\n" - ] - } - ], - "source": [ - "# 1. Create a logger\n", - "logger = logging.getLogger(\"effectful.llm\")\n", - "logger.setLevel(logging.INFO)\n", - "log_handler = logging.StreamHandler(sys.stdout)\n", - "log_handler.setFormatter(logging.Formatter(\"%(levelname)s %(payload)s\"))\n", - "logger.addHandler(log_handler)\n", - "# 2. 
Pass it to the handler\n", - "llm_logger = LLMLoggingHandler(logger=logger) # can also be LLMLoggingHandler()\n", - "\n", - "# Avoid cache for demonstration\n", - "haiku.cache_clear()\n", - "\n", - "with handler(provider), handler(llm_logger):\n", - " _ = haiku(\"fish3\")\n", - " _ = limerick(\"fish4\")" - ] - }, - { - "cell_type": "markdown", -======= ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "c0003944", "metadata": {}, "source": [ @@ -737,11 +485,7 @@ }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 43, -======= "execution_count": 10, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "78a4bf44", "metadata": {}, "outputs": [ @@ -749,118 +493,6 @@ "name": "stdout", "output_type": "stream", "text": [ -<<<<<<< HEAD - "Sub-templates available to write_story: ['vacation', 'count_char', 'primes', 'weather', 'story_with_moral', 'haiku_no_cache', 'write_joke', 'story_funny', 'unstable_service', 'rate_joke', 'write_story', 'fetch_data', 'limerick', 'give_rating_for_movie', 'cities']\n", - "=== Story with moral ===\n", - "> Write a story about a curious cat in the style: moral.\n", - " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", - "None\n", - "> Write a story about curious cat in the style: moral.\n", - " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", - "None\n", - "> Write a short story about curious cat and end with a moral lesson. Do not use any tools.\n", - "Once upon a time, in a quaint little village, there lived a curious cat named Whiskers. Whiskers had soft, fluffy fur and eyes that sparkled with mischief. Unlike the other cats in the village, Whiskers had a penchant for exploring places he wasn't supposed to be. 
He loved sneaking into the baker's shop to inhale the sweet aroma of freshly baked bread and creeping through the forest to watch the owls hoot under the moonlight.\n", - "\n", - "One sunny afternoon, Whiskers noticed a peculiar, shimmering light coming from the top of the old, abandoned tower at the end of the village. Unable to contain his curiosity, he decided to investigate. He wove through the cobblestone streets, avoided the prying eyes of Mrs. Hilda's dog, and climbed the crumbling stairs of the tower.\n", - "\n", - "Upon reaching the top, he discovered a stunning, glittering crystal lying in the center of the room. Whiskers was mesmerized. However, just as he reached out his paw to touch it, he heard a low growl. Emerging from the shadows was a large, scruffy dog named Bruno, the tower's unexpected guardian. Whiskers froze, his tiny heart pounding in his chest.\n", - "\n", - "Bruno barked, \"What are you doing here, meddling cat?\"\n", - "\n", - "Whiskers, in his sweetest voice, replied, \"I was just curious about the light. I didn't mean any harm.\"\n", - "\n", - "Bruno's stern expression softened a bit. \"Curiosity is fine, but it must be tempered with caution,\" he said. \"Had you touched the crystal, you would've been trapped here, as it binds whoever possesses it to the tower.\"\n", - "\n", - "Realizing the gravity of his actions, Whiskers thanked Bruno for the warning and slowly backed away, his curiosity sated but also a lesson learned.\n", - "\n", - "From that day on, Whiskers continued to explore, but with a newfound sense of responsibility and caution. He became known not only for his curiosity but also for his wisdom.\n", - "\n", - "Moral of the story: Curiosity can lead to wonderful discoveries, but it is essential to balance it with caution and awareness of potential dangers.\n", - "> Write a story about curious cat in the style: moral.\n", - " Available styles: 'moral' for a story with a lesson, 'funny' for humor. 
Use story_funny for humor, story_with_moral for a story with a lesson.\n", - "Here's a story about a curious cat named Whiskers. Once upon a time, in a quaint little village, there lived a curious cat named Whiskers. Whiskers had soft, fluffy fur and eyes that sparkled with mischief. Unlike the other cats, Whiskers loved exploring places he wasn't supposed to be. One day, Whiskers noticed a shimmering light at the top of an old tower. Driven by curiosity, he climbed up and found a mesmerizing crystal.\n", - "\n", - "However, a large scruffy dog named Bruno, the tower's guardian, warned him that touching the crystal would bind him to the tower forever. Whiskers realized his curiosity almost led to trouble. From then on, he explored with caution and became wise.\n", - "\n", - "**Moral of the story:** Curiosity can lead to wonderful discoveries, but it must be tempered with caution and awareness of potential dangers.\n", - "> Write a story about a curious cat in the style: moral.\n", - " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", - "Here's a story about a curious cat named Whiskers. Once upon a time, in a quaint little village, there lived a curious cat named Whiskers. Whiskers had soft, fluffy fur and eyes that sparkled with mischief. Unlike the other cats, Whiskers loved exploring places he wasn't supposed to be. One day, Whiskers noticed a shimmering light at the top of an old tower. Driven by curiosity, he climbed up and found a mesmerizing crystal.\n", - "\n", - "However, a large scruffy dog named Bruno, the tower's guardian, warned him that touching the crystal would bind him to the tower forever. Whiskers realized his curiosity almost led to trouble. 
From then on, he explored with caution and became wise.\n", - "\n", - "**Moral of the story:** Curiosity can lead to wonderful discoveries, but it must be tempered with caution and awareness of potential dangers.\n", - "Here's a story about a curious cat named Whiskers. Once upon a time, in a quaint little village, there lived a curious cat named Whiskers. Whiskers had soft, fluffy fur and eyes that sparkled with mischief. Unlike the other cats, Whiskers loved exploring places he wasn't supposed to be. One day, Whiskers noticed a shimmering light at the top of an old tower. Driven by curiosity, he climbed up and found a mesmerizing crystal.\n", - "\n", - "However, a large scruffy dog named Bruno, the tower's guardian, warned him that touching the crystal would bind him to the tower forever. Whiskers realized his curiosity almost led to trouble. From then on, he explored with caution and became wise.\n", - "\n", - "**Moral of the story:** Curiosity can lead to wonderful discoveries, but it must be tempered with caution and awareness of potential dangers.\n", - "\n", - "=== Funny story ===\n", - "> Write a story about a curious cat in the style: funny.\n", - " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", - "None\n", - "> Write a funny, humorous story about a curious cat. Do not use any tools.\n", - "Once upon a time in a quaint little village, there lived a curious cat named Whiskers. Whiskers had a reputation around the village for his relentless curiosity, constantly sticking his nose into everything, much to the amusement and occasional frustration of the townspeople.\n", - "\n", - "One sunny afternoon, Whiskers was prowling around the village square when he spotted the baker's delivery truck. As the driver was busy unloading trays of freshly baked bread, Whiskers couldn't resist the delicious aroma wafting through the air. 
His whiskers twitching, he quietly tiptoed closer to the truck.\n", - "\n", - "As soon as the baker turned his back, Whiskers leapt onto the delivery truck, his little paws barely making a sound. He sniffed around, eyes wide with amazement, as he found himself surrounded by an entire universe of bread. There were baguettes, loaves of rye, fluffy rolls, and croissants that seemed to glow under the afternoon sun.\n", - "\n", - "Suddenly, the truck door swung shut with a thud, and before Whiskers knew what was happening, the driver hopped back in, and the truck was on its way to the next delivery.\n", - "\n", - "Inside the truck, Whiskers was having the adventure of his life. With every jolt and turn of the vehicle, he found himself buried in a pile of delicious pastries. It was like being inside a warm, yeasty snow globe. He nibbled on a sourdough corner here, pawed at a ciabatta there, completely oblivious to his unplanned journey.\n", - "\n", - "Meanwhile, back in the village, the baker was puzzled. Where was that curious cat? He often told stories to his customers about Whiskers’ antics. But today, the star of his stories was missing in action.\n", - "\n", - "Meanwhile, the truck had reached a bustling city market. When the doors finally opened, Whiskers was greeted by a bustling scene of city dwellers and market stalls. Confused at first, he quickly recomposed himself, as if this had been his intended destination all along.\n", - "\n", - "Prancing off the truck with a regal air, bits of bread still clinging to his fur, Whiskers strutted through the market as if he owned the place. The city folk, charmed by this bread-dusted feline, began snapping pictures. One amazed observer even commented, \"Look at this celebrity cat! He's got a career in showbiz!\"\n", - "\n", - "By the end of the day, Whiskers was returned home by a kind market lady who recognized him from the baker's stories. 
As she gently placed him down at the village square, he gave a nonchalant flick of his tail and walked off, leaving behind a trail of breadcrumbs and a story that the villagers would tell for years to come.\n", - "\n", - "And so, dear reader, the moral of this tale is simple: Curiosity may take you far and wide, but please remember where home is—because you'll definitely want to come back after an adventure-snack or two!\n", - "> Write a story about a curious cat in the style: funny.\n", - " Available styles: 'moral' for a story with a lesson, 'funny' for humor. Use story_funny for humor, story_with_moral for a story with a lesson.\n", - "Once upon a time in a quaint little village, there lived a curious cat named Whiskers. Whiskers had a reputation around the village for his relentless curiosity, constantly sticking his nose into everything, much to the amusement and occasional frustration of the townspeople.\n", - "\n", - "One sunny afternoon, Whiskers was prowling around the village square when he spotted the baker's delivery truck. As the driver was busy unloading trays of freshly baked bread, Whiskers couldn't resist the delicious aroma wafting through the air. His whiskers twitching, he quietly tiptoed closer to the truck.\n", - "\n", - "As soon as the baker turned his back, Whiskers leapt onto the delivery truck, his little paws barely making a sound. He sniffed around, eyes wide with amazement, as he found himself surrounded by an entire universe of bread. There were baguettes, loaves of rye, fluffy rolls, and croissants that seemed to glow under the afternoon sun.\n", - "\n", - "Suddenly, the truck door swung shut with a thud, and before Whiskers knew what was happening, the driver hopped back in, and the truck was on its way to the next delivery.\n", - "\n", - "Inside the truck, Whiskers was having the adventure of his life. With every jolt and turn of the vehicle, he found himself buried in a pile of delicious pastries. 
It was like being inside a warm, yeasty snow globe. He nibbled on a sourdough corner here, pawed at a ciabatta there, completely oblivious to his unplanned journey.\n", - "\n", - "Meanwhile, back in the village, the baker was puzzled. Where was that curious cat? He often told stories to his customers about Whiskers’ antics. But today, the star of his stories was missing in action.\n", - "\n", - "Meanwhile, the truck had reached a bustling city market. When the doors finally opened, Whiskers was greeted by a bustling scene of city dwellers and market stalls. Confused at first, he quickly recomposed himself, as if this had been his intended destination all along.\n", - "\n", - "Prancing off the truck with a regal air, bits of bread still clinging to his fur, Whiskers strutted through the market as if he owned the place. The city folk, charmed by this bread-dusted feline, began snapping pictures. One amazed observer even commented, \"Look at this celebrity cat! He's got a career in showbiz!\"\n", - "\n", - "By the end of the day, Whiskers was returned home by a kind market lady who recognized him from the baker's stories. As she gently placed him down at the village square, he gave a nonchalant flick of his tail and walked off, leaving behind a trail of breadcrumbs and a story that the villagers would tell for years to come.\n", - "\n", - "And so, dear reader, the moral of this tale is simple: Curiosity may take you far and wide, but please remember where home is—because you'll definitely want to come back after an adventure-snack or two!\n", - "Once upon a time in a quaint little village, there lived a curious cat named Whiskers. Whiskers had a reputation around the village for his relentless curiosity, constantly sticking his nose into everything, much to the amusement and occasional frustration of the townspeople.\n", - "\n", - "One sunny afternoon, Whiskers was prowling around the village square when he spotted the baker's delivery truck. 
As the driver was busy unloading trays of freshly baked bread, Whiskers couldn't resist the delicious aroma wafting through the air. His whiskers twitching, he quietly tiptoed closer to the truck.\n", - "\n", - "As soon as the baker turned his back, Whiskers leapt onto the delivery truck, his little paws barely making a sound. He sniffed around, eyes wide with amazement, as he found himself surrounded by an entire universe of bread. There were baguettes, loaves of rye, fluffy rolls, and croissants that seemed to glow under the afternoon sun.\n", - "\n", - "Suddenly, the truck door swung shut with a thud, and before Whiskers knew what was happening, the driver hopped back in, and the truck was on its way to the next delivery.\n", - "\n", - "Inside the truck, Whiskers was having the adventure of his life. With every jolt and turn of the vehicle, he found himself buried in a pile of delicious pastries. It was like being inside a warm, yeasty snow globe. He nibbled on a sourdough corner here, pawed at a ciabatta there, completely oblivious to his unplanned journey.\n", - "\n", - "Meanwhile, back in the village, the baker was puzzled. Where was that curious cat? He often told stories to his customers about Whiskers’ antics. But today, the star of his stories was missing in action.\n", - "\n", - "Meanwhile, the truck had reached a bustling city market. When the doors finally opened, Whiskers was greeted by a bustling scene of city dwellers and market stalls. Confused at first, he quickly recomposed himself, as if this had been his intended destination all along.\n", - "\n", - "Prancing off the truck with a regal air, bits of bread still clinging to his fur, Whiskers strutted through the market as if he owned the place. The city folk, charmed by this bread-dusted feline, began snapping pictures. One amazed observer even commented, \"Look at this celebrity cat! 
He's got a career in showbiz!\"\n", - "\n", - "By the end of the day, Whiskers was returned home by a kind market lady who recognized him from the baker's stories. As she gently placed him down at the village square, he gave a nonchalant flick of his tail and walked off, leaving behind a trail of breadcrumbs and a story that the villagers would tell for years to come.\n", - "\n", - "And so, dear reader, the moral of this tale is simple: Curiosity may take you far and wide, but please remember where home is—because you'll definitely want to come back after an adventure-snack or two!\n" -======= "Sub-templates available to write_story: dict_keys(['limerick', 'haiku_no_cache', 'primes', 'count_char', 'cities', 'weather', 'vacation', 'describe_image', 'write_joke', 'rate_joke', 'story_with_moral', 'story_funny'])\n", "=== Story with moral ===\n", "\n", @@ -871,7 +503,6 @@ "\n", "\n", "And so, Whiskers the curious cat continued to slink through life, tail high, always ready for another amusing escapade.\n" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] } ], @@ -902,11 +533,7 @@ "assert story_funny in write_story.tools.values()\n", "print(\"Sub-templates available to write_story:\", write_story.tools.keys())\n", "\n", -<<<<<<< HEAD - "with handler(provider), handler({completion: log_llm}):\n", -======= "with handler(provider):\n", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 " print(\"=== Story with moral ===\")\n", " print(write_story(\"a curious cat\", \"moral\"))\n", " print()\n", @@ -919,21 +546,12 @@ "id": "bd25826d", "metadata": {}, "source": [ -<<<<<<< HEAD - "### Retrying LLM Requests\n", - "LLM calls can sometimes fail due to transient errors or produce invalid outputs. 
The `RetryLLMHandler` automatically retries failed template calls:\n", - "\n", - "- `max_retries`: Maximum number of retry attempts (default: 3)\n", - "- `add_error_feedback`: When `True`, appends the error message to the prompt on retry, helping the LLM correct its output.\n", - "- `exception_cls`: RetryHandler will only attempt to try again when a specific type of `Exception` is thrown.\n" -======= "## Retrying LLM Requests\n", "LLM calls can sometimes fail due to transient errors or produce invalid outputs. The `RetryLLMHandler` automatically retries failed template calls and can also surface tool/runtime errors as tool messages:\n", "\n", "- `include_traceback`: When `True`, include traceback details in the error feedback (default: True)\n", "- `catch_tool_errors`: Exception type(s) to catch during tool execution (default: `Exception`)\n", "- `**kwargs`: Additional keyword arguments forwarded to `tenacity.Retrying` (defaults: `stop=stop_after_attempt(4)`, `wait=wait_none()`, `reraise=True`)\n" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] }, { @@ -946,11 +564,7 @@ }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 44, -======= "execution_count": 11, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "4334d07a", "metadata": {}, "outputs": [ @@ -958,20 +572,8 @@ "name": "stdout", "output_type": "stream", "text": [ -<<<<<<< HEAD - "> Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\n", - "None\n", - "> Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\n", - "None\n", - "> Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\n", - "None\n", - "> Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\n", - "The data fetched from the unstable service is: [1, 2, 3].\n", - "Result: The data fetched from the unstable service is: [1, 2, 3]. 
Retries: 3\n" -======= "Error: Tool execution failed: Error executing tool 'unstable_service': Service unavailable! Attempt 1/3. Please retry.\n", "Result: The unstable service successfully returned the following data: `[1, 2, 3]`. Retries: 3\n" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] } ], @@ -994,15 +596,6 @@ "\n", "@Template.define # unstable_service auto-captured from lexical scope\n", "def fetch_data() -> str:\n", -<<<<<<< HEAD - " \"\"\"Use the unstable_service tool to fetch data. Do not use the fetch_data tool.\"\"\"\n", - " raise NotHandled\n", - "\n", - "\n", - "retry_handler = RetryLLMHandler(max_retries=5, add_error_feedback=True)\n", - "\n", - "with handler(provider), handler(retry_handler), handler({completion: log_llm}):\n", -======= " \"\"\"Use the unstable_service tool to fetch data.\"\"\"\n", " raise NotHandled\n", "\n", @@ -1014,7 +607,6 @@ " print(f\"Error: {e}\")\n", "\n", "with handler(provider), handler(RetryLLMHandler()):\n", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 " result = fetch_data()\n", " print(f\"Result: {result}\", \"Retries:\", call_count)" ] @@ -1024,21 +616,13 @@ "id": "4ac00e01", "metadata": {}, "source": [ -<<<<<<< HEAD - "### Retrying with Validation Errors\n", -======= "## Retrying with Validation Errors\n", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "As noted above, the `RetryHandler` can also be used to retry on runtime/validation error:" ] }, { "cell_type": "code", -<<<<<<< HEAD - "execution_count": 45, -======= "execution_count": 12, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "id": "39b2b225", "metadata": {}, "outputs": [ @@ -1046,57 +630,11 @@ "name": "stdout", "output_type": "stream", "text": [ -<<<<<<< HEAD - "> Give a rating for Die Hard. The explanation MUST include the numeric score. 
Do not use any tools.\n", - "{\"value\":{\"score\":9,\"explanation\":\"Die Hard is a quintessential action film that redefined the genre with its intense action sequences, memorable characters, and sharp wit. Bruce Willis delivers an iconic performance as John McClane, a relatable and charismatic hero battling terrorists. Its clever plot twists, non-stop thrills, and innovative cinematography contribute to its enduring popularity and critical acclaim. Overall, it's often considered one of the best action movies of all time, deserving a score of 9 out of 10.\"}}\n", - "> Give a rating for Die Hard. The explanation MUST include the numeric score. Do not use any tools.\n", - "Error from previous generation:\n", - "```\n", - "Traceback (most recent call last):\n", - " File \"/Users/feser/work/basis/effectful/effectful/handlers/llm/providers.py\", line 175, in _retry_completion\n", - " return fwd(template_ext, *args, **kwargs)\n", - " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", - " File \"/Users/feser/work/basis/effectful/effectful/ops/types.py\", line 485, in __call__\n", - " return self_handler(*args, **kwargs)\n", - " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", - " File \"/Users/feser/.local/share/uv/python/cpython-3.12.9-macos-aarch64-none/lib/python3.12/contextlib.py\", line 81, in inner\n", - " return func(*args, **kwds)\n", - " ^^^^^^^^^^^^^^^^^^^\n", - " File \"/Users/feser/work/basis/effectful/effectful/internals/runtime.py\", line 45, in _cont_wrapper\n", - " return fn(*a, **k)\n", - " ^^^^^^^^^^^\n", - " File \"/Users/feser/work/basis/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\n", - " return fn(*a, **k)\n", - " ^^^^^^^^^^^\n", - " File \"/Users/feser/work/basis/effectful/effectful/internals/runtime.py\", line 70, in bound_body\n", - " return body(*a, **k)\n", - " ^^^^^^^^^^^^^\n", - " File \"/Users/feser/work/basis/effectful/effectful/internals/runtime.py\", line 56, in _cont_wrapper\n", - " return fn(*a, **k)\n", - " ^^^^^^^^^^^\n", - " File 
\"/Users/feser/work/basis/effectful/effectful/handlers/llm/providers.py\", line 373, in _call\n", - " return decode_response(template, resp)\n", - " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", - " File \"/Users/feser/work/basis/effectful/effectful/handlers/llm/providers.py\", line 317, in decode_response\n", - " result = Result.model_validate_json(result_str)\n", - " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", - " File \"/Users/feser/work/basis/effectful/.venv/lib/python3.12/site-packages/pydantic/main.py\", line 766, in model_validate_json\n", - " return cls.__pydantic_validator__.validate_json(\n", - " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", - "pydantic_core._pydantic_core.ValidationError: 1 validation error for Result\n", - "value.score\n", - " score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]\n", - "```\n", - "{\"value\":{\"score\":5,\"explanation\":\"Die Hard is often celebrated as one of the quintessential action movies of all time. It features high-stakes tension, impressive action sequences, and an iconic performance by Bruce Willis as John McClane. Released in 1988, the film remains a benchmark for action storytelling with its clever plot and memorable villain portrayed by Alan Rickman. Hence, it earns a 5 out of 5 for its lasting impact and entertainment value in the action genre.\"}}\n", - "Score: 5/5\n", - "Explanation: Die Hard is often celebrated as one of the quintessential action movies of all time. It features high-stakes tension, impressive action sequences, and an iconic performance by Bruce Willis as John McClane. Released in 1988, the film remains a benchmark for action storytelling with its clever plot and memorable villain portrayed by Alan Rickman. Hence, it earns a 5 out of 5 for its lasting impact and entertainment value in the action genre.\n" -======= "Error: Error decoding response: 1 validation error for Response\n", "value.score\n", " score must be 1–5, got 9 [type=invalid_score, input_value=9, input_type=int]. 
Please provide a valid response and try again.\n", "Score: 5/5\n", "Explanation: Die Hard is a quintessential action film that has deeply influenced the genre. Its engaging storyline, memorable characters, and groundbreaking action scenes have made it a beloved classic. The film's humor and suspense balance combined with Bruce Willis' iconic performance contribute to its enduring appeal. It rightfully earns a top score of 5 out of 5 for its impact and entertainment value.\n" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ] } ], @@ -1136,17 +674,6 @@ " raise NotHandled\n", "\n", "\n", -<<<<<<< HEAD - "# RetryLLMHandler with error feedback - the traceback helps LLM correct validation errors\n", - "# Note: Pydantic wraps PydanticCustomError inside ValidationError, so we catch ValidationError instead\n", - "retry_handler = RetryLLMHandler(\n", - " max_retries=3,\n", - " add_error_feedback=True,\n", - " exception_cls=ValidationError, # Catch validation errors\n", - ")\n", - "\n", - "with handler(provider), handler(retry_handler), handler({completion: log_llm}):\n", -======= "with handler(provider):\n", " try:\n", " rating = give_rating_for_movie(\"Die Hard\")\n", @@ -1154,13 +681,10 @@ " print(f\"Error: {e}\")\n", "\n", "with handler(provider), handler(RetryLLMHandler()):\n", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 " rating = give_rating_for_movie(\"Die Hard\")\n", " print(f\"Score: {rating.score}/5\")\n", " print(f\"Explanation: {rating.explanation}\")" ] -<<<<<<< HEAD -======= }, { "cell_type": "markdown", @@ -1271,16 +795,11 @@ " print(function_that_writes_story(\"a curious cat\"))\n", " print()" ] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } ], "metadata": { "kernelspec": { -<<<<<<< HEAD - "display_name": "Python 3 (ipykernel)", -======= "display_name": ".venv", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "language": "python", "name": "python3" }, @@ -1294,11 +813,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": 
"ipython3", -<<<<<<< HEAD - "version": "3.12.9" -======= "version": "3.12.11" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } }, "nbformat": 4, diff --git a/effectful/handlers/llm/__init__.py b/effectful/handlers/llm/__init__.py index 51cf927f..cdda9347 100644 --- a/effectful/handlers/llm/__init__.py +++ b/effectful/handlers/llm/__init__.py @@ -1,9 +1,3 @@ -<<<<<<< HEAD -from .template import Template, Tool - -__all__ = ["Template", "Tool"] -======= from .template import Agent, Template, Tool __all__ = ["Agent", "Template", "Tool"] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 diff --git a/effectful/handlers/llm/encoding.py b/effectful/handlers/llm/encoding.py index 466b8ccb..bcfa15a4 100644 --- a/effectful/handlers/llm/encoding.py +++ b/effectful/handlers/llm/encoding.py @@ -1,14 +1,3 @@ -<<<<<<< HEAD -import base64 -import io -import typing -from abc import ABC, abstractmethod -from collections.abc import Callable - -import pydantic -from litellm import ( - ChatCompletionImageUrlObject, -======= import ast import base64 import functools @@ -37,16 +26,10 @@ ChatCompletionMessageToolCall, ChatCompletionTextObject, ChatCompletionToolParam, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 OpenAIMessageContentListBlock, ) from PIL import Image -<<<<<<< HEAD -from effectful.ops.syntax import _CustomSingleDispatchCallable -from effectful.ops.types import Operation, Term - -======= import effectful.handlers.llm.evaluation as evaluation from effectful.handlers.llm.template import Tool from effectful.internals.unification import nested_type @@ -56,7 +39,6 @@ type ToolCallID = str ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def _pil_image_to_base64_data(pil_image: Image.Image) -> str: buf = io.BytesIO() @@ -68,105 +50,6 @@ def _pil_image_to_base64_data_uri(pil_image: Image.Image) -> str: return f"data:image/png;base64,{_pil_image_to_base64_data(pil_image)}" -<<<<<<< HEAD -class EncodableAs[T, U](ABC): - t: type[U] - - def __init__(self, *args, **kwargs): - 
pass - - @classmethod - @abstractmethod - def encode(cls, vl: T) -> U: - pass - - @classmethod - @abstractmethod - def decode(cls, vl: U) -> T: - pass - - @classmethod - def serialize(cls, value: U) -> list[OpenAIMessageContentListBlock]: - return [{"type": "text", "text": str(value)}] - - -class Encodable[T](EncodableAs[T, type]): - t = type - - -@_CustomSingleDispatchCallable -def type_to_encodable_type[T]( - __dispatch: Callable[[type[T]], Callable[..., Encodable[T]]], ty: type[T] -) -> Encodable[T]: - origin_ty = typing.get_origin(ty) or ty - return __dispatch(origin_ty)(ty) - - -@type_to_encodable_type.register(object) -def _type_encodable_type_base[T](ty: type[T]) -> Encodable[T]: - class BaseEncodable(EncodableAs[T, T]): - t: type[T] = ty - - @classmethod - def encode(cls, vl: T) -> T: - return vl - - @classmethod - def decode(cls, vl: T) -> T: - return vl - - return typing.cast(Encodable[T], BaseEncodable()) - - -@type_to_encodable_type.register(Term) -def _type_encodable_type_term[T: Term](ty: type[T]) -> Encodable[T]: - raise TypeError("Terms cannot be encoded or decoded in general.") - - -@type_to_encodable_type.register(Operation) -def _type_encodable_type_operation[T: Operation](ty: type[T]) -> Encodable[T]: - raise TypeError("Operations cannot be encoded or decoded in general.") - - -@type_to_encodable_type.register(pydantic.BaseModel) -def _type_encodable_type_pydantic_base_model[T: pydantic.BaseModel]( - ty: type[T], -) -> Encodable[T]: - class EncodablePydanticBaseModel(EncodableAs[T, T]): - t: type[T] = ty - - @classmethod - def decode(cls, vl: T) -> T: - return vl - - @classmethod - def encode(cls, vl: T) -> T: - return vl - - @classmethod - def serialize(cls, vl: T) -> list[OpenAIMessageContentListBlock]: - return [{"type": "text", "text": vl.model_dump_json()}] - - return typing.cast(Encodable[T], EncodablePydanticBaseModel()) - - -@type_to_encodable_type.register(Image.Image) -class EncodableImage(EncodableAs[Image.Image, 
ChatCompletionImageUrlObject]): - t = ChatCompletionImageUrlObject - - @classmethod - def encode(cls, image: Image.Image) -> ChatCompletionImageUrlObject: - return { - "detail": "auto", - "url": _pil_image_to_base64_data_uri(image), - } - - @classmethod - def decode(cls, image: ChatCompletionImageUrlObject) -> Image.Image: - image_url = image["url"] - if not image_url.startswith("data:image/"): - raise RuntimeError( -======= @dataclass(frozen=True, eq=True) class DecodedToolCall[T]: """ @@ -316,95 +199,11 @@ def decode( image_url = typing.cast(str, getattr(normalized, "url")) if not image_url.startswith("data:image/"): raise TypeError( ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 f"expected base64 encoded image as data uri, received {image_url}" ) data = image_url.split(",")[1] return Image.open(fp=io.BytesIO(base64.b64decode(data))) -<<<<<<< HEAD - @classmethod - def serialize( - cls, value: ChatCompletionImageUrlObject - ) -> list[OpenAIMessageContentListBlock]: - return [{"type": "image_url", "image_url": value}] - - -@type_to_encodable_type.register(tuple) -def _type_encodable_type_tuple[T](ty: type[T]) -> Encodable[T]: - args = typing.get_args(ty) - - # Handle empty tuple, or tuple with no args - if not args or args == ((),): - return _type_encodable_type_base(ty) - - # Create encoders for each element type - element_encoders = [type_to_encodable_type(arg) for arg in args] - - # Check if any element type is Image.Image - has_image = any(arg is Image.Image for arg in args) - - encoded_ty: type[typing.Any] = typing.cast( - type[typing.Any], - tuple[*(enc.t for enc in element_encoders)], # type: ignore - ) - - class TupleEncodable(EncodableAs[T, typing.Any]): - t: type[typing.Any] = encoded_ty - - @classmethod - def encode(cls, t: T) -> typing.Any: - if not isinstance(t, tuple): - raise TypeError(f"Expected tuple, got {type(t)}") - if len(t) != len(element_encoders): - raise ValueError( - f"Tuple length {len(t)} does not match expected length 
{len(element_encoders)}" - ) - return tuple([enc.encode(elem) for enc, elem in zip(element_encoders, t)]) - - @classmethod - def decode(cls, t: typing.Any) -> T: - if len(t) != len(element_encoders): - raise ValueError( - f"tuple length {len(t)} does not match expected length {len(element_encoders)}" - ) - decoded_elements: list[typing.Any] = [ - enc.decode(elem) for enc, elem in zip(element_encoders, t) - ] - return typing.cast(T, tuple(decoded_elements)) - - @classmethod - def serialize(cls, value: typing.Any) -> list[OpenAIMessageContentListBlock]: - if has_image: - # If tuple contains images, serialize each element and flatten the results - result: list[OpenAIMessageContentListBlock] = [] - if not isinstance(value, tuple): - raise TypeError(f"Expected tuple, got {type(value)}") - if len(value) != len(element_encoders): - raise ValueError( - f"Tuple length {len(value)} does not match expected length {len(element_encoders)}" - ) - for enc, elem in zip(element_encoders, value): - result.extend(enc.serialize(elem)) - return result - else: - return super().serialize(value) - - return typing.cast(Encodable[T], TupleEncodable()) - - -@type_to_encodable_type.register(list) -def _type_encodable_type_list[T](ty: type[T]) -> Encodable[T]: - args = typing.get_args(ty) - - # Handle unparameterized list (list without type args) - if not args: - return _type_encodable_type_base(ty) - - # Get the element type (first type argument) - element_ty = args[0] - element_encoder = type_to_encodable_type(element_ty) -======= def serialize( self, encoded_value: pydantic.BaseModel ) -> Sequence[OpenAIMessageContentListBlock]: @@ -1095,49 +894,10 @@ def _encodable_mutable_sequence[T, U]( # Get the element type (first type argument) element_ty = args[0] element_encoder = Encodable.define(element_ty, ctx) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 # Check if element type is Image.Image has_image = element_ty is Image.Image -<<<<<<< HEAD - # Build the encoded type (list of encoded 
element type) - runtime-created, use Any - encoded_ty: type[typing.Any] = typing.cast( - type[typing.Any], - list[element_encoder.t], # type: ignore - ) - - class ListEncodable(EncodableAs[T, typing.Any]): - t: type[typing.Any] = encoded_ty - - @classmethod - def encode(cls, t: T) -> typing.Any: - if not isinstance(t, list): - raise TypeError(f"Expected list, got {type(t)}") - return [element_encoder.encode(elem) for elem in t] - - @classmethod - def decode(cls, t: typing.Any) -> T: - decoded_elements: list[typing.Any] = [ - element_encoder.decode(elem) for elem in t - ] - return typing.cast(T, decoded_elements) - - @classmethod - def serialize(cls, value: typing.Any) -> list[OpenAIMessageContentListBlock]: - if has_image: - # If list contains images, serialize each element and flatten the results - result: list[OpenAIMessageContentListBlock] = [] - if not isinstance(value, list): - raise TypeError(f"Expected list, got {type(value)}") - for elem in value: - result.extend(element_encoder.serialize(elem)) - return result - else: - return super().serialize(value) - - return typing.cast(Encodable[T], ListEncodable()) -======= # Use enc for Image (schema-valid), base otherwise encoded_ty: type[typing.Any] = typing.cast( type[typing.Any], @@ -1215,4 +975,3 @@ def _encodable_tool_call[T]( ) -> Encodable[DecodedToolCall[T], ChatCompletionMessageToolCall]: ctx = ctx or {} return ToolCallEncodable(ty, ChatCompletionMessageToolCall, ctx) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 diff --git a/effectful/handlers/llm/template.py b/effectful/handlers/llm/template.py index 295e8b7c..a9903005 100644 --- a/effectful/handlers/llm/template.py +++ b/effectful/handlers/llm/template.py @@ -1,14 +1,3 @@ -<<<<<<< HEAD -import inspect -import types -import typing -from collections import ChainMap -from collections.abc import Callable, Mapping, MutableMapping -from dataclasses import dataclass -from typing import Annotated, Any - -from effectful.ops.types import INSTANCE_OP_PREFIX, 
Annotation, NotHandled, Operation -======= import abc import functools import inspect @@ -21,7 +10,6 @@ from typing import Annotated, Any from effectful.ops.types import Annotation, Operation ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 class _IsRecursiveAnnotation(Annotation): @@ -120,14 +108,6 @@ def define(cls, *args, **kwargs) -> "Tool[P, T]": return typing.cast("Tool[P, T]", super().define(*args, **kwargs)) -<<<<<<< HEAD -@dataclass -class _BoundInstance[T]: - instance: T - - -======= ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 class Template[**P, T](Tool[P, T]): """A :class:`Template` is a function that is implemented by a large language model. @@ -188,8 +168,6 @@ class Template[**P, T](Tool[P, T]): """ __context__: ChainMap[str, Any] -<<<<<<< HEAD -======= __system_prompt__: str @classmethod @@ -228,7 +206,6 @@ def _validate_prompt( f"variables {list(sorted(unresolved))} that are not in the signature " f"{{{template.__signature__}}} or lexical scope." ) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 @property def __prompt_template__(self) -> str: @@ -242,25 +219,6 @@ def tools(self) -> Mapping[str, Tool]: is_recursive = _is_recursive_signature(self.__signature__) for name, obj in self.__context__.items(): -<<<<<<< HEAD - if obj is self and not is_recursive: - continue - # Collect tools in context - if isinstance(obj, Tool): - result[name] = obj - - if isinstance(obj, staticmethod) and isinstance(obj.__func__, Tool): - result[name] = obj.__func__ - - # Collect tools as methods on any bound instances - if isinstance(obj, _BoundInstance): - for instance_name in obj.instance.__dir__(): - if instance_name.startswith(INSTANCE_OP_PREFIX): - continue - instance_obj = getattr(obj.instance, instance_name) - if isinstance(instance_obj, Tool): - result[instance_name] = instance_obj -======= # Collect tools directly in context if isinstance(obj, Tool): result[name] = obj @@ -285,7 +243,6 @@ def tools(self) -> Mapping[str, Tool]: for name, tool in 
tuple(result.items()): if tool2name[tool] != name or (tool is self and not is_recursive): del result[name] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 return result @@ -297,10 +254,6 @@ def __get__[S](self, instance: S | None, owner: type[S] | None = None): result = super().__get__(instance, owner) self_param_name = list(self.__signature__.parameters.keys())[0] -<<<<<<< HEAD - self_context = {self_param_name: _BoundInstance(instance)} - result.__context__ = self.__context__.new_child(self_context) -======= result.__context__ = self.__context__.new_child({self_param_name: instance}) if isinstance(instance, Agent): assert isinstance(result, Template) and not hasattr(result, "__history__") @@ -313,7 +266,6 @@ def __get__[S](self, instance: S | None, owner: type[S] | None = None): ) if part ) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 return result @classmethod @@ -334,30 +286,6 @@ def define[**Q, V]( frame = frame.f_back assert frame is not None -<<<<<<< HEAD - # Check if we're in a class definition by looking for __qualname__ - qualname = frame.f_locals.get("__qualname__") - n_frames = 1 - if qualname is not None: - name_components = qualname.split(".") - for name in reversed(name_components): - if name == "": - break - n_frames += 1 - - contexts = [] - for offset in range(n_frames): - assert frame is not None - locals_proxy: types.MappingProxyType[str, Any] = types.MappingProxyType( - frame.f_locals - ) - globals_proxy: types.MappingProxyType[str, Any] = types.MappingProxyType( - frame.f_globals - ) - contexts.append(locals_proxy) - frame = frame.f_back - -======= # Skip class body frames: in Python, class bodies are not lexical # scopes for methods, so their locals should not be captured. 
qualname = frame.f_locals.get("__qualname__") @@ -393,40 +321,10 @@ def define[**Q, V]( frame = frame.f_back break frame = frame.f_back ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 contexts.append(globals_proxy) context: ChainMap[str, Any] = ChainMap( *typing.cast(list[MutableMapping[str, Any]], contexts) ) -<<<<<<< HEAD - - op = super().define(default, *args, **kwargs) - op.__context__ = context # type: ignore[attr-defined] - return typing.cast(Template[Q, V], op) - - def replace( - self, - signature: inspect.Signature | None = None, - prompt_template: str | None = None, - name: str | None = None, - ) -> "Template": - signature = signature or self.__signature__ - prompt_template = prompt_template or self.__prompt_template__ - name = name or self.__name__ - - if prompt_template: - - def default(*args, **kwargs): - raise NotHandled - - default.__doc__ = prompt_template - else: - default = self.__default__ - - op = Template(signature, name, default) - op.__context__ = self.__context__ - return op -======= op = super().define(default, *args, **kwargs) op.__context__ = context # type: ignore[attr-defined] mod = inspect.getmodule(_fn) @@ -494,4 +392,3 @@ def __init_subclass__(cls, **kwargs): ) sp.__set_name__(cls, "__system_prompt__") cls.__system_prompt__ = sp ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_integer_return_type.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_integer_return_type.json index a9f422a7..bc5d3bc0 100644 --- a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_integer_return_type.json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_integer_return_type.json @@ -1,66 +1,26 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-D3rNbSUc9fUpX7qU6kanq3CXVh8eJ", "created": 1769812547, "model": "gpt-5-nano-2025-08-07", "object": "chat.completion", 
"system_fingerprint": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - "annotations": [], - "content": "{\"value\":67}", -======= "content": "{\"value\":73}", "role": "assistant", "tool_calls": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "function_call": null, "provider_specific_fields": { "refusal": null }, -<<<<<<< HEAD - "role": "assistant", - "tool_calls": null -======= "annotations": [] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 }, "provider_specific_fields": {} } ], -<<<<<<< HEAD - "created": 1767182739, - "id": "chatcmpl-CspFLXojfuOibqKzI1QdRgfOJtd36", - "model": "gpt-5-nano-2025-08-07", - "object": "chat.completion", - "service_tier": "default", - "system_fingerprint": null, - "usage": { - "completion_tokens": 529, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "image_tokens": null, - "reasoning_tokens": 512, - "rejected_prediction_tokens": 0, - "text_tokens": null - }, - "prompt_tokens": 429, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 958 - } -======= "usage": { "completion_tokens": 401, "prompt_tokens": 340, @@ -81,5 +41,4 @@ } }, "service_tier": "default" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[claude-haiku-4-5].json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[claude-haiku-4-5].json index 08b55a98..7fdb0c6e 100644 --- a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[claude-haiku-4-5].json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[claude-haiku-4-5].json @@ -1,53 
+1,14 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-b08c0c1f-3fd9-45eb-834a-73fe347714ee", "created": 1769812544, "model": "claude-haiku-4-5-20251001", "object": "chat.completion", "system_fingerprint": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - "content": "Testing is a critical process that helps identify bugs, verify functionality, and ensure that software meets quality standards before deployment.", - "function_call": null, - "provider_specific_fields": { - "citations": null, - "thinking_blocks": null - }, - "role": "assistant", - "tool_calls": null - } - } - ], - "created": 1767182732, - "id": "chatcmpl-aa66067c-df8b-4adf-8978-68e8cdcaaa4f", - "model": "claude-haiku-4-5-20251001", - "object": "chat.completion", - "system_fingerprint": null, - "usage": { - "cache_creation_input_tokens": 0, - "cache_read_input_tokens": 0, - "completion_tokens": 26, - "completion_tokens_details": null, - "prompt_tokens": 1145, - "prompt_tokens_details": { - "audio_tokens": null, - "cache_creation_token_details": { - "ephemeral_1h_input_tokens": 0, - "ephemeral_5m_input_tokens": 0 - }, - "cache_creation_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 1171 -======= "content": "{\"value\": \"Testing is a crucial process that ensures software quality, identifies bugs, and validates that systems work as intended before deployment to users.\"}", "role": "assistant", "tool_calls": null, @@ -74,6 +35,5 @@ }, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0 ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } } \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[gpt-4o-mini].json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[gpt-4o-mini].json index f7c66ec5..be954644 100644 
--- a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[gpt-4o-mini].json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_cross_endpoint[gpt-4o-mini].json @@ -1,66 +1,26 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-D3rNU2lVVTdXXJhrnqD9Vs2LUtPNT", "created": 1769812540, "model": "gpt-4o-mini-2024-07-18", "object": "chat.completion", "system_fingerprint": "fp_1590f93f9d", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - "annotations": [], - "content": "Testing is essential for ensuring the quality and reliability of software and products before they are released.", -======= "content": "{\"value\":\"Testing ensures that a product meets its requirements and functions as intended.\"}", "role": "assistant", "tool_calls": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "function_call": null, "provider_specific_fields": { "refusal": null }, -<<<<<<< HEAD - "role": "assistant", - "tool_calls": null -======= "annotations": [] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 }, "provider_specific_fields": {} } ], -<<<<<<< HEAD - "created": 1767182730, - "id": "chatcmpl-CspFCkNxMBr1YyZtqzWJU1rMAQykg", - "model": "gpt-4o-mini-2024-07-18", - "object": "chat.completion", - "service_tier": "default", - "system_fingerprint": "fp_c4585b5b9c", - "usage": { - "completion_tokens": 19, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "image_tokens": null, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0, - "text_tokens": null - }, - "prompt_tokens": 313, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 332 - } -======= "usage": { "completion_tokens": 21, "prompt_tokens": 262, @@ -81,5 +41,4 @@ } }, "service_tier": "default" ->>>>>>> 
68d7645f081b17247fde3494e548fd16f92694e8 } \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-4o-mini].json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-4o-mini].json index cf8686a0..9d40c5f2 100644 --- a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-4o-mini].json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-4o-mini].json @@ -1,66 +1,26 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-D3rNPzidwbTkz4hc0mGchwjqbvlq9", "created": 1769812535, "model": "gpt-4o-mini-2024-07-18", "object": "chat.completion", "system_fingerprint": "fp_1590f93f9d", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - "annotations": [], - "content": "Testing is a crucial process that ensures the quality and functionality of a product or system before its release.", -======= "content": "{\"value\":\"Testing is the process of evaluating a system or component to ensure it meets specified requirements and functions correctly.\"}", "role": "assistant", "tool_calls": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "function_call": null, "provider_specific_fields": { "refusal": null }, -<<<<<<< HEAD - "role": "assistant", - "tool_calls": null -======= "annotations": [] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 }, "provider_specific_fields": {} } ], -<<<<<<< HEAD - "created": 1767182727, - "id": "chatcmpl-CspF9mdsKgygvf8Pogy7DCFgjydme", - "model": "gpt-4o-mini-2024-07-18", - "object": "chat.completion", - "service_tier": "default", - "system_fingerprint": "fp_c4585b5b9c", - "usage": { - "completion_tokens": 21, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - 
"image_tokens": null, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0, - "text_tokens": null - }, - "prompt_tokens": 313, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 334 - } -======= "usage": { "completion_tokens": 28, "prompt_tokens": 262, @@ -81,5 +41,4 @@ } }, "service_tier": "default" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-5-nano].json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-5-nano].json index c2c3b26f..83ad3c82 100644 --- a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-5-nano].json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_simple_prompt_multiple_models[gpt-5-nano].json @@ -1,66 +1,26 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-D3rNQ45EqoVx6YQZKRX86eowLhhcv", "created": 1769812536, "model": "gpt-5-nano-2025-08-07", "object": "chat.completion", "system_fingerprint": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - "annotations": [], - "content": "Testing helps ensure product quality by catching issues early.", -======= "content": "{\"value\":\"Testing helps catch mistakes before they reach users.\"}", "role": "assistant", "tool_calls": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "function_call": null, "provider_specific_fields": { "refusal": null }, -<<<<<<< HEAD - "role": "assistant", - "tool_calls": null -======= "annotations": [] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 }, "provider_specific_fields": {} } ], -<<<<<<< HEAD - "created": 1767182728, - "id": "chatcmpl-CspFA6VcwnTzTxjuvpG76RCuRt3KM", - 
"model": "gpt-5-nano-2025-08-07", - "object": "chat.completion", - "service_tier": "default", - "system_fingerprint": null, - "usage": { - "completion_tokens": 211, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "image_tokens": null, - "reasoning_tokens": 192, - "rejected_prediction_tokens": 0, - "text_tokens": null - }, - "prompt_tokens": 394, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 605 - } -======= "usage": { "completion_tokens": 282, "prompt_tokens": 342, @@ -81,5 +41,4 @@ } }, "service_tier": "default" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_structured_output.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_structured_output.json index ebbc239b..ae5497ee 100644 --- a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_structured_output.json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_structured_output.json @@ -1,66 +1,26 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-D3rNYTWIcoJmMGU1mjErSsz4rJgM6", "created": 1769812544, "model": "gpt-5-nano-2025-08-07", "object": "chat.completion", "system_fingerprint": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - "annotations": [], - "content": "{\"value\":{\"genre\":\"action\",\"explanation\":\"The story centers on a rogue cop confronting an organized threat in a high-stakes, high-adrenaline setting (a skyscraper), with emphasis on pursuits, gunfights, and stunts typical of action films.\"}}", -======= "content": "{\"value\": {\"genre\": \"action\", \"explanation\": \"The plot centers on a rogue cop taking on an evil group to stop them from taking over a skyscraper, 
featuring high-stakes conflict and action-oriented sequences typical of the action genre.\"}}", "role": "assistant", "tool_calls": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "function_call": null, "provider_specific_fields": { "refusal": null }, -<<<<<<< HEAD - "role": "assistant", - "tool_calls": null -======= "annotations": [] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 }, "provider_specific_fields": {} } ], -<<<<<<< HEAD - "created": 1767182732, - "id": "chatcmpl-CspFEhYoCToW0c9B0GjXeKbjiIn0K", - "model": "gpt-5-nano-2025-08-07", - "object": "chat.completion", - "service_tier": "default", - "system_fingerprint": null, - "usage": { - "completion_tokens": 451, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "image_tokens": null, - "reasoning_tokens": 384, - "rejected_prediction_tokens": 0, - "text_tokens": null - }, - "prompt_tokens": 541, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 992 - } -======= "usage": { "completion_tokens": 384, "prompt_tokens": 457, @@ -81,5 +41,4 @@ } }, "service_tier": "default" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_with_config_params.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_with_config_params.json index 406a82cc..3a5c57fd 100644 --- a/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_with_config_params.json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestLiteLLMProvider__test_with_config_params.json @@ -1,66 +1,26 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-D3rNgQEQWcxYghJaJXPS4eNvmHgq9", "created": 1769812552, "model": "gpt-4o-mini-2024-07-18", "object": "chat.completion", "system_fingerprint": "fp_1590f93f9d", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 
"choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - "annotations": [], - "content": "A deterministic test consistently produces the same results under the same conditions, ensuring reliability and repeatability in software testing.", -======= "content": "{\"value\":\"A deterministic test consistently produces the same output for a given input, ensuring reliability and repeatability in its results.\"}", "role": "assistant", "tool_calls": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "function_call": null, "provider_specific_fields": { "refusal": null }, -<<<<<<< HEAD - "role": "assistant", - "tool_calls": null -======= "annotations": [] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 }, "provider_specific_fields": {} } ], -<<<<<<< HEAD - "created": 1767182744, - "id": "chatcmpl-CspFQyXeEzOBDFdjK0p7UJn2Fe6RV", - "model": "gpt-4o-mini-2024-07-18", - "object": "chat.completion", - "service_tier": "default", - "system_fingerprint": "fp_c4585b5b9c", - "usage": { - "completion_tokens": 23, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "image_tokens": null, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0, - "text_tokens": null - }, - "prompt_tokens": 314, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 337 - } -======= "usage": { "completion_tokens": 30, "prompt_tokens": 263, @@ -81,5 +41,4 @@ } }, "service_tier": "default" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__TestPydanticBaseModelReturn__test_pydantic_basemodel_return.json b/tests/fixtures/tests_test_handlers_llm_provider.py__TestPydanticBaseModelReturn__test_pydantic_basemodel_return.json index 1a95d5ad..70cd6b06 100644 --- 
a/tests/fixtures/tests_test_handlers_llm_provider.py__TestPydanticBaseModelReturn__test_pydantic_basemodel_return.json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__TestPydanticBaseModelReturn__test_pydantic_basemodel_return.json @@ -1,66 +1,26 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-D9hhLJOHKOw4k9zRbHsj0gQ83wHo4", "created": 1771205299, "model": "gpt-5-nano-2025-08-07", "object": "chat.completion", "system_fingerprint": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - "annotations": [], - "content": "{\"value\":{\"title\":\"The Spark Within the Spire\",\"rating\":4,\"summary\":\"The Spark Within the Spire follows a young student who discovers latent magical powers and earns a place at a venerable wizarding academy. Across his first year, he navigates challenging classes, budding friendships, and a growing sense of destiny as a looming threat quietly unfolds. The book blends classic wizard-school charm with inventive magic, delivering moments of wonder, humor, and quiet courage. While it uses familiar tropes, its strong character work and brisk pace make it a heartfelt coming-of-age tale with enough fresh twists to stay compelling for readers who enjoy magical school adventures.\"}}", -======= "content": "{\"title\":\"Harry Potter and the Philosopher's Stone\",\"rating\":5,\"summary\":\"A wonderfully inviting entry to a long-running fantasy saga. It follows a boy who discovers he is a wizard and begins at Hogwarts, where magic, friendship, and danger intertwine. The book shines with imaginative world-building, endearing characters, and themes of courage, loyalty, and self-discovery. 
While aimed at younger readers, its charm and sense of wonder appeal to all ages and set the standard for modern fantasy.\"}", "role": "assistant", "tool_calls": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "function_call": null, "provider_specific_fields": { "refusal": null }, -<<<<<<< HEAD - "role": "assistant", - "tool_calls": null -======= "annotations": [] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 }, "provider_specific_fields": {} } ], -<<<<<<< HEAD - "created": 1767182755, - "id": "chatcmpl-CspFbHJXFBDmP4RrqnjVI0CDIiePi", - "model": "gpt-5-nano-2025-08-07", - "object": "chat.completion", - "service_tier": "default", - "system_fingerprint": null, - "usage": { - "completion_tokens": 2192, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "image_tokens": null, - "reasoning_tokens": 2048, - "rejected_prediction_tokens": 0, - "text_tokens": null - }, - "prompt_tokens": 526, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 2718 - } -======= "usage": { "completion_tokens": 1268, "prompt_tokens": 923, @@ -81,5 +41,4 @@ } }, "service_tier": "default" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } \ No newline at end of file diff --git a/tests/fixtures/tests_test_handlers_llm_provider.py__test_image_input.json b/tests/fixtures/tests_test_handlers_llm_provider.py__test_image_input.json index ff9e4520..3c2d22d5 100644 --- a/tests/fixtures/tests_test_handlers_llm_provider.py__test_image_input.json +++ b/tests/fixtures/tests_test_handlers_llm_provider.py__test_image_input.json @@ -1,66 +1,26 @@ { -<<<<<<< HEAD -======= "id": "chatcmpl-D3rNhHv6KxKgfgH1DTBqe9gLaNzpU", "created": 1769812553, "model": "gpt-4o-2024-08-06", "object": "chat.completion", "system_fingerprint": "fp_eadf229d54", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "choices": [ { "finish_reason": "stop", "index": 0, "message": { -<<<<<<< HEAD - 
"annotations": [], - "content": "The image appears to depict a simple, pixelated smiley face with two rectangular eyes and a wide, curved mouth, giving a cheerful expression.", -======= "content": "{\"value\":\"The image is a simple pixel art of a smiley face. It features two square white eyes and a wide white smile set against a black background.\"}", "role": "assistant", "tool_calls": null, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 "function_call": null, "provider_specific_fields": { "refusal": null }, -<<<<<<< HEAD - "role": "assistant", - "tool_calls": null -======= "annotations": [] ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 }, "provider_specific_fields": {} } ], -<<<<<<< HEAD - "created": 1767182821, - "id": "chatcmpl-CspGfBSEI7umvGbclbA4o1OsXZsil", - "model": "gpt-4o-2024-08-06", - "object": "chat.completion", - "service_tier": "default", - "system_fingerprint": "fp_deacdd5f6f", - "usage": { - "completion_tokens": 30, - "completion_tokens_details": { - "accepted_prediction_tokens": 0, - "audio_tokens": 0, - "image_tokens": null, - "reasoning_tokens": 0, - "rejected_prediction_tokens": 0, - "text_tokens": null - }, - "prompt_tokens": 567, - "prompt_tokens_details": { - "audio_tokens": 0, - "cached_tokens": 0, - "image_tokens": null, - "text_tokens": null - }, - "total_tokens": 597 - } -======= "usage": { "completion_tokens": 38, "prompt_tokens": 492, @@ -81,5 +41,4 @@ } }, "service_tier": "default" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 } \ No newline at end of file diff --git a/tests/test_handlers_llm.py b/tests/test_handlers_llm.py index 7e0d6db3..c4c8be2c 100644 --- a/tests/test_handlers_llm.py +++ b/tests/test_handlers_llm.py @@ -1,15 +1,7 @@ from collections.abc import Callable from typing import Annotated -<<<<<<< HEAD -import pytest - -from effectful.handlers.llm import Template -from effectful.handlers.llm.providers import RetryLLMHandler -from effectful.handlers.llm.synthesis import ProgramSynthesis -======= from 
effectful.handlers.llm import Template ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 from effectful.handlers.llm.template import IsRecursive from effectful.ops.semantics import NotHandled, handler from effectful.ops.syntax import ObjectInterpretation, implements @@ -124,25 +116,6 @@ def test_primes_decode_int(): assert isinstance(result, int) -<<<<<<< HEAD -@pytest.mark.xfail(reason="Synthesis handler not yet implemented") -def test_count_char_with_program_synthesis(): - """Test the count_char template with program synthesis.""" - mock_code = """ -def count_occurrences(s): - return s.count('a') -""" - mock_provider = SingleResponseLLMProvider(mock_code) - - with handler(mock_provider), handler(ProgramSynthesis()): - count_a = count_char("a") - assert callable(count_a) - assert count_a("banana") == 3 - assert count_a("cherry") == 0 - - -======= ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 class FailingThenSucceedingProvider[T](ObjectInterpretation): """Mock provider that fails a specified number of times before succeeding.""" @@ -174,91 +147,6 @@ def _call[**P]( return self.success_response -<<<<<<< HEAD -def test_retry_handler_succeeds_after_failures(): - """Test that RetryLLMHandler retries and eventually succeeds.""" - provider = FailingThenSucceedingProvider( - fail_count=2, - success_response="Success after retries!", - exception_factory=lambda: ValueError("Temporary failure"), - ) - retry_handler = RetryLLMHandler(max_retries=3, exception_cls=ValueError) - - with handler(provider), handler(retry_handler): - result = limerick("test") - assert result == "Success after retries!" 
- assert provider.call_count == 3 # 2 failures + 1 success - - -def test_retry_handler_exhausts_retries(): - """Test that RetryLLMHandler raises after max retries exhausted.""" - provider = FailingThenSucceedingProvider( - fail_count=5, # More failures than retries - success_response="Never reached", - exception_factory=lambda: ValueError("Persistent failure"), - ) - retry_handler = RetryLLMHandler(max_retries=3, exception_cls=ValueError) - - with pytest.raises(ValueError, match="Persistent failure"): - with handler(provider), handler(retry_handler): - limerick("test") - - assert provider.call_count == 3 # Should have tried 3 times - - -def test_retry_handler_only_catches_specified_exception(): - """Test that RetryLLMHandler only catches the specified exception class.""" - provider = FailingThenSucceedingProvider( - fail_count=1, - success_response="Success", - exception_factory=lambda: TypeError("Wrong type"), # Different exception type - ) - retry_handler = RetryLLMHandler(max_retries=3, exception_cls=ValueError) - - # TypeError should not be caught, should propagate immediately - with pytest.raises(TypeError, match="Wrong type"): - with handler(provider), handler(retry_handler): - limerick("test") - - assert provider.call_count == 1 # Should have only tried once - - -def test_retry_handler_with_error_feedback(): - """Test that RetryLLMHandler includes error feedback when enabled.""" - call_prompts: list[str] = [] - - class PromptCapturingProvider(ObjectInterpretation): - """Provider that captures prompts and fails once.""" - - def __init__(self): - self.call_count = 0 - - @implements(Template.__apply__) - def _call(self, template: Template, *args, **kwargs): - self.call_count += 1 - call_prompts.append(template.__prompt_template__) - if self.call_count == 1: - raise ValueError("First attempt failed") - return "Success on retry" - - provider = PromptCapturingProvider() - retry_handler = RetryLLMHandler( - max_retries=2, add_error_feedback=True, 
exception_cls=ValueError - ) - - with handler(provider), handler(retry_handler): - result = limerick("test") - assert result == "Success on retry" - - assert len(call_prompts) == 2 - # First call has original prompt - assert "Write a limerick on the theme of {theme}." in call_prompts[0] - # Second call should include error feedback with traceback - assert "First attempt failed" in call_prompts[1] - - -======= ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def test_template_captures_other_templates_in_lexical_context(): """Test that Templates defined in lexical scope are captured (orchestrator pattern).""" diff --git a/tests/test_handlers_llm_encoding.py b/tests/test_handlers_llm_encoding.py index 8453f40e..bbcda789 100644 --- a/tests/test_handlers_llm_encoding.py +++ b/tests/test_handlers_llm_encoding.py @@ -1,978 +1,3 @@ -<<<<<<< HEAD -import inspect -from collections import ChainMap -from dataclasses import asdict, dataclass -from typing import NamedTuple, TypedDict - -import pydantic -import pytest -from PIL import Image - -from effectful.handlers.llm.encodable_type import ( - EncodableSynthesizedType, - SynthesizedType, -) -from effectful.handlers.llm.encoding import type_to_encodable_type -from effectful.handlers.llm.synthesis import SynthesisError -from effectful.ops.types import Operation, Term - - -def test_type_to_encodable_type_term(): - with pytest.raises(TypeError): - type_to_encodable_type(Term) - - -def test_type_to_encodable_type_operation(): - with pytest.raises(TypeError): - type_to_encodable_type(Operation) - - -def test_type_to_encodable_type_str(): - encodable = type_to_encodable_type(str) - encoded = encodable.encode("hello") - decoded = encodable.decode(encoded) - assert decoded == "hello" - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": "hello"}) - assert decoded.value == "hello" - - -def test_type_to_encodable_type_int(): - encodable = type_to_encodable_type(int) - encoded = 
encodable.encode(42) - decoded = encodable.decode(encoded) - assert decoded == 42 - assert isinstance(decoded, int) - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": 42}) - assert decoded.value == 42 - assert isinstance(decoded.value, int) - - -def test_type_to_encodable_type_bool(): - encodable = type_to_encodable_type(bool) - encoded = encodable.encode(True) - decoded = encodable.decode(encoded) - assert decoded is True - assert isinstance(decoded, bool) - encoded_false = encodable.encode(False) - decoded_false = encodable.decode(encoded_false) - assert decoded_false is False - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": True}) - assert decoded.value is True - assert isinstance(decoded.value, bool) - - -def test_type_to_encodable_type_float(): - encodable = type_to_encodable_type(float) - encoded = encodable.encode(3.14) - decoded = encodable.decode(encoded) - assert decoded == 3.14 - assert isinstance(decoded, float) - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": 3.14}) - assert decoded.value == 3.14 - assert isinstance(decoded.value, float) - - -def test_type_to_encodable_type_image(): - encodable = type_to_encodable_type(Image.Image) - image = Image.new("RGB", (10, 10), color="red") - encoded = encodable.encode(image) - assert isinstance(encoded, dict) - assert "url" in encoded - assert "detail" in encoded - assert encoded["detail"] == "auto" - assert encoded["url"].startswith("data:image/png;base64,") - decoded = encodable.decode(encoded) - assert isinstance(decoded, Image.Image) - assert decoded.size == (10, 10) - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": encoded}) - assert decoded.value["url"] == encoded["url"] - assert decoded.value["detail"] == "auto" - - -def test_type_to_encodable_type_image_roundtrip(): - encodable = 
type_to_encodable_type(Image.Image) - original = Image.new("RGB", (20, 20), color="green") - encoded = encodable.encode(original) - decoded = encodable.decode(encoded) - assert isinstance(decoded, Image.Image) - assert decoded.size == original.size - assert decoded.mode == original.mode - - -def test_type_to_encodable_type_image_decode_invalid_url(): - encodable = type_to_encodable_type(Image.Image) - encoded = {"url": "http://example.com/image.png", "detail": "auto"} - with pytest.raises(RuntimeError, match="expected base64 encoded image as data uri"): - encodable.decode(encoded) - - -def test_type_to_encodable_type_tuple(): - encodable = type_to_encodable_type(tuple[int, str]) - value = (1, "test") - encoded = encodable.encode(value) - decoded = encodable.decode(encoded) - assert decoded == value - assert isinstance(decoded, tuple) - assert decoded[0] == 1 - assert decoded[1] == "test" - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded}) - assert model_instance.value == encoded - assert isinstance(model_instance.value, tuple) - assert model_instance.value[0] == 1 - assert model_instance.value[1] == "test" - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == value - assert isinstance(decoded_from_model, tuple) - - -def test_type_to_encodable_type_tuple_empty(): - encodable = type_to_encodable_type(tuple[()]) - value = () - encoded = encodable.encode(value) - decoded = encodable.decode(encoded) - assert decoded == value - assert isinstance(decoded, tuple) - assert len(decoded) == 0 - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded}) - assert model_instance.value == encoded - assert isinstance(model_instance.value, tuple) - assert len(model_instance.value) == 0 - # Decode from model - 
decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == value - assert isinstance(decoded_from_model, tuple) - - -def test_type_to_encodable_type_tuple_three_elements(): - encodable = type_to_encodable_type(tuple[int, str, bool]) - value = (42, "hello", True) - encoded = encodable.encode(value) - decoded = encodable.decode(encoded) - assert decoded == value - assert isinstance(decoded, tuple) - assert decoded[0] == 42 - assert decoded[1] == "hello" - assert decoded[2] is True - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded}) - assert model_instance.value == encoded - assert isinstance(model_instance.value, tuple) - assert model_instance.value[0] == 42 - assert model_instance.value[1] == "hello" - assert model_instance.value[2] is True - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == value - assert isinstance(decoded_from_model, tuple) - - -def test_type_to_encodable_type_list(): - encodable = type_to_encodable_type(list[int]) - value = [1, 2, 3, 4, 5] - encoded = encodable.encode(value) - decoded = encodable.decode(encoded) - assert decoded == value - assert isinstance(decoded, list) - assert all(isinstance(elem, int) for elem in decoded) - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded}) - assert model_instance.value == encoded - assert isinstance(model_instance.value, list) - assert model_instance.value == [1, 2, 3, 4, 5] - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == value - assert isinstance(decoded_from_model, list) - assert all(isinstance(elem, int) for elem in decoded_from_model) - - -def test_type_to_encodable_type_list_str(): - encodable = type_to_encodable_type(list[str]) - 
value = ["hello", "world", "test"] - encoded = encodable.encode(value) - decoded = encodable.decode(encoded) - assert decoded == value - assert isinstance(decoded, list) - assert all(isinstance(elem, str) for elem in decoded) - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded}) - assert model_instance.value == encoded - assert isinstance(model_instance.value, list) - assert model_instance.value == ["hello", "world", "test"] - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == value - assert isinstance(decoded_from_model, list) - assert all(isinstance(elem, str) for elem in decoded_from_model) - - -def test_type_to_encodable_type_namedtuple(): - class Point(NamedTuple): - x: int - y: int - - encodable = type_to_encodable_type(Point) - point = Point(10, 20) - encoded = encodable.encode(point) - decoded = encodable.decode(encoded) - assert decoded == point - assert isinstance(decoded, Point) - assert decoded.x == 10 - assert decoded.y == 20 - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": {"x": 10, "y": 20}}) - assert decoded.value == point - assert isinstance(decoded.value, Point) - - -def test_type_to_encodable_type_namedtuple_with_str(): - class Person(NamedTuple): - name: str - age: int - - encodable = type_to_encodable_type(Person) - person = Person("Alice", 30) - encoded = encodable.encode(person) - decoded = encodable.decode(encoded) - assert decoded == person - assert isinstance(decoded, Person) - assert decoded.name == "Alice" - assert decoded.age == 30 - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": {"name": "Alice", "age": 30}}) - assert decoded.value == person - assert isinstance(decoded.value, Person) - - -def test_type_to_encodable_type_typeddict(): - class User(TypedDict): - name: 
str - age: int - - encodable = type_to_encodable_type(User) - user = User(name="Bob", age=25) - encoded = encodable.encode(user) - decoded = encodable.decode(encoded) - assert decoded == user - assert isinstance(decoded, dict) - assert decoded["name"] == "Bob" - assert decoded["age"] == 25 - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": {"name": "Bob", "age": 25}}) - assert decoded.value == user - assert isinstance(decoded.value, dict) - - -def test_type_to_encodable_type_typeddict_optional(): - class Config(TypedDict, total=False): - host: str - port: int - - encodable = type_to_encodable_type(Config) - config = Config(host="localhost", port=8080) - encoded = encodable.encode(config) - decoded = encodable.decode(encoded) - assert decoded == config - assert decoded["host"] == "localhost" - assert decoded["port"] == 8080 - Model = pydantic.create_model("Model", value=encodable.t) - decoded = Model.model_validate({"value": {"host": "localhost", "port": 8080}}) - assert decoded.value == config - assert isinstance(decoded.value, dict) - - -def test_type_to_encodable_type_complex(): - encodable = type_to_encodable_type(complex) - value = 3 + 4j - encoded = encodable.encode(value) - decoded = encodable.decode(encoded) - assert decoded == value - assert isinstance(decoded, complex) - assert decoded.real == 3.0 - assert decoded.imag == 4.0 - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded}) - assert model_instance.value == encoded - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == value - assert isinstance(decoded_from_model, complex) - - -def test_type_to_encodable_type_tuple_of_images(): - encodable = type_to_encodable_type(tuple[Image.Image, Image.Image]) - image1 = Image.new("RGB", (10, 10), color="red") - image2 = Image.new("RGB", (20, 20), 
color="blue") - value = (image1, image2) - - encoded = encodable.encode(value) - assert isinstance(encoded, tuple) - assert len(encoded) == 2 - assert isinstance(encoded[0], dict) - assert isinstance(encoded[1], dict) - assert "url" in encoded[0] - assert "url" in encoded[1] - assert encoded[0]["url"].startswith("data:image/png;base64,") - assert encoded[1]["url"].startswith("data:image/png;base64,") - - decoded = encodable.decode(encoded) - assert isinstance(decoded, tuple) - assert len(decoded) == 2 - assert isinstance(decoded[0], Image.Image) - assert isinstance(decoded[1], Image.Image) - assert decoded[0].size == (10, 10) - assert decoded[1].size == (20, 20) - - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded}) - assert model_instance.value == encoded - assert isinstance(model_instance.value, tuple) - assert len(model_instance.value) == 2 - assert isinstance(model_instance.value[0], dict) - assert isinstance(model_instance.value[1], dict) - assert model_instance.value[0]["url"] == encoded[0]["url"] - assert model_instance.value[1]["url"] == encoded[1]["url"] - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert isinstance(decoded_from_model, tuple) - assert len(decoded_from_model) == 2 - assert isinstance(decoded_from_model[0], Image.Image) - assert isinstance(decoded_from_model[1], Image.Image) - assert decoded_from_model[0].size == (10, 10) - assert decoded_from_model[1].size == (20, 20) - - # Roundtrip test - original = ( - Image.new("RGB", (15, 15), color="green"), - Image.new("RGB", (25, 25), color="yellow"), - ) - encoded_roundtrip = encodable.encode(original) - decoded_roundtrip = encodable.decode(encoded_roundtrip) - assert isinstance(decoded_roundtrip, tuple) - assert len(decoded_roundtrip) == 2 - assert decoded_roundtrip[0].size == original[0].size - assert decoded_roundtrip[1].size == original[1].size - 
assert decoded_roundtrip[0].mode == original[0].mode - assert decoded_roundtrip[1].mode == original[1].mode - - -def test_type_to_encodable_type_list_of_images(): - encodable = type_to_encodable_type(list[Image.Image]) - images = [ - Image.new("RGB", (10, 10), color="red"), - Image.new("RGB", (20, 20), color="blue"), - Image.new("RGB", (30, 30), color="green"), - ] - - encoded = encodable.encode(images) - assert isinstance(encoded, list) - assert len(encoded) == 3 - assert all(isinstance(elem, dict) for elem in encoded) - assert all("url" in elem for elem in encoded) - assert all(elem["url"].startswith("data:image/png;base64,") for elem in encoded) - - decoded = encodable.decode(encoded) - assert isinstance(decoded, list) - assert len(decoded) == 3 - assert all(isinstance(elem, Image.Image) for elem in decoded) - assert decoded[0].size == (10, 10) - assert decoded[1].size == (20, 20) - assert decoded[2].size == (30, 30) - - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded}) - assert model_instance.value == encoded - assert isinstance(model_instance.value, list) - assert len(model_instance.value) == 3 - assert all(isinstance(elem, dict) for elem in model_instance.value) - assert all("url" in elem for elem in model_instance.value) - assert model_instance.value[0]["url"] == encoded[0]["url"] - assert model_instance.value[1]["url"] == encoded[1]["url"] - assert model_instance.value[2]["url"] == encoded[2]["url"] - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert isinstance(decoded_from_model, list) - assert len(decoded_from_model) == 3 - assert all(isinstance(elem, Image.Image) for elem in decoded_from_model) - assert decoded_from_model[0].size == (10, 10) - assert decoded_from_model[1].size == (20, 20) - assert decoded_from_model[2].size == (30, 30) - - # Roundtrip test - original = [ - Image.new("RGB", (15, 15), 
color="yellow"), - Image.new("RGB", (25, 25), color="purple"), - ] - encoded_roundtrip = encodable.encode(original) - decoded_roundtrip = encodable.decode(encoded_roundtrip) - assert isinstance(decoded_roundtrip, list) - assert len(decoded_roundtrip) == 2 - assert decoded_roundtrip[0].size == original[0].size - assert decoded_roundtrip[1].size == original[1].size - assert decoded_roundtrip[0].mode == original[0].mode - assert decoded_roundtrip[1].mode == original[1].mode - - -def test_type_to_encodable_type_dataclass(): - @dataclass - class Point: - x: int - y: int - - encodable = type_to_encodable_type(Point) - point = Point(10, 20) - encoded = encodable.encode(point) - decoded = encodable.decode(encoded) - assert decoded == point - assert isinstance(decoded, Point) - assert decoded.x == 10 - assert decoded.y == 20 - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": asdict(encoded)}) - assert model_instance.value.x == 10 - assert model_instance.value.y == 20 - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == point - assert isinstance(decoded_from_model, Point) - - -def test_type_to_encodable_type_dataclass_with_str(): - @dataclass - class Person: - name: str - age: int - - encodable = type_to_encodable_type(Person) - person = Person("Alice", 30) - encoded = encodable.encode(person) - decoded = encodable.decode(encoded) - assert decoded == person - assert isinstance(decoded, Person) - assert decoded.name == "Alice" - assert decoded.age == 30 - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": asdict(encoded)}) - assert model_instance.value.name == "Alice" - assert model_instance.value.age == 30 - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == 
person - assert isinstance(decoded_from_model, Person) - - -def test_type_to_encodable_type_dataclass_with_list(): - @dataclass - class Container: - items: list[int] - name: str - - encodable = type_to_encodable_type(Container) - container = Container(items=[1, 2, 3], name="test") - encoded = encodable.encode(container) - decoded = encodable.decode(encoded) - assert decoded == container - assert isinstance(decoded, Container) - assert decoded.items == [1, 2, 3] - assert decoded.name == "test" - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": asdict(encoded)}) - assert model_instance.value.items == [1, 2, 3] - assert model_instance.value.name == "test" - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == container - assert isinstance(decoded_from_model, Container) - - -def test_type_to_encodable_type_dataclass_with_tuple(): - @dataclass - class Pair: - values: tuple[int, str] - count: int - - encodable = type_to_encodable_type(Pair) - pair = Pair(values=(42, "hello"), count=2) - encoded = encodable.encode(pair) - decoded = encodable.decode(encoded) - assert decoded == pair - assert isinstance(decoded, Pair) - assert decoded.values == (42, "hello") - assert decoded.count == 2 - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": asdict(encoded)}) - assert model_instance.value.values == (42, "hello") - assert model_instance.value.count == 2 - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == pair - assert isinstance(decoded_from_model, Pair) - - -def test_type_to_encodable_type_dataclass_with_optional(): - @dataclass - class Config: - host: str - port: int - timeout: float | None = None - - encodable = type_to_encodable_type(Config) - config = 
Config(host="localhost", port=8080, timeout=5.0) - encoded = encodable.encode(config) - decoded = encodable.decode(encoded) - assert decoded == config - assert isinstance(decoded, Config) - assert decoded.host == "localhost" - assert decoded.port == 8080 - assert decoded.timeout == 5.0 - - # Test with None value - config_none = Config(host="localhost", port=8080, timeout=None) - encoded_none = encodable.encode(config_none) - decoded_none = encodable.decode(encoded_none) - assert decoded_none == config_none - assert decoded_none.timeout is None - - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": asdict(encoded)}) - assert model_instance.value.host == "localhost" - assert model_instance.value.port == 8080 - assert model_instance.value.timeout == 5.0 - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == config - - -def test_type_to_encodable_type_nested_dataclass(): - @dataclass - class Address: - street: str - city: str - - @dataclass - class Person: - name: str - age: int - address: Address - - encodable = type_to_encodable_type(Person) - address = Address(street="123 Main St", city="New York") - person = Person(name="Bob", age=25, address=address) - - encoded = encodable.encode(person) - assert isinstance(encoded, Person) - assert hasattr(encoded, "name") - assert hasattr(encoded, "age") - assert hasattr(encoded, "address") - assert isinstance(encoded.address, Address) - assert encoded.address.street == "123 Main St" - assert encoded.address.city == "New York" - - decoded = encodable.decode(encoded) - assert isinstance(decoded, Person) - assert isinstance(decoded.address, Address) - assert decoded.name == "Bob" - assert decoded.age == 25 - assert decoded.address.street == "123 Main St" - assert decoded.address.city == "New York" - - # Test with pydantic model validation - Model = 
pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": asdict(encoded)}) - assert model_instance.value.name == "Bob" - assert model_instance.value.age == 25 - assert model_instance.value.address.street == "123 Main St" - assert model_instance.value.address.city == "New York" - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == person - assert isinstance(decoded_from_model, Person) - assert isinstance(decoded_from_model.address, Address) - - -def test_type_to_encodable_type_pydantic_model(): - class Point(pydantic.BaseModel): - x: int - y: int - - encodable = type_to_encodable_type(Point) - point = Point(x=10, y=20) - encoded = encodable.encode(point) - decoded = encodable.decode(encoded) - assert decoded == point - assert isinstance(decoded, Point) - assert decoded.x == 10 - assert decoded.y == 20 - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded.model_dump()}) - assert model_instance.value.x == 10 - assert model_instance.value.y == 20 - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == point - assert isinstance(decoded_from_model, Point) - - -def test_type_to_encodable_type_pydantic_model_with_str(): - class Person(pydantic.BaseModel): - name: str - age: int - - encodable = type_to_encodable_type(Person) - person = Person(name="Alice", age=30) - encoded = encodable.encode(person) - decoded = encodable.decode(encoded) - assert decoded == person - assert isinstance(decoded, Person) - assert decoded.name == "Alice" - assert decoded.age == 30 - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded.model_dump()}) - assert model_instance.value.name == "Alice" - assert model_instance.value.age == 
30 - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == person - assert isinstance(decoded_from_model, Person) - - -def test_type_to_encodable_type_pydantic_model_with_list(): - class Container(pydantic.BaseModel): - items: list[int] - name: str - - encodable = type_to_encodable_type(Container) - container = Container(items=[1, 2, 3], name="test") - encoded = encodable.encode(container) - decoded = encodable.decode(encoded) - assert decoded == container - assert isinstance(decoded, Container) - assert decoded.items == [1, 2, 3] - assert decoded.name == "test" - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded.model_dump()}) - assert model_instance.value.items == [1, 2, 3] - assert model_instance.value.name == "test" - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == container - assert isinstance(decoded_from_model, Container) - - -def test_type_to_encodable_type_nested_pydantic_model(): - class Address(pydantic.BaseModel): - street: str - city: str - - class Person(pydantic.BaseModel): - name: str - age: int - address: Address - - encodable = type_to_encodable_type(Person) - address = Address(street="123 Main St", city="New York") - person = Person(name="Bob", age=25, address=address) - - encoded = encodable.encode(person) - assert isinstance(encoded, pydantic.BaseModel) - assert hasattr(encoded, "name") - assert hasattr(encoded, "age") - assert hasattr(encoded, "address") - assert isinstance(encoded.address, pydantic.BaseModel) - assert encoded.address.street == "123 Main St" - assert encoded.address.city == "New York" - - decoded = encodable.decode(encoded) - assert isinstance(decoded, Person) - assert isinstance(decoded.address, Address) - assert decoded.name == "Bob" - assert decoded.age == 25 - assert decoded.address.street == "123 
Main St" - assert decoded.address.city == "New York" - - # Test with pydantic model validation - Model = pydantic.create_model("Model", value=encodable.t) - model_instance = Model.model_validate({"value": encoded.model_dump()}) - assert model_instance.value.name == "Bob" - assert model_instance.value.age == 25 - assert model_instance.value.address.street == "123 Main St" - assert model_instance.value.address.city == "New York" - # Decode from model - decoded_from_model = encodable.decode(model_instance.value) - assert decoded_from_model == person - assert isinstance(decoded_from_model, Person) - assert isinstance(decoded_from_model.address, Address) - - -class TestEncodableSynthesizedType: - """Tests for EncodableSynthesizedType encode/decode functionality.""" - - def test_decode_simple_class(self): - """Test decoding a simple class from SynthesizedType.""" - synth = SynthesizedType( - type_name="Greeter", - module_code="""\ -class Greeter: - def greet(self, name: str) -> str: - return f"Hello, {name}!" -""", - ) - - result = EncodableSynthesizedType.decode(synth) - - assert isinstance(result, type) - assert result.__name__ == "Greeter" - - # Test instantiation and method call - instance = result() - assert instance.greet("World") == "Hello, World!" - - def test_decode_with_inheritance(self): - """Test decoding a class that inherits from a base class in context.""" - - class Animal: - def speak(self) -> str: - raise NotImplementedError - - synth = SynthesizedType( - type_name="Dog", - module_code="""\ -class Dog(Animal): - def speak(self) -> str: - return "Woof!" -""", - ) - - # Attach context with base class - object.__setattr__(synth, "_decode_context", ChainMap({"Animal": Animal})) - - result = EncodableSynthesizedType.decode(synth) - - assert isinstance(result, type) - assert issubclass(result, Animal) - assert result.__name__ == "Dog" - - instance = result() - assert instance.speak() == "Woof!" 
- - def test_decode_attaches_source_attribute(self): - """Test that decoded types have __source__ attribute.""" - synth = SynthesizedType( - type_name="Simple", - module_code="class Simple:\n pass", - ) - - result = EncodableSynthesizedType.decode(synth) - - assert hasattr(result, "__source__") - assert "class Simple" in result.__source__ - - def test_decode_attaches_synthesized_attribute(self): - """Test that decoded types have __synthesized__ attribute.""" - synth = SynthesizedType( - type_name="Simple", - module_code="class Simple:\n pass", - ) - - result = EncodableSynthesizedType.decode(synth) - - assert hasattr(result, "__synthesized__") - assert result.__synthesized__ is synth - - def test_decode_inspect_getsource_works(self): - """Test that inspect.getsource() works on synthesized types.""" - synth = SynthesizedType( - type_name="Documented", - module_code='''\ -class Documented: - """A documented class.""" - - def method(self) -> int: - return 42 -''', - ) - - result = EncodableSynthesizedType.decode(synth) - source = inspect.getsource(result) - - assert "class Documented" in source - assert "A documented class" in source - assert "def method" in source - assert source == result.__source__ - - def test_decode_with_helper_in_class(self): - """Test decoding a class that uses a helper method.""" - synth = SynthesizedType( - type_name="Counter", - module_code="""\ -class Counter: - def __init__(self): - self.value = 0 - - def _increment(self, x): - return x + 1 - - def increment(self): - self.value = self._increment(self.value) - return self.value -""", - ) - - result = EncodableSynthesizedType.decode(synth) - instance = result() - - assert instance.increment() == 1 - assert instance.increment() == 2 - assert instance.increment() == 3 - - def test_decode_syntax_error_raises_synthesis_error(self): - """Test that syntax errors raise SynthesisError.""" - synth = SynthesizedType( - type_name="Broken", - module_code="class Broken\n pass # missing colon", - ) - - 
with pytest.raises(SynthesisError, match="Syntax error"): - EncodableSynthesizedType.decode(synth) - - def test_decode_missing_type_raises_synthesis_error(self): - """Test that missing type name raises SynthesisError.""" - synth = SynthesizedType( - type_name="Missing", - module_code="class WrongName:\n pass", - ) - - with pytest.raises(SynthesisError, match="not found after execution"): - EncodableSynthesizedType.decode(synth) - - def test_decode_non_type_raises_synthesis_error(self): - """Test that non-type result raises SynthesisError.""" - synth = SynthesizedType( - type_name="NotAType", - module_code="NotAType = 42", - ) - - with pytest.raises(SynthesisError, match="is not a type"): - EncodableSynthesizedType.decode(synth) - - def test_encode_simple_class(self): - """Test encoding a simple class to SynthesizedType.""" - - class MyClass: - def method(self) -> str: - return "hello" - - result = EncodableSynthesizedType.encode(MyClass) - - assert isinstance(result, SynthesizedType) - assert result.type_name == "MyClass" - assert "class MyClass" in result.module_code - assert "def method" in result.module_code - - def test_encode_builtin_class_fallback(self): - """Test encoding a builtin class (source unavailable) uses fallback.""" - # int is a builtin, so inspect.getsource() will fail - result = EncodableSynthesizedType.encode(int) - - assert isinstance(result, SynthesizedType) - assert result.type_name == "int" - assert "class int" in result.module_code - assert "Source unavailable" in result.module_code - - def test_serialize_produces_json(self): - """Test that serialize produces valid JSON content blocks.""" - synth = SynthesizedType( - type_name="TestType", - module_code="class TestType:\n pass", - ) - - result = EncodableSynthesizedType.serialize(synth) - - assert isinstance(result, list) - assert len(result) == 1 - assert result[0]["type"] == "text" - # Verify it's valid JSON - import json - - parsed = json.loads(result[0]["text"]) - assert 
parsed["type_name"] == "TestType" - assert "class TestType" in parsed["module_code"] - - def test_decode_unique_module_names(self): - """Test that each decoded type gets a unique module name.""" - synth1 = SynthesizedType( - type_name="Unique", - module_code="class Unique:\n value = 1", - ) - synth2 = SynthesizedType( - type_name="Unique", - module_code="class Unique:\n value = 2", - ) - - result1 = EncodableSynthesizedType.decode(synth1) - result2 = EncodableSynthesizedType.decode(synth2) - - # Both should be different types with different module names - assert result1 is not result2 - assert result1.__module__ != result2.__module__ - assert result1.value == 1 - assert result2.value == 2 - - def test_decode_context_with_multiple_items(self): - """Test decoding with context containing multiple items.""" - - class BaseA: - pass - - class BaseB: - pass - - def helper() -> int: - return 100 - - synth = SynthesizedType( - type_name="Combined", - module_code="""\ -class Combined(BaseA, BaseB): - def get_value(self) -> int: - return helper() -""", - ) - - context = ChainMap({"BaseA": BaseA, "BaseB": BaseB, "helper": helper}) - object.__setattr__(synth, "_decode_context", context) - - result = EncodableSynthesizedType.decode(synth) - - assert issubclass(result, BaseA) - assert issubclass(result, BaseB) - instance = result() - assert instance.get_value() == 100 -======= """ Law-based test suite for effectful.handlers.llm.encoding. @@ -1790,4 +815,3 @@ def _fn(): max_tokens=200, ) assert response is not None ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 diff --git a/tests/test_handlers_llm_provider.py b/tests/test_handlers_llm_provider.py index a39856e2..0441ae72 100644 --- a/tests/test_handlers_llm_provider.py +++ b/tests/test_handlers_llm_provider.py @@ -1,20 +1,5 @@ """Tests for LLM handlers and providers. 
This module tests the functionality from build/main.py and build/llm.py, -<<<<<<< HEAD -breaking down individual components like LiteLLMProvider, LLMLoggingHandler, -ProgramSynthesis, and sampling strategies. -""" - -import functools -import json -import logging -import os -from collections.abc import Callable -from enum import Enum -from pathlib import Path - -import pytest -======= breaking down individual components like LiteLLMProvider, ProgramSynthesis, and sampling strategies. """ @@ -34,21 +19,11 @@ import tenacity from litellm import ChatCompletionMessageToolCall from litellm.caching.caching import Cache ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 from litellm.files.main import ModelResponse from PIL import Image from pydantic import BaseModel, Field from pydantic.dataclasses import dataclass -<<<<<<< HEAD -from effectful.handlers.llm import Template -from effectful.handlers.llm.providers import ( - LiteLLMProvider, - LLMLoggingHandler, - completion, -) -from effectful.handlers.llm.synthesis import ProgramSynthesis, SynthesisError -======= from effectful.handlers.llm import Agent, Template from effectful.handlers.llm.completions import ( DecodedToolCall, @@ -65,7 +40,6 @@ ) from effectful.handlers.llm.encoding import Encodable, SynthesizedFunction from effectful.handlers.llm.evaluation import UnsafeEvalProvider ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 from effectful.ops.semantics import fwd, handler from effectful.ops.syntax import ObjectInterpretation, implements from effectful.ops.types import NotHandled @@ -88,14 +62,8 @@ REBUILD_FIXTURES = os.getenv("REBUILD_FIXTURES") == "true" -<<<<<<< HEAD -# ============================================================================ - - -======= # ============================================================================ ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 # Test Fixtures and Mock Data # ============================================================================ def 
retry_on_error(error: type[Exception], n: int): @@ -117,21 +85,13 @@ def wrapper(*args, **kwargs): class ReplayLiteLLMProvider(LiteLLMProvider): test_id: str -<<<<<<< HEAD -======= call_count = 0 ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def __init__(self, request: pytest.FixtureRequest, *args, **kwargs): super().__init__(*args, **kwargs) self.test_id = request.node.nodeid self.test_id = self.test_id.replace("/", "_").replace(":", "_") -<<<<<<< HEAD - @implements(completion) - def _completion(self, *args, **kwargs): - path = FIXTURE_DIR / f"{self.test_id}.json" -======= def call_id(self): call_id = f"_{self.call_count}" if self.call_count > 0 else "" self.call_count += 1 @@ -140,24 +100,16 @@ def call_id(self): @implements(completion) def _completion(self, *args, **kwargs): path = FIXTURE_DIR / f"{self.test_id}{self.call_id()}.json" ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 if not REBUILD_FIXTURES: if not path.exists(): raise RuntimeError(f"Missing replay fixture: {path}") with path.open() as f: result = ModelResponse.model_validate(json.load(f)) return result -<<<<<<< HEAD - result = fwd(self.model_name, *args, **(self.config | kwargs)) - path.parent.mkdir(exist_ok=True, parents=True) - with path.open("w") as f: - json.dump(result.model_dump(), f, indent=2, sort_keys=True) -======= result = fwd(*args, **kwargs) path.parent.mkdir(exist_ok=True, parents=True) with path.open("w") as f: f.write(result.model_dump_json(indent=2)) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 return result @@ -168,11 +120,7 @@ class LimitLLMCallsHandler(ObjectInterpretation): def __init__(self, max_calls: int): self.max_calls = max_calls -<<<<<<< HEAD - @implements(completion) -======= @implements(call_assistant) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def _completion(self, *args, **kwargs): if self.no_calls >= self.max_calls: raise RuntimeError( @@ -182,11 +130,7 @@ def _completion(self, *args, **kwargs): return fwd() -<<<<<<< HEAD -class MovieGenre(str, 
Enum): -======= class MovieGenre(StrEnum): ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 """Movie genre classifications.""" ACTION = "action" @@ -234,8 +178,6 @@ def create_function(char: str) -> Callable[[str], int]: raise NotHandled -<<<<<<< HEAD -======= class _ToolNameAgent(Agent): @Template.define def helper(self) -> str: @@ -248,7 +190,6 @@ def ask(self, prompt: str) -> str: raise NotHandled ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 class TestLiteLLMProvider: """Tests for LiteLLMProvider basic functionality.""" @@ -257,11 +198,7 @@ class TestLiteLLMProvider: def test_simple_prompt_multiple_models(self, request, model_name): """Test that LiteLLMProvider works with different model configurations.""" with ( -<<<<<<< HEAD - handler(ReplayLiteLLMProvider(request, model_name=model_name)), -======= handler(ReplayLiteLLMProvider(request, model=model_name)), ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 handler(LimitLLMCallsHandler(max_calls=1)), ): result = simple_prompt("testing") @@ -278,11 +215,7 @@ def test_simple_prompt_multiple_models(self, request, model_name): def test_simple_prompt_cross_endpoint(self, request, model_name): """Test that ReplayLiteLLMProvider works across different API endpoints.""" with ( -<<<<<<< HEAD - handler(ReplayLiteLLMProvider(request, model_name=model_name)), -======= handler(ReplayLiteLLMProvider(request, model=model_name)), ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 handler(LimitLLMCallsHandler(max_calls=1)), ): result = simple_prompt("testing") @@ -295,11 +228,7 @@ def test_structured_output(self, request): plot = "A rogue cop must stop a evil group from taking over a skyscraper." 
with ( -<<<<<<< HEAD - handler(ReplayLiteLLMProvider(request, model_name="gpt-5-nano")), -======= handler(ReplayLiteLLMProvider(request, model="gpt-5-nano")), ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 handler(LimitLLMCallsHandler(max_calls=1)), ): classification = classify_genre(plot) @@ -314,11 +243,7 @@ def test_structured_output(self, request): def test_integer_return_type(self, request): """Test LiteLLMProvider with integer return type.""" with ( -<<<<<<< HEAD - handler(ReplayLiteLLMProvider(request, model_name="gpt-5-nano")), -======= handler(ReplayLiteLLMProvider(request, model="gpt-5-nano")), ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 handler(LimitLLMCallsHandler(max_calls=1)), ): result = generate_number(100) @@ -332,13 +257,7 @@ def test_with_config_params(self, request): # Test with temperature parameter with ( handler( -<<<<<<< HEAD - ReplayLiteLLMProvider( - request, model_name="gpt-4o-mini", temperature=0.1 - ) -======= ReplayLiteLLMProvider(request, model="gpt-4o-mini", temperature=0.1) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ), handler(LimitLLMCallsHandler(max_calls=1)), ): @@ -346,68 +265,6 @@ def test_with_config_params(self, request): assert isinstance(result, str) -<<<<<<< HEAD -class TestLLMLoggingHandler: - """Tests for LLMLoggingHandler functionality.""" - - @requires_openai - def test_logs_requests(self, request, caplog): - """Test that LLMLoggingHandler properly logs LLM requests.""" - with caplog.at_level(logging.INFO): - with ( - handler(ReplayLiteLLMProvider(request, model_name="gpt-4o-mini")), - handler(LLMLoggingHandler()), - handler(LimitLLMCallsHandler(max_calls=1)), - ): - result = simple_prompt("testing") - assert isinstance(result, str) - - # Check that logging occurred - assert any("llm.request" in record.message for record in caplog.records) - - @requires_openai - def test_custom_logger(self, request, caplog): - """Test LLMLoggingHandler with a custom logger.""" - custom_logger = 
logging.getLogger("test_custom_logger") - - with caplog.at_level(logging.INFO, logger="test_custom_logger"): - with ( - handler(ReplayLiteLLMProvider(request, model_name="gpt-4o-mini")), - handler(LLMLoggingHandler(logger=custom_logger)), - handler(LimitLLMCallsHandler(max_calls=1)), - ): - result = simple_prompt("testing") - assert isinstance(result, str) - - # Verify custom logger was used - assert any( - record.name == "test_custom_logger" and "llm.request" in record.message - for record in caplog.records - ) - - -@pytest.mark.xfail(reason="Program synthesis not implemented") -class TestProgramSynthesis: - """Tests for ProgramSynthesis handler functionality.""" - - @pytest.mark.xfail - @requires_openai - @retry_on_error(error=SynthesisError, n=3) - def test_generates_callable(self, request): - """Test ProgramSynthesis handler generates executable code.""" - with ( - handler(ReplayLiteLLMProvider(request, model_name="gpt-4o-mini")), - handler(ProgramSynthesis()), - handler(LimitLLMCallsHandler(max_calls=1)), - ): - count_func = create_function("a") - - assert callable(count_func) - # Test the generated function - assert count_func("banana") == 3 - assert count_func("cherry") == 0 - assert count_func("aardvark") == 3 -======= @requires_openai def test_agent_tool_names_are_openai_compatible_integration(): agent = _ToolNameAgent() @@ -429,7 +286,6 @@ def test_agent_tool_names_are_openai_compatible_integration(): assert isinstance(result, str) assert result ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def smiley_face() -> Image.Image: @@ -461,18 +317,12 @@ def categorise_image(image: Image.Image) -> str: @requires_openai def test_image_input(request): with ( -<<<<<<< HEAD - handler(ReplayLiteLLMProvider(request, model_name="gpt-4o")), -======= handler(ReplayLiteLLMProvider(request, model="gpt-4o")), ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 handler(LimitLLMCallsHandler(max_calls=3)), ): assert any("smile" in categorise_image(smiley_face()) for _ in 
range(3)) -<<<<<<< HEAD -======= class ImageDescription(BaseModel): """Description of a set of images.""" @@ -517,7 +367,6 @@ def test_list_image_input(request): assert result.count == 2 ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 class BookReview(BaseModel): """A book review with rating and summary.""" @@ -538,11 +387,7 @@ def test_pydantic_basemodel_return(self, request): plot = "A young wizard discovers he has magical powers and goes to a school for wizards." with ( -<<<<<<< HEAD - handler(ReplayLiteLLMProvider(request, model_name="gpt-5-nano")), -======= handler(ReplayLiteLLMProvider(request, model="gpt-5-nano")), ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 handler(LimitLLMCallsHandler(max_calls=1)), ): review = review_book(plot) @@ -554,8 +399,6 @@ def test_pydantic_basemodel_return(self, request): assert 1 <= review.rating <= 5 assert isinstance(review.summary, str) assert len(review.summary) > 0 -<<<<<<< HEAD -======= def test_litellm_caching_integration(request): @@ -2195,4 +2038,3 @@ def _completion(self, model, messages=None, **kwargs): # Only messages from the successful call should be in history assert len(agent.__history__) >= 2 assert len(agent.__history__) > history_after_error ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 diff --git a/tests/test_handlers_llm_template.py b/tests/test_handlers_llm_template.py index da8776cd..7135f170 100644 --- a/tests/test_handlers_llm_template.py +++ b/tests/test_handlers_llm_template.py @@ -1,85 +1,3 @@ -<<<<<<< HEAD -from dataclasses import dataclass - -import pytest - -from effectful.handlers.llm import Template, Tool -from effectful.handlers.llm.providers import format_model_input -from effectful.ops.semantics import NotHandled, handler -from effectful.ops.syntax import ObjectInterpretation, implements - - -def test_template_method(): - """Test that methods can be used as templates.""" - local_variable = None # noqa: F841 - - @dataclass - class A: - x: int - - @Tool.define - def random(self) -> 
int: - """Returns a random number, chosen by fair dice roll.""" - return 4 - - @Template.define - def f(self) -> int: - """What is the number after 3?""" - raise NotHandled - - a = A(0) - assert isinstance(a.f, Template) - assert "random" in a.f.tools - assert "f" in a.f.tools - assert "local_variable" in a.f.__context__ and "local_variable" not in a.f.tools - assert a.f.tools["random"]() == 4 - - class B(A): - @Tool.define - def reverse(self, s: str) -> str: - """Reverses a string.""" - return str(reversed(s)) - - b = B(1) - assert isinstance(b.f, Template) - assert "random" in b.f.tools - assert "reverse" in b.f.tools - assert "local_variable" in b.f.__context__ and "local_variable" not in a.f.tools - - -def test_template_method_nested_class(): - """Test that template methods work on nested classes.""" - local_variable = "test" # noqa: F841 - - @dataclass - class A: - x: int - - @Tool.define - @staticmethod - def random() -> int: - """Returns a random number, chosen by fair dice roll.""" - return 4 - - @dataclass - class B: - y: bool - - @Template.define - def f(self) -> int: - """What is the number after 3?""" - raise NotHandled - - a = A.B(True) - assert isinstance(a.f, Template) - assert "random" in a.f.tools - assert "f" in a.f.tools - assert "local_variable" in a.f.__context__ and "local_variable" not in a.f.tools - assert a.f.tools["random"]() == 4 - - -class A: -======= """Tests for Agent mixin message sequence semantics.""" import collections @@ -163,22 +81,12 @@ def t() -> str: class _ModuleLevelA: ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 @Template.define def f(self) -> str: """Do stuff""" raise NotImplementedError -<<<<<<< HEAD -def test_template_method_module(): - """Test that template methods work when defined on module-level classes.""" - a = A() - assert isinstance(a.f, Template) - - -======= ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def _define_scoped_templates(): @Tool.define def shown(self) -> int: @@ -221,8 +129,6 @@ def j(self) 
-> str: return [A().f, g, _nested(), B().i, B.C().j] -<<<<<<< HEAD -======= # --------------------------------------------------------------------------- # Helpers (same pattern as test_handlers_llm_provider.py) # --------------------------------------------------------------------------- @@ -932,7 +838,6 @@ def test_template_method_module(): assert isinstance(a.f, Template) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def test_template_method_scoping(): @Tool.define def hidden(self) -> int: @@ -946,36 +851,6 @@ def hidden(self) -> int: assert "hidden" not in t.__context__ -<<<<<<< HEAD -class TemplateStringIntp(ObjectInterpretation): - """Returns the result of template formatting as a string. Only supports - templates that produce string prompts. - - """ - - @implements(Template.__apply__) - def _[**P, T]( - self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs - ) -> T: - model_input = format_model_input(template, *args, **kwargs) - template_result = model_input[0]["content"] - assert len(template_result) == 1 - return template_result[0]["text"] - - -def test_template_formatting_simple(): - @Template.define - @staticmethod - def rhyme(a: str, b: str) -> str: - """The {a} sat in the {b}.""" - raise NotHandled - - with handler(TemplateStringIntp()): - assert rhyme("cat", "hat") == "The cat sat in the hat." - - -@pytest.mark.xfail -======= # --------------------------------------------------------------------------- # Lexical scope collection # --------------------------------------------------------------------------- @@ -1340,7 +1215,6 @@ def class_method(cls) -> str: assert not hasattr(MyAgent.class_method, "__history__") ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def test_template_formatting_scoped(): feet_per_mile = 5280 # noqa: F841 @@ -1352,28 +1226,6 @@ def convert(feet: int) -> float: with handler(TemplateStringIntp()): assert ( convert(7920) -<<<<<<< HEAD - == "How many miles is 7920 feet? There are 5280 feet per mile." 
- ) - - -@pytest.mark.xfail -def test_template_formatting_method(): - @dataclass - class User: - name: str - - @Template.define - def greet(self, day: str) -> float: - """Greet the user '{self.name}' and wish them a good {day}.""" - raise NotHandled - - with handler(TemplateStringIntp()): - user = User("Bob") - assert ( - user.greet("Monday") == "Greet the user 'Bob' and wish them a good Monday." - ) -======= == 'How many miles is {"value":7920} feet? There are {"value":5280} feet per mile.' ) @@ -1666,4 +1518,3 @@ def test_validate_format_spec_on_undefined_var(): def bad(x: int) -> str: """Value: {x} and {missing:.2f}.""" raise NotHandled ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 diff --git a/tests/test_handlers_llm_tool_calling_book.py b/tests/test_handlers_llm_tool_calling_book.py index 1f822429..5759a22b 100644 --- a/tests/test_handlers_llm_tool_calling_book.py +++ b/tests/test_handlers_llm_tool_calling_book.py @@ -10,11 +10,7 @@ from pydantic import BaseModel, Field from effectful.handlers.llm import Template, Tool -<<<<<<< HEAD -from effectful.handlers.llm.providers import LiteLLMProvider, completion -======= from effectful.handlers.llm.completions import LiteLLMProvider, call_assistant ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 from effectful.ops.semantics import fwd, handler from effectful.ops.syntax import ObjectInterpretation, implements from effectful.ops.types import NotHandled @@ -40,11 +36,7 @@ class LimitLLMCallsHandler(ObjectInterpretation): max_calls: int = 10 call_count: int = 0 -<<<<<<< HEAD - @implements(completion) -======= @implements(call_assistant) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def _completion(self, *args, **kwargs): self.call_count += 1 if self.call_count > self.max_calls: @@ -106,29 +98,17 @@ def get_book_recommendation(user_preference: str) -> BookRecommendation: class TestPydanticBaseModelToolCalls: @pytest.mark.parametrize( -<<<<<<< HEAD - "model_name", -======= "model", ->>>>>>> 
68d7645f081b17247fde3494e548fd16f92694e8 [ pytest.param("gpt-5-nano", marks=requires_openai), pytest.param("claude-sonnet-4-5-20250929", marks=requires_anthropic), ], ) -<<<<<<< HEAD - def test_pydantic_basemodel_tool_calling(self, model_name): - """Test that templates with tools work with Pydantic BaseModel.""" - book_rec_ctx = LoggingBookRecommendationInterpretation() - with ( - handler(LiteLLMProvider(model_name=model_name)), -======= def test_pydantic_basemodel_tool_calling(self, model): """Test that templates with tools work with Pydantic BaseModel.""" book_rec_ctx = LoggingBookRecommendationInterpretation() with ( handler(LiteLLMProvider(model=model)), ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 handler(LimitLLMCallsHandler(max_calls=4)), handler(book_rec_ctx), ): diff --git a/tests/test_handlers_llm_tool_calling_poem.py b/tests/test_handlers_llm_tool_calling_poem.py index 1a1c3041..536cef93 100644 --- a/tests/test_handlers_llm_tool_calling_poem.py +++ b/tests/test_handlers_llm_tool_calling_poem.py @@ -5,26 +5,16 @@ import os from dataclasses import dataclass -<<<<<<< HEAD -from enum import Enum -======= from enum import StrEnum ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 import pytest from pydantic import Field from pydantic.dataclasses import dataclass as pydantic_dataclass from effectful.handlers.llm import Template, Tool -<<<<<<< HEAD -from effectful.handlers.llm.providers import ( - LiteLLMProvider, - completion, -======= from effectful.handlers.llm.completions import ( LiteLLMProvider, call_assistant, ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 ) from effectful.ops.semantics import fwd, handler from effectful.ops.syntax import ObjectInterpretation, implements @@ -51,11 +41,7 @@ class LimitLLMCallsHandler(ObjectInterpretation): max_calls: int = 10 call_count: int = 0 -<<<<<<< HEAD - @implements(completion) -======= @implements(call_assistant) ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 def _completion(self, *args, **kwargs): 
self.call_count += 1 if self.call_count > self.max_calls: @@ -73,11 +59,7 @@ class Poem: form: str = Field(..., description="name of the type of the poem") -<<<<<<< HEAD -class PoemQuality(str, Enum): -======= class PoemQuality(StrEnum): ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 """Quality rating for a poem.""" GOOD = "GOOD" @@ -133,29 +115,17 @@ class TestToolCalling: """Tests for templates with tool calling functionality.""" @pytest.mark.parametrize( -<<<<<<< HEAD - "model_name", -======= "model", ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 [ pytest.param("gpt-5-nano", marks=requires_openai), pytest.param("claude-sonnet-4-5-20250929", marks=requires_anthropic), ], ) -<<<<<<< HEAD - def test_tool_calling(self, model_name): - """Test that templates with tools work with openai.""" - poem_eval_ctx = LoggingPoemEvaluationInterpretation() - with ( - handler(LiteLLMProvider(model_name=model_name)), -======= def test_tool_calling(self, model): """Test that templates with tools work with openai.""" poem_eval_ctx = LoggingPoemEvaluationInterpretation() with ( handler(LiteLLMProvider(model=model)), ->>>>>>> 68d7645f081b17247fde3494e548fd16f92694e8 handler(LimitLLMCallsHandler(max_calls=4)), handler(poem_eval_ctx), ): From cedf12822ac62c9adee3b670f9436b466081de5b Mon Sep 17 00:00:00 2001 From: datvo06 Date: Thu, 26 Feb 2026 12:34:41 -0500 Subject: [PATCH 34/39] More cleanup --- effectful/handlers/llm/agent.py | 64 - effectful/handlers/llm/encodable_type.py | 193 -- effectful/handlers/llm/encoding.py | 169 ++ effectful/handlers/llm/providers.py | 372 --- effectful/handlers/llm/synthesis.py | 14 - uv.lock | 3047 ---------------------- 6 files changed, 169 insertions(+), 3690 deletions(-) delete mode 100644 effectful/handlers/llm/agent.py delete mode 100644 effectful/handlers/llm/encodable_type.py delete mode 100644 effectful/handlers/llm/providers.py delete mode 100644 effectful/handlers/llm/synthesis.py delete mode 100644 uv.lock diff --git 
a/effectful/handlers/llm/agent.py b/effectful/handlers/llm/agent.py deleted file mode 100644 index f80cf159..00000000 --- a/effectful/handlers/llm/agent.py +++ /dev/null @@ -1,64 +0,0 @@ -import functools -from typing import Optional - -from effectful.handlers.llm import Template -from effectful.handlers.llm.providers import compute_response, format_model_input -from effectful.ops.semantics import fwd, handler -from effectful.ops.syntax import defop - - -class Agent: - '''When inheriting from Agent, Template-valued methods will have the - previous history of the conversation injected prior to their prompts. - - Example: - - >>> class ConversationAgent(Agent): - ... @Template.define - ... def respond(self, message: str) -> str: - ... """Continue the conversation in response to the message '{message}'""" - ... raise NotImplementedError - - Any calls to `agent.format` will have the previous conversation history in their context. - - ''' - - def __init__(self): - self.state = [] - - @defop - @staticmethod - def current_agent() -> Optional["Agent"]: - return None - - def __init_subclass__(cls): - for method_name in dir(cls): - template = getattr(cls, method_name) - if not isinstance(template, Template): - continue - - @functools.wraps(template) - def wrapper(self, *args, **kwargs): - with handler( - { - Agent.current_agent: lambda: self, - format_model_input: self._format_model_input, - compute_response: self._compute_response, - } - ): - return template(self, *args, **kwargs) - - setattr(cls, method_name, wrapper) - - def _format_model_input(self, template, other, *args, **kwargs): - prompt = fwd() - if Agent.current_agent() is self: - assert self is other - prompt = self.state + prompt - return prompt - - def _compute_response(self, *args, **kwargs): - response = fwd() - if Agent.current_agent() is self: - self.state += response.output - return response diff --git a/effectful/handlers/llm/encodable_type.py b/effectful/handlers/llm/encodable_type.py deleted file mode 
100644 index 32c6f51e..00000000 --- a/effectful/handlers/llm/encodable_type.py +++ /dev/null @@ -1,193 +0,0 @@ -"""Encodable type for LLM-synthesized classes.""" - -import ast -import collections -import ctypes -import inspect -import linecache -import sys -import textwrap -import types -import typing -from collections import ChainMap -from typing import Any - -import pydantic -from pydantic import Field - -from effectful.handlers.llm.encoding import EncodableAs, type_to_encodable_type -from effectful.handlers.llm.providers import OpenAIMessageContentListBlock -from effectful.handlers.llm.synthesis import SynthesisError - - -class _PyMappingProxyObject(ctypes.Structure): - """Internal ctypes structure to access the underlying dict of a mappingproxy.""" - - _fields_ = [ - ("ob_refcnt", ctypes.c_ssize_t), - ("ob_type", ctypes.py_object), - ("mapping", ctypes.py_object), - ] - - -class SynthesizedType(pydantic.BaseModel): - """Structured output for type/class synthesis. - - Pydantic model representing synthesized class code with type name and module code. - """ - - type_name: str = Field( - ..., - description="The name of the class that satisfies the specification", - ) - module_code: str = Field( - ..., - description="Complete Python module code with the class definition (no imports needed)", - ) - - -@type_to_encodable_type.register(type) -class EncodableSynthesizedType( - EncodableAs[type, SynthesizedType], -): - """Encodes type to SynthesizedType and vice versa.""" - - t = SynthesizedType - - @classmethod - def encode( - cls, vl: type, context: ChainMap[str, Any] | None = None - ) -> SynthesizedType: - """Encode a type to a SynthesizedType. - - Extracts the type name and source code. 
- """ - type_name = vl.__name__ - try: - source = inspect.getsource(vl) - except (OSError, TypeError): - # If we can't get source, create a minimal representation - source = f"class {type_name}: pass # Source unavailable" - - return SynthesizedType( - type_name=type_name, module_code=textwrap.dedent(source).strip() - ) - - # Counter for unique filenames - _decode_counter: typing.ClassVar[int] = 0 - - @classmethod - def decode(cls, vl: SynthesizedType) -> type: - """Decode a SynthesizedType to a type. - - Executes the module code and returns the named class. - Uses _decode_context attribute on vl if present (set by TypeSynthesis handler). - """ - context: ChainMap[str, Any] | None = getattr(vl, "_decode_context", None) - type_name = vl.type_name - module_code = textwrap.dedent(vl.module_code).strip() + "\n" - - # Create a unique filename and register source with linecache - # This allows inspect.getsource() to work on the generated class - cls._decode_counter += 1 - # NOTE: adding source to class is more tricky - # because for function func.__code__.co_filename (set by compile(..., filename, "exec")) is set automatically - # We have to do this manually for class (set module name) for inspect.getsource() to work - module_name = ( - f"_llm_effectful_synthesized_types.{type_name}.{cls._decode_counter}" - ) - filename = f"" - - # Register source for inspect/linecache - lines = module_code.splitlines(keepends=True) - # Ensure last line has newline for linecache - if lines and not lines[-1].endswith("\n"): - lines[-1] += "\n" - linecache.cache[filename] = ( - len(module_code), - None, - lines, - filename, - ) - - # Create a real module and put it to sys.modules - mod = types.ModuleType(module_name) - mod.__file__ = filename - sys.modules[module_name] = mod - - # globals = module.__dict__ + context - g = mod.__dict__ - g.update({"collections": collections}) - if context is not None: - g.update(context) - g.update({"__name__": module_name, "__file__": filename}) - 
g.setdefault("__package__", module_name.rpartition(".")[0]) - - try: - # NOTE: Parse and inject __firstlineno__ into class bodies for Python 3.13+ compatibility - # inspect.getsource() looks for __firstlineno__ in vars(cls), which requires it to be in the class's __dict__. - # We inject it via AST before execution. - tree = ast.parse(module_code) - for node in ast.walk(tree): - if isinstance(node, ast.ClassDef): - # Create: __firstlineno__ = - assign = ast.Assign( - targets=[ast.Name(id="__firstlineno__", ctx=ast.Store())], - value=ast.Constant(value=node.lineno), - lineno=node.lineno, - col_offset=0, - ) - ast.fix_missing_locations(assign) - node.body.insert(0, assign) - ast.fix_missing_locations(tree) - code_obj = compile(tree, filename, "exec") - exec(code_obj, g, g) - except SyntaxError as exc: - raise SynthesisError( - f"Syntax error in generated code: {exc}", module_code - ) from exc - except Exception as exc: - raise SynthesisError(f"Evaluation failed: {exc!r}", module_code) from exc - - if type_name not in g: - raise SynthesisError( - f"Type '{type_name}' not found after execution. " - f"Available names: {[k for k in g.keys() if not k.startswith('_')]}", - module_code, - ) - - synthesized_type = g[type_name] - - if not isinstance(synthesized_type, type): - raise SynthesisError( - f"'{type_name}' is not a type, got {type(synthesized_type).__name__}", - module_code, - ) - - # Attach source code and module name - synthesized_type.__source__ = module_code # type: ignore[attr-defined] - synthesized_type.__synthesized__ = vl # type: ignore[attr-defined] - synthesized_type.__module__ = module_name - - # NOTE: Set __firstlineno__ AFTER __module__ assignment! - # In Python 3.13, setting __module__ clears __firstlineno__ from vars(). - # We use ctypes to directly inject it into __dict__ for inspect.getsource(). 
- if "__firstlineno__" not in vars(synthesized_type): - firstlineno = next( - ( - n.lineno - for n in ast.walk(ast.parse(module_code)) - if isinstance(n, ast.ClassDef) and n.name == type_name - ), - 1, - ) - inner_dict = _PyMappingProxyObject.from_address( - id(vars(synthesized_type)) - ).mapping - inner_dict["__firstlineno__"] = firstlineno - - return synthesized_type - - @classmethod - def serialize(cls, vl: SynthesizedType) -> list[OpenAIMessageContentListBlock]: - return [{"type": "text", "text": vl.model_dump_json()}] diff --git a/effectful/handlers/llm/encoding.py b/effectful/handlers/llm/encoding.py index bcfa15a4..dac57a1c 100644 --- a/effectful/handlers/llm/encoding.py +++ b/effectful/handlers/llm/encoding.py @@ -1,8 +1,11 @@ import ast import base64 +import collections +import ctypes import functools import inspect import io +import sys import textwrap import types import typing @@ -594,6 +597,164 @@ def deserialize(self, serialized_value: str) -> SynthesizedFunction: return SynthesizedFunction.model_validate_json(serialized_value) +class SynthesisError(Exception): + """Raised when type synthesis fails.""" + + def __init__(self, message: str, code: str | None = None): + super().__init__(message) + self.code = code + + +class _PyMappingProxyObject(ctypes.Structure): + """Internal ctypes structure to access the underlying dict of a mappingproxy.""" + + _fields_ = [ + ("ob_refcnt", ctypes.c_ssize_t), + ("ob_type", ctypes.py_object), + ("mapping", ctypes.py_object), + ] + + +class SynthesizedType(pydantic.BaseModel): + """Structured output for type/class synthesis. + + Pydantic model representing synthesized class code with type name and module code. 
+ """ + + type_name: str = pydantic.Field( + ..., + description="The name of the class that satisfies the specification", + ) + module_code: str = pydantic.Field( + ..., + description="Complete Python module code with the class definition (no imports needed)", + ) + + +@dataclass +class TypeEncodable(Encodable[type, SynthesizedType]): + base: type[type] + enc: type[SynthesizedType] + ctx: Mapping[str, Any] + + _decode_counter: typing.ClassVar[int] = 0 + + def encode(self, value: type) -> SynthesizedType: + type_name = value.__name__ + try: + source = inspect.getsource(value) + except (OSError, TypeError): + source = f"class {type_name}: pass # Source unavailable" + + return SynthesizedType( + type_name=type_name, module_code=textwrap.dedent(source).strip() + ) + + def decode(self, encoded_value: SynthesizedType) -> type: + """Decode a SynthesizedType to a type. + + Executes the module code and returns the named class. + """ + type_name = encoded_value.type_name + module_code = textwrap.dedent(encoded_value.module_code).strip() + "\n" + + TypeEncodable._decode_counter += 1 + module_name = ( + f"_llm_effectful_synthesized_types.{type_name}" + f".{TypeEncodable._decode_counter}" + ) + filename = f"" + + # Create a real module and put it to sys.modules + mod = types.ModuleType(module_name) + mod.__file__ = filename + sys.modules[module_name] = mod + + # globals = module.__dict__ + context + g = mod.__dict__ + g.update({"collections": collections}) + if self.ctx: + g.update(self.ctx) + g.update({"__name__": module_name, "__file__": filename}) + g.setdefault("__package__", module_name.rpartition(".")[0]) + + try: + # Parse via evaluation effect (also registers source in linecache) + tree = evaluation.parse(module_code, filename) + + # Inject __firstlineno__ into class bodies for Python 3.13+ compatibility + # inspect.getsource() looks for __firstlineno__ in vars(cls), + # which requires it to be in the class's __dict__. 
+ for node in ast.walk(tree): + if isinstance(node, ast.ClassDef): + assign = ast.Assign( + targets=[ast.Name(id="__firstlineno__", ctx=ast.Store())], + value=ast.Constant(value=node.lineno), + lineno=node.lineno, + col_offset=0, + ) + ast.fix_missing_locations(assign) + node.body.insert(0, assign) + ast.fix_missing_locations(tree) + + # Compile and execute via evaluation effects + code_obj = evaluation.compile(tree, filename) + evaluation.exec(code_obj, g) + except SyntaxError as exc: + raise SynthesisError( + f"Syntax error in generated code: {exc}", module_code + ) from exc + except Exception as exc: + raise SynthesisError(f"Evaluation failed: {exc!r}", module_code) from exc + + if type_name not in g: + raise SynthesisError( + f"Type '{type_name}' not found after execution. " + f"Available names: {[k for k in g.keys() if not k.startswith('_')]}", + module_code, + ) + + synthesized_type = g[type_name] + + if not isinstance(synthesized_type, type): + raise SynthesisError( + f"'{type_name}' is not a type, got {type(synthesized_type).__name__}", + module_code, + ) + + # Attach source code and module name + synthesized_type.__source__ = module_code # type: ignore[attr-defined] + synthesized_type.__synthesized__ = encoded_value # type: ignore[attr-defined] + synthesized_type.__module__ = module_name + + # NOTE: Set __firstlineno__ AFTER __module__ assignment! + # In Python 3.13, setting __module__ clears __firstlineno__ from vars(). + # We use ctypes to directly inject it into __dict__ for inspect.getsource(). 
+ if "__firstlineno__" not in vars(synthesized_type): + firstlineno = next( + ( + n.lineno + for n in ast.walk(ast.parse(module_code)) + if isinstance(n, ast.ClassDef) and n.name == type_name + ), + 1, + ) + inner_dict = _PyMappingProxyObject.from_address( + id(vars(synthesized_type)) + ).mapping + inner_dict["__firstlineno__"] = firstlineno + + return synthesized_type + + def serialize( + self, encoded_value: SynthesizedType + ) -> Sequence[OpenAIMessageContentListBlock]: + return [{"type": "text", "text": encoded_value.model_dump_json()}] + + def deserialize(self, serialized_value: str) -> SynthesizedType: + return SynthesizedType.model_validate_json(serialized_value) + + def _param_model(sig: inspect.Signature) -> type[pydantic.BaseModel]: return pydantic.create_model( "Params", @@ -960,6 +1121,14 @@ def _encodable_callable( return CallableEncodable(ty, typed_enc, ctx, expected_params, expected_return) +@Encodable.define.register(type) +def _encodable_type( + ty: type, ctx: Mapping[str, Any] | None +) -> Encodable[type, SynthesizedType]: + ctx = ctx or {} + return TypeEncodable(ty, SynthesizedType, ctx) + + @Encodable.define.register(Tool) def _encodable_tool[**P, T]( ty: type[Tool[P, T]], ctx: Mapping[str, Any] | None diff --git a/effectful/handlers/llm/providers.py b/effectful/handlers/llm/providers.py deleted file mode 100644 index e99e4ce4..00000000 --- a/effectful/handlers/llm/providers.py +++ /dev/null @@ -1,372 +0,0 @@ -import functools -import inspect -import logging -import string -import traceback -import typing -from collections.abc import Callable, Hashable -from typing import Any - -import litellm -import pydantic -from litellm import ( - Choices, - Message, - OpenAIChatCompletionToolParam, - OpenAIMessageContent, - OpenAIMessageContentListBlock, -) -from litellm.types.utils import ModelResponse - -from effectful.handlers.llm import Template, Tool -from effectful.handlers.llm.encoding import type_to_encodable_type -from effectful.ops.semantics 
import fwd -from effectful.ops.syntax import ObjectInterpretation, defop, implements -from effectful.ops.types import Operation - - -class _OpenAIPromptFormatter(string.Formatter): - def format_as_messages( - self, format_str: str, /, *args, **kwargs - ) -> OpenAIMessageContent: - prompt_parts: list[OpenAIMessageContentListBlock] = [] - current_text = "" - - def push_current_text(): - nonlocal current_text - if current_text: - prompt_parts.append({"type": "text", "text": current_text}) - current_text = "" - - for literal, field_name, format_spec, conversion in self.parse(format_str): - current_text += literal - - if field_name is not None: - obj, _ = self.get_field(field_name, args, kwargs) - part = self.convert_field(obj, conversion) - # special casing for text - if ( - isinstance(part, list) - and len(part) == 1 - and part[0]["type"] == "text" - ): - current_text += self.format_field( - part[0]["text"], format_spec if format_spec else "" - ) - elif isinstance(part, list): - push_current_text() - prompt_parts.extend(part) - else: - prompt_parts.append(part) - - push_current_text() - return prompt_parts - - -@defop -@functools.wraps(litellm.completion) -def completion(*args, **kwargs) -> Any: - """Low-level LLM request. Handlers may log/modify requests and delegate via fwd(). - - This effect is emitted for model request/response rounds so handlers can - observe/log requests. 
- - """ - return litellm.completion(*args, **kwargs) - - -class CacheLLMRequestHandler(ObjectInterpretation): - """Caches LLM requests.""" - - def __init__(self): - self.cache: dict[Hashable, Any] = {} - - def _make_hashable(self, obj: Any) -> Hashable: - """Recursively convert objects to hashable representations.""" - if isinstance(obj, dict): - return tuple(sorted((k, self._make_hashable(v)) for k, v in obj.items())) - elif isinstance(obj, list | tuple): - return tuple(self._make_hashable(item) for item in obj) - elif isinstance(obj, set): - return frozenset(self._make_hashable(item) for item in obj) - else: - # Primitives (int, float, str, bytes, etc.) are already hashable - return obj - - @implements(completion) - def _cache_completion(self, *args, **kwargs) -> Any: - key = self._make_hashable((args, kwargs)) - if key in self.cache: - return self.cache[key] - response = fwd() - self.cache[key] = response - return response - - -class LLMLoggingHandler(ObjectInterpretation): - """Logs completion rounds and tool_call invocations using Python logging. - - Configure with a logger or logger name. By default logs at INFO level. - """ - - def __init__( - self, - *, - logger: logging.Logger | None = None, - ): - """Initialize the logging handler. - - Args: - logger: The logger to use. If None, the logger name will be the name of the class. Note that the logger should have a handler that print out also the extra payload, e.g. `%(payload)s`. 
- """ - self.logger = logger or logging.getLogger(__name__) - - @implements(completion) - def _log_completion(self, *args, **kwargs) -> Any: - """Log the LLM request and response.""" - - response = fwd() - self.logger.info( - "llm.request", - extra={"payload": {"args": args, "kwargs": kwargs, "response": response}}, - ) - return response - - @implements(Tool.__apply__) - def _log_tool_call(self, tool: Operation, *args, **kwargs) -> Any: - """Log the tool call and result.""" - - tool_name = tool.__name__ - result = fwd() - self.logger.info( - "llm.tool_call", - extra={"payload": {"tool": tool_name, "args": args, "kwargs": kwargs}}, - ) - return result - - -class RetryLLMHandler(ObjectInterpretation): - """Retries LLM requests if they fail. - If the request fails, the error is logged and the prompt is updated to include the error. - If the request fails after the maximum number of retries, an exception is raised. - Args: - max_retries: The maximum number of retries. - add_error_feedback: Whether to add error feedback to the prompt. - exception_cls: The exception class to raise if the maximum number of retries is reached. 
- """ - - def __init__( - self, - max_retries: int = 3, - add_error_feedback: bool = False, - exception_cls: type[BaseException] = Exception, - ): - self.max_retries = max_retries - self.add_error_feedback = add_error_feedback - self.exception_cls = exception_cls - - @implements(Template.__apply__) - def _retry_completion(self, template: Template, *args, **kwargs) -> Any: - prompt_ext = template.__prompt_template__ - for _ in range(self.max_retries - 1): - template_ext = Template.replace(template, prompt_template=prompt_ext) - - try: - return fwd(template_ext, *args, **kwargs) - except self.exception_cls: - if self.add_error_feedback: - # Capture the full traceback for better error context - tb = traceback.format_exc() - prompt_ext += f"\nError from previous generation:\n```\n{tb}```" - - template_ext = Template.replace(template, prompt_template=prompt_ext) - return fwd(template_ext, *args, **kwargs) - - -def parameter_model(tool: Tool) -> type[pydantic.BaseModel]: - fields = { - name: type_to_encodable_type(param.annotation).t - for name, param in tool.__signature__.parameters.items() - } - parameter_model = pydantic.create_model( - "Params", - __config__={"extra": "forbid"}, - **fields, # type: ignore - ) - return parameter_model - - -def function_definition(tool: Tool) -> OpenAIChatCompletionToolParam: - param_model = parameter_model(tool) - response_format = litellm.utils.type_to_response_format_param(param_model) - description = tool.__default__.__doc__ - assert response_format is not None - assert description is not None - return { - "type": "function", - "function": { - "name": tool.__name__, - "description": description, - "parameters": response_format["json_schema"]["schema"], - "strict": True, - }, - } - - -def call_with_json_args(tool: Tool, json_str: str) -> OpenAIMessageContent: - """Implements a roundtrip call to a python function. Input is a json - string representing an LLM tool call request parameters. 
The output is - the serialised response to the model. - - """ - sig = tool.__signature__ - param_model = parameter_model(tool) - try: - # build dict of raw encodable types U - raw_args = param_model.model_validate_json(json_str) - - # use encoders to decode Us to python types T - params: dict[str, Any] = { - param_name: type_to_encodable_type( - sig.parameters[param_name].annotation - ).decode(getattr(raw_args, param_name)) - for param_name in raw_args.model_fields_set - } - - # call tool with python types - result = tool(**params) - - # serialize back to U using encoder for return type - encoded_ty = type_to_encodable_type(sig.return_annotation) - encoded_value = encoded_ty.encode(result) - - # serialise back to Json - return encoded_ty.serialize(encoded_value) - except Exception as exn: - return str({"status": "failure", "exception": str(exn)}) - - -@defop -def compute_response(template: Template, model_input: list[Any]) -> ModelResponse: - """Produce a complete model response for an input message sequence. This may - involve multiple API requests if tools are invoked by the model. 
- - """ - ret_type = template.__signature__.return_annotation - tools = template.tools - - tool_schemas = [function_definition(t) for t in tools.values()] - response_encoding_type: type | None = type_to_encodable_type(ret_type).t - if response_encoding_type == str: - response_encoding_type = None - - # loop based on: https://cookbook.openai.com/examples/reasoning_function_calls - while True: - response: ModelResponse = completion( - messages=model_input, - response_format=pydantic.create_model( - "Response", value=response_encoding_type, __config__={"extra": "forbid"} - ) - if response_encoding_type - else None, - tools=tool_schemas, - ) - - choice: Choices = typing.cast(Choices, response.choices[0]) - message: Message = choice.message - if not message.tool_calls: - return response - model_input.append(message.to_dict()) - - for tool_call in message.tool_calls: - function = tool_call.function - function_name = function.name - assert function_name is not None - tool = tools[function_name] - tool_result = call_with_json_args(tool, function.arguments) - model_input.append( - { - "role": "tool", - "tool_call_id": tool_call.id, - "name": function_name, - "content": tool_result, - } - ) - - -def decode_response[**P, T](template: Callable[P, T], response: ModelResponse) -> T: - """Decode an LLM response into an instance of the template return type. This - operation should raise if the output cannot be decoded. 
- """ - assert isinstance(template, Template) - choice: Choices = typing.cast(Choices, response.choices[0]) - last_resp: Message = choice.message - assert isinstance(last_resp, Message) - result_str = last_resp.content or last_resp.reasoning_content - assert result_str - - ret_type = template.__signature__.return_annotation - encodable_ty = type_to_encodable_type(ret_type) - - if encodable_ty.t == str: - # if encoding as a type, value is just directly what the llm returned - value = result_str - else: - Result = pydantic.create_model("Result", value=encodable_ty.t) - result = Result.model_validate_json(result_str) - assert isinstance(result, Result) - value = result.value # type: ignore - - return encodable_ty.decode(value) # type: ignore - - -@defop -def format_model_input[**P, T]( - template: Template[P, T], *args: P.args, **kwargs: P.kwargs -) -> list[Any]: - """Format a template applied to arguments into a sequence of input - messages. - - """ - bound_args = template.__signature__.bind(*args, **kwargs) - bound_args.apply_defaults() - # encode arguments - arguments = {} - for param in bound_args.arguments: - encoder = type_to_encodable_type( - template.__signature__.parameters[param].annotation - ) - encoded = encoder.encode(bound_args.arguments[param]) - arguments[param] = encoder.serialize(encoded) - - prompt = _OpenAIPromptFormatter().format_as_messages( - template.__prompt_template__, **arguments - ) - - # Note: The OpenAI api only seems to accept images in the 'user' role. The - # effect of different roles on the model's response is currently unclear. 
- messages = [{"type": "message", "content": prompt, "role": "user"}] - return messages - - -class LiteLLMProvider(ObjectInterpretation): - """Implements templates using the LiteLLM API.""" - - model_name: str - config: dict[str, Any] - - def __init__(self, model_name: str = "gpt-4o", **config): - self.model_name = model_name - self.config = inspect.signature(completion).bind_partial(**config).kwargs - - @implements(completion) - def _completion(self, *args, **kwargs): - return fwd(self.model_name, *args, **(self.config | kwargs)) - - @implements(Template.__apply__) - def _call[**P, T]( - self, template: Template[P, T], *args: P.args, **kwargs: P.kwargs - ) -> T: - model_input = format_model_input(template, *args, **kwargs) - resp = compute_response(template, model_input) - return decode_response(template, resp) diff --git a/effectful/handlers/llm/synthesis.py b/effectful/handlers/llm/synthesis.py deleted file mode 100644 index 3db32fd7..00000000 --- a/effectful/handlers/llm/synthesis.py +++ /dev/null @@ -1,14 +0,0 @@ -from effectful.ops.syntax import ObjectInterpretation - - -class SynthesisError(Exception): - """Raised when program synthesis fails.""" - - def __init__(self, message, code=None): - super().__init__(message) - self.code = code - - -class ProgramSynthesis(ObjectInterpretation): - def __init__(self, *args, **kwargs): - raise NotImplementedError diff --git a/uv.lock b/uv.lock deleted file mode 100644 index 0f896036..00000000 --- a/uv.lock +++ /dev/null @@ -1,3047 +0,0 @@ -version = 1 -revision = 2 -requires-python = ">=3.12, <3.14" -resolution-markers = [ - "python_full_version >= '3.13'", - "python_full_version < '3.13'", -] - -[[package]] -name = "absl-py" -version = "2.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/10/2a/c93173ffa1b39c1d0395b7e842bbdc62e556ca9d8d3b5572926f3e4ca752/absl_py-2.3.1.tar.gz", hash = "sha256:a97820526f7fbfd2ec1bce83f3f25e3a14840dac0d8e02a0b71cd75db3f77fc9", 
size = 116588, upload-time = "2025-07-03T09:31:44.05Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/aa/ba0014cc4659328dc818a28827be78e6d97312ab0cb98105a770924dc11e/absl_py-2.3.1-py3-none-any.whl", hash = "sha256:eeecf07f0c2a93ace0772c92e596ace6d3d3996c042b2128459aaae2a76de11d", size = 135811, upload-time = "2025-07-03T09:31:42.253Z" }, -] - -[[package]] -name = "aiohappyeyeballs" -version = "2.6.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, -] - -[[package]] -name = "aiohttp" -version = "3.13.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohappyeyeballs" }, - { name = "aiosignal" }, - { name = "attrs" }, - { name = "frozenlist" }, - { name = "multidict" }, - { name = "propcache" }, - { name = "yarl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623, upload-time = 
"2025-10-28T20:56:30.797Z" }, - { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664, upload-time = "2025-10-28T20:56:32.708Z" }, - { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808, upload-time = "2025-10-28T20:56:34.57Z" }, - { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863, upload-time = "2025-10-28T20:56:36.377Z" }, - { url = "https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586, upload-time = "2025-10-28T20:56:38.034Z" }, - { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625, upload-time = "2025-10-28T20:56:39.75Z" }, - { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281, upload-time = "2025-10-28T20:56:41.471Z" }, - { url = "https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431, upload-time = "2025-10-28T20:56:43.162Z" }, - { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846, upload-time = "2025-10-28T20:56:44.85Z" }, - { url = "https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606, upload-time = "2025-10-28T20:56:46.519Z" }, - { url = "https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663, upload-time = "2025-10-28T20:56:48.528Z" }, - { url = "https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939, upload-time = "2025-10-28T20:56:50.77Z" }, - { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = 
"sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132, upload-time = "2025-10-28T20:56:52.568Z" }, - { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802, upload-time = "2025-10-28T20:56:54.292Z" }, - { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512, upload-time = "2025-10-28T20:56:56.428Z" }, - { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690, upload-time = "2025-10-28T20:56:58.736Z" }, - { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465, upload-time = "2025-10-28T20:57:00.795Z" }, - { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" }, - { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" }, - { url = "https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387, upload-time = "2025-10-28T20:57:08.685Z" }, - { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314, upload-time = "2025-10-28T20:57:10.693Z" }, - { url = "https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317, upload-time = "2025-10-28T20:57:12.563Z" }, - { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539, upload-time = "2025-10-28T20:57:14.623Z" }, - { url = "https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597, upload-time = "2025-10-28T20:57:16.399Z" }, - { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006, upload-time = "2025-10-28T20:57:18.288Z" }, - { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220, upload-time = "2025-10-28T20:57:20.241Z" }, - { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570, upload-time = "2025-10-28T20:57:22.253Z" }, - { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407, upload-time = "2025-10-28T20:57:24.37Z" }, - { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" }, - { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 
1758084, upload-time = "2025-10-28T20:57:28.349Z" }, - { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = "2025-10-28T20:57:30.233Z" }, - { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" }, - { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" }, -] - -[[package]] -name = "aiosignal" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "frozenlist" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, -] - -[[package]] -name = "alabaster" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, -] - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, -] - -[[package]] -name = "anyio" -version = "4.11.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = 
"sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, -] - -[[package]] -name = "appnope" -version = "0.1.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" }, -] - -[[package]] -name = "asttokens" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, upload-time = "2025-11-15T16:43:48.578Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" }, -] - -[[package]] -name = "attrs" -version = "25.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, -] - -[[package]] -name = "autopep8" -version = "2.3.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pycodestyle" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/50/d8/30873d2b7b57dee9263e53d142da044c4600a46f2d28374b3e38b023df16/autopep8-2.3.2.tar.gz", hash = "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758", size = 92210, upload-time = "2025-01-14T14:46:18.454Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/43/53afb8ba17218f19b77c7834128566c5bbb100a0ad9ba2e8e89d089d7079/autopep8-2.3.2-py2.py3-none-any.whl", hash = "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128", size = 45807, upload-time = "2025-01-14T14:46:15.466Z" }, -] - -[[package]] -name = "babel" -version = "2.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, -] - -[[package]] -name = "beautifulsoup4" -version = "4.14.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "soupsieve" }, - { name = "typing-extensions" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/77/e9/df2358efd7659577435e2177bfa69cba6c33216681af51a707193dec162a/beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e", size = 625822, upload-time = "2025-09-29T10:05:42.613Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/fe/3aed5d0be4d404d12d36ab97e2f1791424d9ca39c2f754a6285d59a3b01d/beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515", size = 106392, upload-time = "2025-09-29T10:05:43.771Z" }, -] - -[[package]] -name = "bleach" -version = "6.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "webencodings" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/07/18/3c8523962314be6bf4c8989c79ad9531c825210dd13a8669f6b84336e8bd/bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22", size = 203533, upload-time = "2025-10-27T17:57:39.211Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cd/3a/577b549de0cc09d95f11087ee63c739bba856cd3952697eec4c4bb91350a/bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6", size = 164437, upload-time = "2025-10-27T17:57:37.538Z" }, -] - -[package.optional-dependencies] -css = [ - { name = "tinycss2" }, -] - -[[package]] -name = "certifi" -version = "2025.11.12" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = 
"sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, -] - -[[package]] -name = "cffi" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pycparser", marker = "implementation_name != 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, - { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, - { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, - { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, - { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, - { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, - { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, - { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, - { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, - { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, - { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, - { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, - { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, - { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, - { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, - { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, - { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, - { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, - { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, - { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, - { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, - { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, - { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, - { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, - { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, - { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, - { url = 
"https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, - { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, - { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, - { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, - { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, - { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, - { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, - { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, - { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 
161346, upload-time = "2025-10-14T04:41:16.738Z" }, - { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, - { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, - { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, - { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, - { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, - { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, - { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, - { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, - { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, - { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, - { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time 
= "2025-10-14T04:41:32.624Z" }, - { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, -] - -[[package]] -name = "click" -version = "8.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, -] - -[[package]] -name = "colorful" -version = "0.5.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] 
-sdist = { url = "https://files.pythonhosted.org/packages/82/31/109ef4bedeb32b4202e02ddb133162457adc4eb890a9ed9c05c9dd126ed0/colorful-0.5.8.tar.gz", hash = "sha256:bb16502b198be2f1c42ba3c52c703d5f651d826076817185f0294c1a549a7445", size = 209361, upload-time = "2025-10-29T11:53:21.663Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/11/25cdf9d5fc21efd30134fc74c43702c6f7ef09ebae8ed927f1283403ad8d/colorful-0.5.8-py2.py3-none-any.whl", hash = "sha256:a9381fdda3337fbaba5771991020abc69676afa102646650b759927892875992", size = 201334, upload-time = "2025-10-29T11:53:20.251Z" }, -] - -[[package]] -name = "comm" -version = "0.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319, upload-time = "2025-07-25T14:02:04.452Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294, upload-time = "2025-07-25T14:02:02.896Z" }, -] - -[[package]] -name = "coverage" -version = "7.12.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/89/26/4a96807b193b011588099c3b5c89fbb05294e5b90e71018e065465f34eb6/coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c", size = 819341, upload-time = "2025-11-18T13:34:20.766Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/02/bf/638c0427c0f0d47638242e2438127f3c8ee3cfc06c7fdeb16778ed47f836/coverage-7.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29644c928772c78512b48e14156b81255000dcfd4817574ff69def189bcb3647", size = 217704, upload-time = 
"2025-11-18T13:32:28.906Z" }, - { url = "https://files.pythonhosted.org/packages/08/e1/706fae6692a66c2d6b871a608bbde0da6281903fa0e9f53a39ed441da36a/coverage-7.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8638cbb002eaa5d7c8d04da667813ce1067080b9a91099801a0053086e52b736", size = 218064, upload-time = "2025-11-18T13:32:30.161Z" }, - { url = "https://files.pythonhosted.org/packages/a9/8b/eb0231d0540f8af3ffda39720ff43cb91926489d01524e68f60e961366e4/coverage-7.12.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083631eeff5eb9992c923e14b810a179798bb598e6a0dd60586819fc23be6e60", size = 249560, upload-time = "2025-11-18T13:32:31.835Z" }, - { url = "https://files.pythonhosted.org/packages/e9/a1/67fb52af642e974d159b5b379e4d4c59d0ebe1288677fbd04bbffe665a82/coverage-7.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:99d5415c73ca12d558e07776bd957c4222c687b9f1d26fa0e1b57e3598bdcde8", size = 252318, upload-time = "2025-11-18T13:32:33.178Z" }, - { url = "https://files.pythonhosted.org/packages/41/e5/38228f31b2c7665ebf9bdfdddd7a184d56450755c7e43ac721c11a4b8dab/coverage-7.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e949ebf60c717c3df63adb4a1a366c096c8d7fd8472608cd09359e1bd48ef59f", size = 253403, upload-time = "2025-11-18T13:32:34.45Z" }, - { url = "https://files.pythonhosted.org/packages/ec/4b/df78e4c8188f9960684267c5a4897836f3f0f20a20c51606ee778a1d9749/coverage-7.12.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d907ddccbca819afa2cd014bc69983b146cca2735a0b1e6259b2a6c10be1e70", size = 249984, upload-time = "2025-11-18T13:32:35.747Z" }, - { url = "https://files.pythonhosted.org/packages/ba/51/bb163933d195a345c6f63eab9e55743413d064c291b6220df754075c2769/coverage-7.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1518ecbad4e6173f4c6e6c4a46e49555ea5679bf3feda5edb1b935c7c44e8a0", 
size = 251339, upload-time = "2025-11-18T13:32:37.352Z" }, - { url = "https://files.pythonhosted.org/packages/15/40/c9b29cdb8412c837cdcbc2cfa054547dd83affe6cbbd4ce4fdb92b6ba7d1/coverage-7.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51777647a749abdf6f6fd8c7cffab12de68ab93aab15efc72fbbb83036c2a068", size = 249489, upload-time = "2025-11-18T13:32:39.212Z" }, - { url = "https://files.pythonhosted.org/packages/c8/da/b3131e20ba07a0de4437a50ef3b47840dfabf9293675b0cd5c2c7f66dd61/coverage-7.12.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:42435d46d6461a3b305cdfcad7cdd3248787771f53fe18305548cba474e6523b", size = 249070, upload-time = "2025-11-18T13:32:40.598Z" }, - { url = "https://files.pythonhosted.org/packages/70/81/b653329b5f6302c08d683ceff6785bc60a34be9ae92a5c7b63ee7ee7acec/coverage-7.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bcead88c8423e1855e64b8057d0544e33e4080b95b240c2a355334bb7ced937", size = 250929, upload-time = "2025-11-18T13:32:42.915Z" }, - { url = "https://files.pythonhosted.org/packages/a3/00/250ac3bca9f252a5fb1338b5ad01331ebb7b40223f72bef5b1b2cb03aa64/coverage-7.12.0-cp312-cp312-win32.whl", hash = "sha256:dcbb630ab034e86d2a0f79aefd2be07e583202f41e037602d438c80044957baa", size = 220241, upload-time = "2025-11-18T13:32:44.665Z" }, - { url = "https://files.pythonhosted.org/packages/64/1c/77e79e76d37ce83302f6c21980b45e09f8aa4551965213a10e62d71ce0ab/coverage-7.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:2fd8354ed5d69775ac42986a691fbf68b4084278710cee9d7c3eaa0c28fa982a", size = 221051, upload-time = "2025-11-18T13:32:46.008Z" }, - { url = "https://files.pythonhosted.org/packages/31/f5/641b8a25baae564f9e52cac0e2667b123de961985709a004e287ee7663cc/coverage-7.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:737c3814903be30695b2de20d22bcc5428fdae305c61ba44cdc8b3252984c49c", size = 219692, upload-time = "2025-11-18T13:32:47.372Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/14/771700b4048774e48d2c54ed0c674273702713c9ee7acdfede40c2666747/coverage-7.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47324fffca8d8eae7e185b5bb20c14645f23350f870c1649003618ea91a78941", size = 217725, upload-time = "2025-11-18T13:32:49.22Z" }, - { url = "https://files.pythonhosted.org/packages/17/a7/3aa4144d3bcb719bf67b22d2d51c2d577bf801498c13cb08f64173e80497/coverage-7.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ccf3b2ede91decd2fb53ec73c1f949c3e034129d1e0b07798ff1d02ea0c8fa4a", size = 218098, upload-time = "2025-11-18T13:32:50.78Z" }, - { url = "https://files.pythonhosted.org/packages/fc/9c/b846bbc774ff81091a12a10203e70562c91ae71badda00c5ae5b613527b1/coverage-7.12.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b365adc70a6936c6b0582dc38746b33b2454148c02349345412c6e743efb646d", size = 249093, upload-time = "2025-11-18T13:32:52.554Z" }, - { url = "https://files.pythonhosted.org/packages/76/b6/67d7c0e1f400b32c883e9342de4a8c2ae7c1a0b57c5de87622b7262e2309/coverage-7.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc13baf85cd8a4cfcf4a35c7bc9d795837ad809775f782f697bf630b7e200211", size = 251686, upload-time = "2025-11-18T13:32:54.862Z" }, - { url = "https://files.pythonhosted.org/packages/cc/75/b095bd4b39d49c3be4bffbb3135fea18a99a431c52dd7513637c0762fecb/coverage-7.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:099d11698385d572ceafb3288a5b80fe1fc58bf665b3f9d362389de488361d3d", size = 252930, upload-time = "2025-11-18T13:32:56.417Z" }, - { url = "https://files.pythonhosted.org/packages/6e/f3/466f63015c7c80550bead3093aacabf5380c1220a2a93c35d374cae8f762/coverage-7.12.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:473dc45d69694069adb7680c405fb1e81f60b2aff42c81e2f2c3feaf544d878c", size = 249296, upload-time = 
"2025-11-18T13:32:58.074Z" }, - { url = "https://files.pythonhosted.org/packages/27/86/eba2209bf2b7e28c68698fc13437519a295b2d228ba9e0ec91673e09fa92/coverage-7.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:583f9adbefd278e9de33c33d6846aa8f5d164fa49b47144180a0e037f0688bb9", size = 251068, upload-time = "2025-11-18T13:32:59.646Z" }, - { url = "https://files.pythonhosted.org/packages/ec/55/ca8ae7dbba962a3351f18940b359b94c6bafdd7757945fdc79ec9e452dc7/coverage-7.12.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2089cc445f2dc0af6f801f0d1355c025b76c24481935303cf1af28f636688f0", size = 249034, upload-time = "2025-11-18T13:33:01.481Z" }, - { url = "https://files.pythonhosted.org/packages/7a/d7/39136149325cad92d420b023b5fd900dabdd1c3a0d1d5f148ef4a8cedef5/coverage-7.12.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:950411f1eb5d579999c5f66c62a40961f126fc71e5e14419f004471957b51508", size = 248853, upload-time = "2025-11-18T13:33:02.935Z" }, - { url = "https://files.pythonhosted.org/packages/fe/b6/76e1add8b87ef60e00643b0b7f8f7bb73d4bf5249a3be19ebefc5793dd25/coverage-7.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1aab7302a87bafebfe76b12af681b56ff446dc6f32ed178ff9c092ca776e6bc", size = 250619, upload-time = "2025-11-18T13:33:04.336Z" }, - { url = "https://files.pythonhosted.org/packages/95/87/924c6dc64f9203f7a3c1832a6a0eee5a8335dbe5f1bdadcc278d6f1b4d74/coverage-7.12.0-cp313-cp313-win32.whl", hash = "sha256:d7e0d0303c13b54db495eb636bc2465b2fb8475d4c8bcec8fe4b5ca454dfbae8", size = 220261, upload-time = "2025-11-18T13:33:06.493Z" }, - { url = "https://files.pythonhosted.org/packages/91/77/dd4aff9af16ff776bf355a24d87eeb48fc6acde54c907cc1ea89b14a8804/coverage-7.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:ce61969812d6a98a981d147d9ac583a36ac7db7766f2e64a9d4d059c2fe29d07", size = 221072, upload-time = "2025-11-18T13:33:07.926Z" }, - { url = 
"https://files.pythonhosted.org/packages/70/49/5c9dc46205fef31b1b226a6e16513193715290584317fd4df91cdaf28b22/coverage-7.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bcec6f47e4cb8a4c2dc91ce507f6eefc6a1b10f58df32cdc61dff65455031dfc", size = 219702, upload-time = "2025-11-18T13:33:09.631Z" }, - { url = "https://files.pythonhosted.org/packages/9b/62/f87922641c7198667994dd472a91e1d9b829c95d6c29529ceb52132436ad/coverage-7.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:459443346509476170d553035e4a3eed7b860f4fe5242f02de1010501956ce87", size = 218420, upload-time = "2025-11-18T13:33:11.153Z" }, - { url = "https://files.pythonhosted.org/packages/85/dd/1cc13b2395ef15dbb27d7370a2509b4aee77890a464fb35d72d428f84871/coverage-7.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04a79245ab2b7a61688958f7a855275997134bc84f4a03bc240cf64ff132abf6", size = 218773, upload-time = "2025-11-18T13:33:12.569Z" }, - { url = "https://files.pythonhosted.org/packages/74/40/35773cc4bb1e9d4658d4fb669eb4195b3151bef3bbd6f866aba5cd5dac82/coverage-7.12.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:09a86acaaa8455f13d6a99221d9654df249b33937b4e212b4e5a822065f12aa7", size = 260078, upload-time = "2025-11-18T13:33:14.037Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ee/231bb1a6ffc2905e396557585ebc6bdc559e7c66708376d245a1f1d330fc/coverage-7.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:907e0df1b71ba77463687a74149c6122c3f6aac56c2510a5d906b2f368208560", size = 262144, upload-time = "2025-11-18T13:33:15.601Z" }, - { url = "https://files.pythonhosted.org/packages/28/be/32f4aa9f3bf0b56f3971001b56508352c7753915345d45fab4296a986f01/coverage-7.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b57e2d0ddd5f0582bae5437c04ee71c46cd908e7bc5d4d0391f9a41e812dd12", size = 264574, upload-time = "2025-11-18T13:33:17.354Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/7c/00489fcbc2245d13ab12189b977e0cf06ff3351cb98bc6beba8bd68c5902/coverage-7.12.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:58c1c6aa677f3a1411fe6fb28ec3a942e4f665df036a3608816e0847fad23296", size = 259298, upload-time = "2025-11-18T13:33:18.958Z" }, - { url = "https://files.pythonhosted.org/packages/96/b4/f0760d65d56c3bea95b449e02570d4abd2549dc784bf39a2d4721a2d8ceb/coverage-7.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c589361263ab2953e3c4cd2a94db94c4ad4a8e572776ecfbad2389c626e4507", size = 262150, upload-time = "2025-11-18T13:33:20.644Z" }, - { url = "https://files.pythonhosted.org/packages/c5/71/9a9314df00f9326d78c1e5a910f520d599205907432d90d1c1b7a97aa4b1/coverage-7.12.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:91b810a163ccad2e43b1faa11d70d3cf4b6f3d83f9fd5f2df82a32d47b648e0d", size = 259763, upload-time = "2025-11-18T13:33:22.189Z" }, - { url = "https://files.pythonhosted.org/packages/10/34/01a0aceed13fbdf925876b9a15d50862eb8845454301fe3cdd1df08b2182/coverage-7.12.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:40c867af715f22592e0d0fb533a33a71ec9e0f73a6945f722a0c85c8c1cbe3a2", size = 258653, upload-time = "2025-11-18T13:33:24.239Z" }, - { url = "https://files.pythonhosted.org/packages/8d/04/81d8fd64928acf1574bbb0181f66901c6c1c6279c8ccf5f84259d2c68ae9/coverage-7.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:68b0d0a2d84f333de875666259dadf28cc67858bc8fd8b3f1eae84d3c2bec455", size = 260856, upload-time = "2025-11-18T13:33:26.365Z" }, - { url = "https://files.pythonhosted.org/packages/f2/76/fa2a37bfaeaf1f766a2d2360a25a5297d4fb567098112f6517475eee120b/coverage-7.12.0-cp313-cp313t-win32.whl", hash = "sha256:73f9e7fbd51a221818fd11b7090eaa835a353ddd59c236c57b2199486b116c6d", size = 220936, upload-time = "2025-11-18T13:33:28.165Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/52/60f64d932d555102611c366afb0eb434b34266b1d9266fc2fe18ab641c47/coverage-7.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:24cff9d1f5743f67db7ba46ff284018a6e9aeb649b67aa1e70c396aa1b7cb23c", size = 222001, upload-time = "2025-11-18T13:33:29.656Z" }, - { url = "https://files.pythonhosted.org/packages/77/df/c303164154a5a3aea7472bf323b7c857fed93b26618ed9fc5c2955566bb0/coverage-7.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c87395744f5c77c866d0f5a43d97cc39e17c7f1cb0115e54a2fe67ca75c5d14d", size = 220273, upload-time = "2025-11-18T13:33:31.415Z" }, - { url = "https://files.pythonhosted.org/packages/ce/a3/43b749004e3c09452e39bb56347a008f0a0668aad37324a99b5c8ca91d9e/coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a", size = 209503, upload-time = "2025-11-18T13:34:18.892Z" }, -] - -[[package]] -name = "debugpy" -version = "1.8.17" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/ad/71e708ff4ca377c4230530d6a7aa7992592648c122a2cd2b321cf8b35a76/debugpy-1.8.17.tar.gz", hash = "sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e", size = 1644129, upload-time = "2025-09-17T16:33:20.633Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/08/2b/9d8e65beb2751876c82e1aceb32f328c43ec872711fa80257c7674f45650/debugpy-1.8.17-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d", size = 2549522, upload-time = "2025-09-17T16:33:38.466Z" }, - { url = "https://files.pythonhosted.org/packages/b4/78/eb0d77f02971c05fca0eb7465b18058ba84bd957062f5eec82f941ac792a/debugpy-1.8.17-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc", size = 4309417, upload-time = "2025-09-17T16:33:41.299Z" }, - { url = 
"https://files.pythonhosted.org/packages/37/42/c40f1d8cc1fed1e75ea54298a382395b8b937d923fcf41ab0797a554f555/debugpy-1.8.17-cp312-cp312-win32.whl", hash = "sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf", size = 5277130, upload-time = "2025-09-17T16:33:43.554Z" }, - { url = "https://files.pythonhosted.org/packages/72/22/84263b205baad32b81b36eac076de0cdbe09fe2d0637f5b32243dc7c925b/debugpy-1.8.17-cp312-cp312-win_amd64.whl", hash = "sha256:e8f8f61c518952fb15f74a302e068b48d9c4691768ade433e4adeea961993464", size = 5319053, upload-time = "2025-09-17T16:33:53.033Z" }, - { url = "https://files.pythonhosted.org/packages/50/76/597e5cb97d026274ba297af8d89138dfd9e695767ba0e0895edb20963f40/debugpy-1.8.17-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:857c1dd5d70042502aef1c6d1c2801211f3ea7e56f75e9c335f434afb403e464", size = 2538386, upload-time = "2025-09-17T16:33:54.594Z" }, - { url = "https://files.pythonhosted.org/packages/5f/60/ce5c34fcdfec493701f9d1532dba95b21b2f6394147234dce21160bd923f/debugpy-1.8.17-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:3bea3b0b12f3946e098cce9b43c3c46e317b567f79570c3f43f0b96d00788088", size = 4292100, upload-time = "2025-09-17T16:33:56.353Z" }, - { url = "https://files.pythonhosted.org/packages/e8/95/7873cf2146577ef71d2a20bf553f12df865922a6f87b9e8ee1df04f01785/debugpy-1.8.17-cp313-cp313-win32.whl", hash = "sha256:e34ee844c2f17b18556b5bbe59e1e2ff4e86a00282d2a46edab73fd7f18f4a83", size = 5277002, upload-time = "2025-09-17T16:33:58.231Z" }, - { url = "https://files.pythonhosted.org/packages/46/11/18c79a1cee5ff539a94ec4aa290c1c069a5580fd5cfd2fb2e282f8e905da/debugpy-1.8.17-cp313-cp313-win_amd64.whl", hash = "sha256:6c5cd6f009ad4fca8e33e5238210dc1e5f42db07d4b6ab21ac7ffa904a196420", size = 5319047, upload-time = "2025-09-17T16:34:00.586Z" }, - { url = "https://files.pythonhosted.org/packages/b0/d0/89247ec250369fc76db477720a26b2fce7ba079ff1380e4ab4529d2fe233/debugpy-1.8.17-py2.py3-none-any.whl", hash = 
"sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef", size = 5283210, upload-time = "2025-09-17T16:34:25.835Z" }, -] - -[[package]] -name = "decorator" -version = "5.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, -] - -[[package]] -name = "defusedxml" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" }, -] - -[[package]] -name = "distro" -version = "1.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, -] - -[[package]] -name = "dm-tree" -version = "0.1.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "absl-py" }, - { name = "attrs" }, - { name = "numpy" }, - { name = "wrapt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a6/83/ce29720ccf934c6cfa9b9c95ebbe96558386e66886626066632b5e44afed/dm_tree-0.1.9.tar.gz", hash = "sha256:a4c7db3d3935a5a2d5e4b383fc26c6b0cd6f78c6d4605d3e7b518800ecd5342b", size = 35623, upload-time = "2025-01-30T20:45:37.13Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/02/61aa90ab695918b4389d75c99bf0ec3cd0abacf1cadbef4053626f23ce34/dm_tree-0.1.9-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a8d20eeab7fde77a3ed71f07716021eb0edfb4812a128eb381d108af3a310257", size = 175012, upload-time = "2025-03-31T08:35:41.476Z" }, - { url = "https://files.pythonhosted.org/packages/81/10/120cd40556407879c1069941bd8b0d1a75754128c1a5bf0e27dbcf2a49fc/dm_tree-0.1.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80c43417814b1181d3367b335460bfdd30b79ee187a64220e11f6ddd093a4b15", size = 147204, upload-time = "2025-01-30T20:45:25.541Z" }, - { url = "https://files.pythonhosted.org/packages/86/52/27607a275c12858b979b8e943d2bd3bd0f9028503bb7079d5830a8b3cac0/dm_tree-0.1.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2334cfe9d2ed4293f9f1c7aefba0657deaab9ea74b5fadd966f6d01d9b6b42d9", size = 153013, upload-time = "2025-01-30T20:45:26.886Z" }, - { url = "https://files.pythonhosted.org/packages/ea/97/4f78412f73a9350bc8f934441bae5b68b102c8f4240a7f06b4114b51d6de/dm_tree-0.1.9-cp312-cp312-win_amd64.whl", hash = 
"sha256:9020a5ce256fcc83aa4bc190cc96dd66e87685db0a6e501b0c06aa492c2e38fc", size = 102022, upload-time = "2025-01-30T20:45:28.701Z" }, - { url = "https://files.pythonhosted.org/packages/5f/13/823788cd0f7964cadcfa56d1e0f9e5e987ee73b5db6273bc00168f524f1a/dm_tree-0.1.9-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cfa33c2e028155810ad1b4e11928707bf47489516763a86e79cab2954d23bf68", size = 175000, upload-time = "2025-03-31T08:35:42.483Z" }, - { url = "https://files.pythonhosted.org/packages/37/6a/512abdf7f20acc6cd6fce77f7663014d129aa313b5953aa2603d58fdb0c9/dm_tree-0.1.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d05622d074353cf434049206e53c12147903a048c4bd7d77f2800d427413ad78", size = 147210, upload-time = "2025-01-30T20:45:29.732Z" }, - { url = "https://files.pythonhosted.org/packages/e5/0a/f4d72ffb64ab3edc1fa66261f81ee3b4142ab14cd8aa1dfc7bbeca5ee4ba/dm_tree-0.1.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68b0efad76703dd4648586c75618a48cdd671b68c3266fe980e323c15423607", size = 153043, upload-time = "2025-01-30T20:45:30.834Z" }, - { url = "https://files.pythonhosted.org/packages/0d/ee/529ce999770b4d621a64af86c60cfee52f0cdd7294752105179ebf1c07c6/dm_tree-0.1.9-cp313-cp313-win_amd64.whl", hash = "sha256:e97c34fcb44941c36b7ee81dcdbceba0fbe728bddcc77e5837ab2eb665bcbff8", size = 102043, upload-time = "2025-01-30T20:45:32.004Z" }, - { url = "https://files.pythonhosted.org/packages/ee/3c/5b40f8862390e9172e776cf610f3791c1af01f140a5698799fbe4a97206f/dm_tree-0.1.9-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b06e7a5da1c31a82521a60060573527e8d24b9920fdd20b2ec86f08412737598", size = 180821, upload-time = "2025-03-31T08:35:44.474Z" }, - { url = "https://files.pythonhosted.org/packages/84/1d/3cdbeeb3f6937a47a26cee502bffeccc2e55b97dfcce8a1d1135ea1b5b47/dm_tree-0.1.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6893fcdc5cf1a4f459cfc383526d35d42e7c671ae565d7e429a2f2cb2cb93e89", size = 147282, upload-time = "2025-01-30T20:45:33.896Z" }, - { url = "https://files.pythonhosted.org/packages/c5/37/15603079854394f16e3833a7b50696c1f3cbf30a2243a119f64f18a16f36/dm_tree-0.1.9-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1f5d1e96b3a7de22b25b13a5eb30f41f8cf9c02dd4479a24920de99e780903c", size = 153052, upload-time = "2025-01-30T20:45:35.907Z" }, -] - -[[package]] -name = "docutils" -version = "0.21.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, -] - -[[package]] -name = "effectful" -version = "0.2.3" -source = { editable = "." 
} - -[package.optional-dependencies] -docs = [ - { name = "dm-tree" }, - { name = "jax" }, - { name = "myst-parser" }, - { name = "nbsphinx" }, - { name = "numpyro" }, - { name = "prettyprinter" }, - { name = "pypandoc-binary" }, - { name = "pyro-ppl" }, - { name = "sphinx" }, - { name = "sphinx-autodoc-typehints" }, - { name = "sphinx-rtd-theme" }, - { name = "sphinxcontrib-bibtex" }, - { name = "torch" }, -] -jax = [ - { name = "dm-tree" }, - { name = "jax" }, -] -llm = [ - { name = "litellm" }, - { name = "pillow" }, - { name = "pydantic" }, -] -numpyro = [ - { name = "dm-tree" }, - { name = "numpyro" }, -] -prettyprinter = [ - { name = "prettyprinter" }, -] -pyro = [ - { name = "dm-tree" }, - { name = "pyro-ppl" }, -] -test = [ - { name = "dm-tree" }, - { name = "jax" }, - { name = "mypy" }, - { name = "myst-parser" }, - { name = "nbqa" }, - { name = "nbsphinx" }, - { name = "nbval" }, - { name = "numpyro" }, - { name = "prettyprinter" }, - { name = "pypandoc-binary" }, - { name = "pyro-ppl" }, - { name = "pytest" }, - { name = "pytest-benchmark" }, - { name = "pytest-cov" }, - { name = "pytest-xdist" }, - { name = "ruff" }, - { name = "sphinx" }, - { name = "sphinx-autodoc-typehints" }, - { name = "sphinx-rtd-theme" }, - { name = "sphinxcontrib-bibtex" }, - { name = "torch" }, -] -torch = [ - { name = "dm-tree" }, - { name = "torch" }, -] - -[package.dev-dependencies] -dev = [ - { name = "effectful", extra = ["docs", "jax", "llm", "numpyro", "pyro", "test", "torch"] }, -] - -[package.metadata] -requires-dist = [ - { name = "dm-tree", marker = "extra == 'jax'" }, - { name = "dm-tree", marker = "extra == 'numpyro'" }, - { name = "dm-tree", marker = "extra == 'pyro'" }, - { name = "dm-tree", marker = "extra == 'torch'" }, - { name = "effectful", extras = ["torch", "pyro", "jax", "numpyro", "docs", "prettyprinter"], marker = "extra == 'test'" }, - { name = "effectful", extras = ["torch", "pyro", "jax", "numpyro", "prettyprinter"], marker = "extra == 'docs'" }, - { 
name = "jax", marker = "extra == 'jax'" }, - { name = "litellm", marker = "extra == 'llm'" }, - { name = "mypy", marker = "extra == 'test'" }, - { name = "myst-parser", marker = "extra == 'docs'" }, - { name = "nbqa", marker = "extra == 'test'" }, - { name = "nbsphinx", marker = "extra == 'docs'" }, - { name = "nbval", marker = "extra == 'test'" }, - { name = "numpyro", marker = "extra == 'numpyro'", specifier = ">=0.19" }, - { name = "pillow", marker = "extra == 'llm'" }, - { name = "prettyprinter", marker = "extra == 'prettyprinter'" }, - { name = "pydantic", marker = "extra == 'llm'" }, - { name = "pypandoc-binary", marker = "extra == 'docs'", specifier = "<1.16" }, - { name = "pyro-ppl", marker = "extra == 'pyro'", specifier = ">=1.9.1" }, - { name = "pytest", marker = "extra == 'test'" }, - { name = "pytest-benchmark", marker = "extra == 'test'" }, - { name = "pytest-cov", marker = "extra == 'test'" }, - { name = "pytest-xdist", marker = "extra == 'test'" }, - { name = "ruff", marker = "extra == 'test'" }, - { name = "sphinx", marker = "extra == 'docs'" }, - { name = "sphinx-autodoc-typehints", marker = "extra == 'docs'" }, - { name = "sphinx-rtd-theme", marker = "extra == 'docs'" }, - { name = "sphinxcontrib-bibtex", marker = "extra == 'docs'" }, - { name = "torch", marker = "extra == 'torch'" }, -] -provides-extras = ["torch", "pyro", "jax", "numpyro", "llm", "prettyprinter", "docs", "test"] - -[package.metadata.requires-dev] -dev = [{ name = "effectful", extras = ["torch", "pyro", "jax", "numpyro", "llm", "docs", "test"] }] - -[[package]] -name = "execnet" -version = "2.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, -] - -[[package]] -name = "executing" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" }, -] - -[[package]] -name = "fastjsonschema" -version = "2.21.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/b5/23b216d9d985a956623b6bd12d4086b60f0059b27799f23016af04a74ea1/fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de", size = 374130, upload-time = "2025-08-14T18:49:36.666Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/a8/20d0723294217e47de6d9e2e40fd4a9d2f7c4b6ef974babd482a59743694/fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463", size = 24024, upload-time = "2025-08-14T18:49:34.776Z" }, -] - -[[package]] -name = "fastuuid" -version = "0.14.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = 
"sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232, upload-time = "2025-10-19T22:19:22.402Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/02/a2/e78fcc5df65467f0d207661b7ef86c5b7ac62eea337c0c0fcedbeee6fb13/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77e94728324b63660ebf8adb27055e92d2e4611645bf12ed9d88d30486471d0a", size = 510164, upload-time = "2025-10-19T22:31:45.635Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b3/c846f933f22f581f558ee63f81f29fa924acd971ce903dab1a9b6701816e/fastuuid-0.14.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:caa1f14d2102cb8d353096bc6ef6c13b2c81f347e6ab9d6fbd48b9dea41c153d", size = 261837, upload-time = "2025-10-19T22:38:38.53Z" }, - { url = "https://files.pythonhosted.org/packages/54/ea/682551030f8c4fa9a769d9825570ad28c0c71e30cf34020b85c1f7ee7382/fastuuid-0.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d23ef06f9e67163be38cece704170486715b177f6baae338110983f99a72c070", size = 251370, upload-time = "2025-10-19T22:40:26.07Z" }, - { url = "https://files.pythonhosted.org/packages/14/dd/5927f0a523d8e6a76b70968e6004966ee7df30322f5fc9b6cdfb0276646a/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c9ec605ace243b6dbe3bd27ebdd5d33b00d8d1d3f580b39fdd15cd96fd71796", size = 277766, upload-time = "2025-10-19T22:37:23.779Z" }, - { url = "https://files.pythonhosted.org/packages/16/6e/c0fb547eef61293153348f12e0f75a06abb322664b34a1573a7760501336/fastuuid-0.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:808527f2407f58a76c916d6aa15d58692a4a019fdf8d4c32ac7ff303b7d7af09", size = 278105, upload-time = "2025-10-19T22:26:56.821Z" }, - { url = "https://files.pythonhosted.org/packages/2d/b1/b9c75e03b768f61cf2e84ee193dc18601aeaf89a4684b20f2f0e9f52b62c/fastuuid-0.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:2fb3c0d7fef6674bbeacdd6dbd386924a7b60b26de849266d1ff6602937675c8", size = 301564, upload-time = "2025-10-19T22:30:31.604Z" }, - { url = "https://files.pythonhosted.org/packages/fc/fa/f7395fdac07c7a54f18f801744573707321ca0cee082e638e36452355a9d/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab3f5d36e4393e628a4df337c2c039069344db5f4b9d2a3c9cea48284f1dd741", size = 459659, upload-time = "2025-10-19T22:31:32.341Z" }, - { url = "https://files.pythonhosted.org/packages/66/49/c9fd06a4a0b1f0f048aacb6599e7d96e5d6bc6fa680ed0d46bf111929d1b/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b9a0ca4f03b7e0b01425281ffd44e99d360e15c895f1907ca105854ed85e2057", size = 478430, upload-time = "2025-10-19T22:26:22.962Z" }, - { url = "https://files.pythonhosted.org/packages/be/9c/909e8c95b494e8e140e8be6165d5fc3f61fdc46198c1554df7b3e1764471/fastuuid-0.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3acdf655684cc09e60fb7e4cf524e8f42ea760031945aa8086c7eae2eeeabeb8", size = 450894, upload-time = "2025-10-19T22:27:01.647Z" }, - { url = "https://files.pythonhosted.org/packages/90/eb/d29d17521976e673c55ef7f210d4cdd72091a9ec6755d0fd4710d9b3c871/fastuuid-0.14.0-cp312-cp312-win32.whl", hash = "sha256:9579618be6280700ae36ac42c3efd157049fe4dd40ca49b021280481c78c3176", size = 154374, upload-time = "2025-10-19T22:29:19.879Z" }, - { url = "https://files.pythonhosted.org/packages/cc/fc/f5c799a6ea6d877faec0472d0b27c079b47c86b1cdc577720a5386483b36/fastuuid-0.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:d9e4332dc4ba054434a9594cbfaf7823b57993d7d8e7267831c3e059857cf397", size = 156550, upload-time = "2025-10-19T22:27:49.658Z" }, - { url = "https://files.pythonhosted.org/packages/a5/83/ae12dd39b9a39b55d7f90abb8971f1a5f3c321fd72d5aa83f90dc67fe9ed/fastuuid-0.14.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:77a09cb7427e7af74c594e409f7731a0cf887221de2f698e1ca0ebf0f3139021", size = 510720, upload-time 
= "2025-10-19T22:42:34.633Z" }, - { url = "https://files.pythonhosted.org/packages/53/b0/a4b03ff5d00f563cc7546b933c28cb3f2a07344b2aec5834e874f7d44143/fastuuid-0.14.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9bd57289daf7b153bfa3e8013446aa144ce5e8c825e9e366d455155ede5ea2dc", size = 262024, upload-time = "2025-10-19T22:30:25.482Z" }, - { url = "https://files.pythonhosted.org/packages/9c/6d/64aee0a0f6a58eeabadd582e55d0d7d70258ffdd01d093b30c53d668303b/fastuuid-0.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ac60fc860cdf3c3f327374db87ab8e064c86566ca8c49d2e30df15eda1b0c2d5", size = 251679, upload-time = "2025-10-19T22:36:14.096Z" }, - { url = "https://files.pythonhosted.org/packages/60/f5/a7e9cda8369e4f7919d36552db9b2ae21db7915083bc6336f1b0082c8b2e/fastuuid-0.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab32f74bd56565b186f036e33129da77db8be09178cd2f5206a5d4035fb2a23f", size = 277862, upload-time = "2025-10-19T22:36:23.302Z" }, - { url = "https://files.pythonhosted.org/packages/f0/d3/8ce11827c783affffd5bd4d6378b28eb6cc6d2ddf41474006b8d62e7448e/fastuuid-0.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e678459cf4addaedd9936bbb038e35b3f6b2061330fd8f2f6a1d80414c0f87", size = 278278, upload-time = "2025-10-19T22:29:43.809Z" }, - { url = "https://files.pythonhosted.org/packages/a2/51/680fb6352d0bbade04036da46264a8001f74b7484e2fd1f4da9e3db1c666/fastuuid-0.14.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1e3cc56742f76cd25ecb98e4b82a25f978ccffba02e4bdce8aba857b6d85d87b", size = 301788, upload-time = "2025-10-19T22:36:06.825Z" }, - { url = "https://files.pythonhosted.org/packages/fa/7c/2014b5785bd8ebdab04ec857635ebd84d5ee4950186a577db9eff0fb8ff6/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:cb9a030f609194b679e1660f7e32733b7a0f332d519c5d5a6a0a580991290022", size = 459819, upload-time = "2025-10-19T22:35:31.623Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/d2/524d4ceeba9160e7a9bc2ea3e8f4ccf1ad78f3bde34090ca0c51f09a5e91/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:09098762aad4f8da3a888eb9ae01c84430c907a297b97166b8abc07b640f2995", size = 478546, upload-time = "2025-10-19T22:26:03.023Z" }, - { url = "https://files.pythonhosted.org/packages/bc/17/354d04951ce114bf4afc78e27a18cfbd6ee319ab1829c2d5fb5e94063ac6/fastuuid-0.14.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1383fff584fa249b16329a059c68ad45d030d5a4b70fb7c73a08d98fd53bcdab", size = 450921, upload-time = "2025-10-19T22:31:02.151Z" }, - { url = "https://files.pythonhosted.org/packages/fb/be/d7be8670151d16d88f15bb121c5b66cdb5ea6a0c2a362d0dcf30276ade53/fastuuid-0.14.0-cp313-cp313-win32.whl", hash = "sha256:a0809f8cc5731c066c909047f9a314d5f536c871a7a22e815cc4967c110ac9ad", size = 154559, upload-time = "2025-10-19T22:36:36.011Z" }, - { url = "https://files.pythonhosted.org/packages/22/1d/5573ef3624ceb7abf4a46073d3554e37191c868abc3aecd5289a72f9810a/fastuuid-0.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:0df14e92e7ad3276327631c9e7cec09e32572ce82089c55cb1bb8df71cf394ed", size = 156539, upload-time = "2025-10-19T22:33:35.898Z" }, -] - -[[package]] -name = "filelock" -version = "3.20.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, -] - -[[package]] -name = "frozenlist" -version = "1.8.0" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, - { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, - { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, - { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, - { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, - { url = 
"https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, - { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, - { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, - { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, - { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, - { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = 
"2025-10-06T05:36:19.046Z" }, - { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, - { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, - { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, - { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, - { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, - { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, - { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, - { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, - { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, - { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 
239833, upload-time = "2025-10-06T05:36:34.947Z" }, - { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, - { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, - { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, - { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, - { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, - { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, - { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, - { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, - { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, - { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, - { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, - { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, - { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, - { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, - { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, - { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, - { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, - { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, - { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" 
}, - { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, - { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, -] - -[[package]] -name = "fsspec" -version = "2025.10.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/7f/2747c0d332b9acfa75dc84447a066fdf812b5a6b8d30472b74d309bfe8cb/fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59", size = 309285, upload-time = "2025-10-30T14:58:44.036Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/02/a6b21098b1d5d6249b7c5ab69dde30108a71e4e819d4a9778f1de1d5b70d/fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d", size = 200966, upload-time = "2025-10-30T14:58:42.53Z" }, -] - -[[package]] -name = "grpcio" -version = "1.67.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/53/d9282a66a5db45981499190b77790570617a604a38f3d103d0400974aeb5/grpcio-1.67.1.tar.gz", hash = "sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732", size = 12580022, upload-time = "2024-10-29T06:30:07.787Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/25/6f95bd18d5f506364379eabc0d5874873cc7dbdaf0757df8d1e82bc07a88/grpcio-1.67.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953", size = 5089809, upload-time = 
"2024-10-29T06:24:31.24Z" }, - { url = "https://files.pythonhosted.org/packages/10/3f/d79e32e5d0354be33a12db2267c66d3cfeff700dd5ccdd09fd44a3ff4fb6/grpcio-1.67.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb", size = 10981985, upload-time = "2024-10-29T06:24:34.942Z" }, - { url = "https://files.pythonhosted.org/packages/21/f2/36fbc14b3542e3a1c20fb98bd60c4732c55a44e374a4eb68f91f28f14aab/grpcio-1.67.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0", size = 5588770, upload-time = "2024-10-29T06:24:38.145Z" }, - { url = "https://files.pythonhosted.org/packages/0d/af/bbc1305df60c4e65de8c12820a942b5e37f9cf684ef5e49a63fbb1476a73/grpcio-1.67.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af", size = 6214476, upload-time = "2024-10-29T06:24:41.006Z" }, - { url = "https://files.pythonhosted.org/packages/92/cf/1d4c3e93efa93223e06a5c83ac27e32935f998bc368e276ef858b8883154/grpcio-1.67.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e", size = 5850129, upload-time = "2024-10-29T06:24:43.553Z" }, - { url = "https://files.pythonhosted.org/packages/ae/ca/26195b66cb253ac4d5ef59846e354d335c9581dba891624011da0e95d67b/grpcio-1.67.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75", size = 6568489, upload-time = "2024-10-29T06:24:46.453Z" }, - { url = "https://files.pythonhosted.org/packages/d1/94/16550ad6b3f13b96f0856ee5dfc2554efac28539ee84a51d7b14526da985/grpcio-1.67.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38", size = 6149369, upload-time = "2024-10-29T06:24:49.112Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/0d/4c3b2587e8ad7f121b597329e6c2620374fccbc2e4e1aa3c73ccc670fde4/grpcio-1.67.1-cp312-cp312-win32.whl", hash = "sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78", size = 3599176, upload-time = "2024-10-29T06:24:51.443Z" }, - { url = "https://files.pythonhosted.org/packages/7d/36/0c03e2d80db69e2472cf81c6123aa7d14741de7cf790117291a703ae6ae1/grpcio-1.67.1-cp312-cp312-win_amd64.whl", hash = "sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc", size = 4346574, upload-time = "2024-10-29T06:24:54.587Z" }, - { url = "https://files.pythonhosted.org/packages/12/d2/2f032b7a153c7723ea3dea08bffa4bcaca9e0e5bdf643ce565b76da87461/grpcio-1.67.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b", size = 5091487, upload-time = "2024-10-29T06:24:57.416Z" }, - { url = "https://files.pythonhosted.org/packages/d0/ae/ea2ff6bd2475a082eb97db1104a903cf5fc57c88c87c10b3c3f41a184fc0/grpcio-1.67.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1", size = 10943530, upload-time = "2024-10-29T06:25:01.062Z" }, - { url = "https://files.pythonhosted.org/packages/07/62/646be83d1a78edf8d69b56647327c9afc223e3140a744c59b25fbb279c3b/grpcio-1.67.1-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af", size = 5589079, upload-time = "2024-10-29T06:25:04.254Z" }, - { url = "https://files.pythonhosted.org/packages/d0/25/71513d0a1b2072ce80d7f5909a93596b7ed10348b2ea4fdcbad23f6017bf/grpcio-1.67.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955", size = 6213542, upload-time = "2024-10-29T06:25:06.824Z" }, - { url = 
"https://files.pythonhosted.org/packages/76/9a/d21236297111052dcb5dc85cd77dc7bf25ba67a0f55ae028b2af19a704bc/grpcio-1.67.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8", size = 5850211, upload-time = "2024-10-29T06:25:10.149Z" }, - { url = "https://files.pythonhosted.org/packages/2d/fe/70b1da9037f5055be14f359026c238821b9bcf6ca38a8d760f59a589aacd/grpcio-1.67.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62", size = 6572129, upload-time = "2024-10-29T06:25:12.853Z" }, - { url = "https://files.pythonhosted.org/packages/74/0d/7df509a2cd2a54814598caf2fb759f3e0b93764431ff410f2175a6efb9e4/grpcio-1.67.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb", size = 6149819, upload-time = "2024-10-29T06:25:15.803Z" }, - { url = "https://files.pythonhosted.org/packages/0a/08/bc3b0155600898fd10f16b79054e1cca6cb644fa3c250c0fe59385df5e6f/grpcio-1.67.1-cp313-cp313-win32.whl", hash = "sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121", size = 3596561, upload-time = "2024-10-29T06:25:19.348Z" }, - { url = "https://files.pythonhosted.org/packages/5a/96/44759eca966720d0f3e1b105c43f8ad4590c97bf8eb3cd489656e9590baa/grpcio-1.67.1-cp313-cp313-win_amd64.whl", hash = "sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba", size = 4346042, upload-time = "2024-10-29T06:25:21.939Z" }, -] - -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "hf-xet" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/6e/0f11bacf08a67f7fb5ee09740f2ca54163863b07b70d579356e9222ce5d8/hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f", size = 506020, upload-time = "2025-10-24T19:04:32.129Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/a5/85ef910a0aa034a2abcfadc360ab5ac6f6bc4e9112349bd40ca97551cff0/hf_xet-1.2.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:ceeefcd1b7aed4956ae8499e2199607765fbd1c60510752003b6cc0b8413b649", size = 2861870, upload-time = "2025-10-24T19:04:11.422Z" }, - { url = "https://files.pythonhosted.org/packages/ea/40/e2e0a7eb9a51fe8828ba2d47fe22a7e74914ea8a0db68a18c3aa7449c767/hf_xet-1.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b70218dd548e9840224df5638fdc94bd033552963cfa97f9170829381179c813", size = 2717584, upload-time = "2025-10-24T19:04:09.586Z" }, - { url = "https://files.pythonhosted.org/packages/a5/7d/daf7f8bc4594fdd59a8a596f9e3886133fdc68e675292218a5e4c1b7e834/hf_xet-1.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d40b18769bb9a8bc82a9ede575ce1a44c75eb80e7375a01d76259089529b5dc", size = 3315004, upload-time = "2025-10-24T19:04:00.314Z" }, - { url = "https://files.pythonhosted.org/packages/b1/ba/45ea2f605fbf6d81c8b21e4d970b168b18a53515923010c312c06cd83164/hf_xet-1.2.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:cd3a6027d59cfb60177c12d6424e31f4b5ff13d8e3a1247b3a584bf8977e6df5", size = 3222636, upload-time = "2025-10-24T19:03:58.111Z" }, - { url = 
"https://files.pythonhosted.org/packages/4a/1d/04513e3cab8f29ab8c109d309ddd21a2705afab9d52f2ba1151e0c14f086/hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6de1fc44f58f6dd937956c8d304d8c2dea264c80680bcfa61ca4a15e7b76780f", size = 3408448, upload-time = "2025-10-24T19:04:20.951Z" }, - { url = "https://files.pythonhosted.org/packages/f0/7c/60a2756d7feec7387db3a1176c632357632fbe7849fce576c5559d4520c7/hf_xet-1.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f182f264ed2acd566c514e45da9f2119110e48a87a327ca271027904c70c5832", size = 3503401, upload-time = "2025-10-24T19:04:22.549Z" }, - { url = "https://files.pythonhosted.org/packages/4e/64/48fffbd67fb418ab07451e4ce641a70de1c40c10a13e25325e24858ebe5a/hf_xet-1.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:293a7a3787e5c95d7be1857358a9130694a9c6021de3f27fa233f37267174382", size = 2900866, upload-time = "2025-10-24T19:04:33.461Z" }, - { url = "https://files.pythonhosted.org/packages/96/2d/22338486473df5923a9ab7107d375dbef9173c338ebef5098ef593d2b560/hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848", size = 2866099, upload-time = "2025-10-24T19:04:15.366Z" }, - { url = "https://files.pythonhosted.org/packages/7f/8c/c5becfa53234299bc2210ba314eaaae36c2875e0045809b82e40a9544f0c/hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4", size = 2722178, upload-time = "2025-10-24T19:04:13.695Z" }, - { url = "https://files.pythonhosted.org/packages/9a/92/cf3ab0b652b082e66876d08da57fcc6fa2f0e6c70dfbbafbd470bb73eb47/hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd", size = 3320214, upload-time = "2025-10-24T19:04:03.596Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/92/3f7ec4a1b6a65bf45b059b6d4a5d38988f63e193056de2f420137e3c3244/hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c", size = 3229054, upload-time = "2025-10-24T19:04:01.949Z" }, - { url = "https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812, upload-time = "2025-10-24T19:04:24.585Z" }, - { url = "https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920, upload-time = "2025-10-24T19:04:26.927Z" }, - { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735, upload-time = "2025-10-24T19:04:35.928Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - 
-[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, -] - -[[package]] -name = "huggingface-hub" -version = "1.1.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "filelock" }, - { name = "fsspec" }, - { name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" }, - { name = "httpx" }, - { name = "packaging" }, - { name = "pyyaml" }, - { name = "shellingham" }, - { name = "tqdm" }, - { name = "typer-slim" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4c/08/dc669fa8c7267752ce2d536683436f0c46661aca45e9450c635a365ca2df/huggingface_hub-1.1.6.tar.gz", hash = "sha256:e1beacb611d74a8189b4c5298e8675fb518256af73b38143171f6efa7d822cf6", size = 607477, upload-time = "2025-11-28T10:23:35.223Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/3c/168062db8c0068315ed3f137db450869eb14d98f00144234c118f294b461/huggingface_hub-1.1.6-py3-none-any.whl", hash = "sha256:09726c4fc4c0dc5d83568234daff1ccb815c39b310784359c9d8b5906f679de2", size = 516110, upload-time = "2025-11-28T10:23:33.63Z" }, -] - -[[package]] -name = "idna" 
-version = "3.11" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, -] - -[[package]] -name = "imagesize" -version = "1.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, -] - -[[package]] -name = "importlib-metadata" -version = "8.7.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "zipp" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = 
"sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, -] - -[[package]] -name = "iniconfig" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, -] - -[[package]] -name = "ipykernel" -version = "7.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "appnope", marker = "sys_platform == 'darwin'" }, - { name = "comm" }, - { name = "debugpy" }, - { name = "ipython" }, - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "matplotlib-inline" }, - { name = "nest-asyncio" }, - { name = "packaging" }, - { name = "psutil" }, - { name = "pyzmq" }, - { name = "tornado" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/a4/4948be6eb88628505b83a1f2f40d90254cab66abf2043b3c40fa07dfce0f/ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db", size = 174579, upload-time = "2025-10-27T09:46:39.471Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/17/20c2552266728ceba271967b87919664ecc0e33efca29c3efc6baf88c5f9/ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c", size = 117968, upload-time = "2025-10-27T09:46:37.805Z" }, -] - -[[package]] -name = "ipython" -version = "9.7.0" -source = { 
registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "decorator" }, - { name = "ipython-pygments-lexers" }, - { name = "jedi" }, - { name = "matplotlib-inline" }, - { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit" }, - { name = "pygments" }, - { name = "stack-data" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/29/e6/48c74d54039241a456add616464ea28c6ebf782e4110d419411b83dae06f/ipython-9.7.0.tar.gz", hash = "sha256:5f6de88c905a566c6a9d6c400a8fed54a638e1f7543d17aae2551133216b1e4e", size = 4422115, upload-time = "2025-11-05T12:18:54.646Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/aa/62893d6a591d337aa59dcc4c6f6c842f1fe20cd72c8c5c1f980255243252/ipython-9.7.0-py3-none-any.whl", hash = "sha256:bce8ac85eb9521adc94e1845b4c03d88365fd6ac2f4908ec4ed1eb1b0a065f9f", size = 618911, upload-time = "2025-11-05T12:18:52.484Z" }, -] - -[[package]] -name = "ipython-pygments-lexers" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" }, -] - -[[package]] -name = "jax" -version = "0.8.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jaxlib" }, - { name = "ml-dtypes" }, - { 
name = "numpy" }, - { name = "opt-einsum" }, - { name = "scipy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/32/82/84fd2c662e4d410a34b0402de9b56bb69d7f72d1b875c3ae0edc07df18cc/jax-0.8.1.tar.gz", hash = "sha256:e53f67b15315f5e154851a7fd77a192b59c6c75b3f7ac56e214296765391cca7", size = 2509320, upload-time = "2025-11-18T19:50:02.609Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/e7/19b8cfc8963b2e10a01a4db7bb27ec5fa39ecd024bc62f8e2d1de5625a9d/jax-0.8.1-py3-none-any.whl", hash = "sha256:4cbdc5548f3095cdd69d38e4337950b2fc1f250a740a0234d190e4a319077564", size = 2922137, upload-time = "2025-11-18T19:47:43.693Z" }, -] - -[[package]] -name = "jaxlib" -version = "0.8.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ml-dtypes" }, - { name = "numpy" }, - { name = "scipy" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/9d/59b36e2f348e599d5812743f263ca54aa03be1a4c9dfc11504d19864b72d/jaxlib-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88bde0f535eeea6689e0cd57d40b7660d5206ac95c7d42e09562a109b963a49f", size = 55728156, upload-time = "2025-11-18T19:48:56.254Z" }, - { url = "https://files.pythonhosted.org/packages/7e/73/2aa891de9f5f4c60ba3c63bda97ec4ace50ffb900ff3bf750ce42c514a3b/jaxlib-0.8.1-cp312-cp312-manylinux_2_27_aarch64.whl", hash = "sha256:bed1e94ae8c7c16bca4476d8d7f582f0d1a102a4e69c3a9bd2069a0dc42274a9", size = 74209108, upload-time = "2025-11-18T19:48:59.572Z" }, - { url = "https://files.pythonhosted.org/packages/eb/4b/3c7e373d81219ee7493c1581c85a926c413ddeb3794cff87a37023a337e4/jaxlib-0.8.1-cp312-cp312-manylinux_2_27_x86_64.whl", hash = "sha256:af4924189fc53b69237715b56ebcbfc71bb91ca16184143dcef0d430c8173de6", size = 80256943, upload-time = "2025-11-18T19:49:02.92Z" }, - { url = "https://files.pythonhosted.org/packages/07/6c/a6f449a7d1c7f91d73c3b8e00ceba92dff9dfd642508bbe1ddba9cb9ea57/jaxlib-0.8.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:24ec3f3a9c45d6de060020dc94c444d69e18099fab927ea3979ff8cedf0ed2c9", size = 59787068, upload-time = "2025-11-18T19:49:06.275Z" }, - { url = "https://files.pythonhosted.org/packages/f8/67/97c62849b5d8fc075f902201ff136ad224a2ef113d1fa655ece0ffe8b2a4/jaxlib-0.8.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a0349f6e8179dc897d33aeb90ec66b4a8041330fbbba8d071dc6167cd2271539", size = 55726611, upload-time = "2025-11-18T19:49:09.162Z" }, - { url = "https://files.pythonhosted.org/packages/fd/2a/9fb7599e43d66958b6a9859e045b605afea31f7fd96cfa35a7a8e978b0f8/jaxlib-0.8.1-cp313-cp313-manylinux_2_27_aarch64.whl", hash = "sha256:bd697c171ace1e2e9d6ed910a78f385b3c4095cee290b0255aa58848f2acdeab", size = 74207596, upload-time = "2025-11-18T19:49:12.39Z" }, - { url = "https://files.pythonhosted.org/packages/7d/61/ab5c98641e15f9844dd49efbf6f22c6a9c5d17304319e5be8c51a1dfd088/jaxlib-0.8.1-cp313-cp313-manylinux_2_27_x86_64.whl", hash = "sha256:d245bd6a279c72ca5f796df84cdd64d7c9c8abc4b8d89adf4acf45898dab958b", size = 80254560, upload-time = "2025-11-18T19:49:16.172Z" }, - { url = "https://files.pythonhosted.org/packages/1c/71/82a04ce93baeca5b3d10340f574e0668d327b7d0d18e32d9080917c507f6/jaxlib-0.8.1-cp313-cp313-win_amd64.whl", hash = "sha256:8e118e1fbe714f37a94ba26777c17faab7dca4a33646a3d98cd1d99673bbd6b1", size = 59786828, upload-time = "2025-11-18T19:49:19.563Z" }, - { url = "https://files.pythonhosted.org/packages/97/65/e7c625f1fdb54d45ac248d8398a28d6c02528c31feaa6e1c146a08192d77/jaxlib-0.8.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4933298fcfb07a5aa2d1fed21c111d07cea50e6f180dba2cdb5463c13fb98f2f", size = 55835933, upload-time = "2025-11-18T19:49:27.362Z" }, - { url = "https://files.pythonhosted.org/packages/1f/04/e09ff7b5ba0af93501cb196c65103a30e5050083203c1ff581f18718a356/jaxlib-0.8.1-cp313-cp313t-manylinux_2_27_aarch64.whl", hash = "sha256:f2f11491b077d05249d63813e811401194a41edc8e9cc60af8f4b554057cfad0", size = 74323389, upload-time = "2025-11-18T19:49:30.457Z" 
}, - { url = "https://files.pythonhosted.org/packages/44/9f/8b7f6ad9eebf8946e73049dae85f86544f5743bc8b2190898415646fa7ec/jaxlib-0.8.1-cp313-cp313t-manylinux_2_27_x86_64.whl", hash = "sha256:7a5d381fad89622750fae29fab83c0847e2931ad8d6a34dc13b28fc4d67f75a3", size = 80358249, upload-time = "2025-11-18T19:49:33.682Z" }, -] - -[[package]] -name = "jedi" -version = "0.19.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "parso" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" }, -] - -[[package]] -name = "jinja2" -version = "3.1.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, -] - -[[package]] -name = "jiter" -version = "0.12.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, - { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, - { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, - { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, - { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, - { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, - { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, - { url = 
"https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, - { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, - { url = "https://files.pythonhosted.org/packages/3d/a6/97209693b177716e22576ee1161674d1d58029eb178e01866a0422b69224/jiter-0.12.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:6cc49d5130a14b732e0612bc76ae8db3b49898732223ef8b7599aa8d9810683e", size = 313658, upload-time = "2025-11-09T20:47:44.424Z" }, - { url = "https://files.pythonhosted.org/packages/06/4d/125c5c1537c7d8ee73ad3d530a442d6c619714b95027143f1b61c0b4dfe0/jiter-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:37f27a32ce36364d2fa4f7fdc507279db604d27d239ea2e044c8f148410defe1", size = 318605, upload-time = "2025-11-09T20:47:45.973Z" }, - { url = "https://files.pythonhosted.org/packages/99/bf/a840b89847885064c41a5f52de6e312e91fa84a520848ee56c97e4fa0205/jiter-0.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbc0944aa3d4b4773e348cda635252824a78f4ba44328e042ef1ff3f6080d1cf", size = 349803, upload-time = "2025-11-09T20:47:47.535Z" }, - { url = "https://files.pythonhosted.org/packages/8a/88/e63441c28e0db50e305ae23e19c1d8fae012d78ed55365da392c1f34b09c/jiter-0.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da25c62d4ee1ffbacb97fac6dfe4dcd6759ebdc9015991e92a6eae5816287f44", size = 365120, upload-time = "2025-11-09T20:47:49.284Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/7c/49b02714af4343970eb8aca63396bc1c82fa01197dbb1e9b0d274b550d4e/jiter-0.12.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:048485c654b838140b007390b8182ba9774621103bd4d77c9c3f6f117474ba45", size = 479918, upload-time = "2025-11-09T20:47:50.807Z" }, - { url = "https://files.pythonhosted.org/packages/69/ba/0a809817fdd5a1db80490b9150645f3aae16afad166960bcd562be194f3b/jiter-0.12.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:635e737fbb7315bef0037c19b88b799143d2d7d3507e61a76751025226b3ac87", size = 379008, upload-time = "2025-11-09T20:47:52.211Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c3/c9fc0232e736c8877d9e6d83d6eeb0ba4e90c6c073835cc2e8f73fdeef51/jiter-0.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e017c417b1ebda911bd13b1e40612704b1f5420e30695112efdbed8a4b389ed", size = 361785, upload-time = "2025-11-09T20:47:53.512Z" }, - { url = "https://files.pythonhosted.org/packages/96/61/61f69b7e442e97ca6cd53086ddc1cf59fb830549bc72c0a293713a60c525/jiter-0.12.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:89b0bfb8b2bf2351fba36bb211ef8bfceba73ef58e7f0c68fb67b5a2795ca2f9", size = 386108, upload-time = "2025-11-09T20:47:54.893Z" }, - { url = "https://files.pythonhosted.org/packages/e9/2e/76bb3332f28550c8f1eba3bf6e5efe211efda0ddbbaf24976bc7078d42a5/jiter-0.12.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:f5aa5427a629a824a543672778c9ce0c5e556550d1569bb6ea28a85015287626", size = 519937, upload-time = "2025-11-09T20:47:56.253Z" }, - { url = "https://files.pythonhosted.org/packages/84/d6/fa96efa87dc8bff2094fb947f51f66368fa56d8d4fc9e77b25d7fbb23375/jiter-0.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed53b3d6acbcb0fd0b90f20c7cb3b24c357fe82a3518934d4edfa8c6898e498c", size = 510853, upload-time = "2025-11-09T20:47:58.32Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/28/93f67fdb4d5904a708119a6ab58a8f1ec226ff10a94a282e0215402a8462/jiter-0.12.0-cp313-cp313-win32.whl", hash = "sha256:4747de73d6b8c78f2e253a2787930f4fffc68da7fa319739f57437f95963c4de", size = 204699, upload-time = "2025-11-09T20:47:59.686Z" }, - { url = "https://files.pythonhosted.org/packages/c4/1f/30b0eb087045a0abe2a5c9c0c0c8da110875a1d3be83afd4a9a4e548be3c/jiter-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:e25012eb0c456fcc13354255d0338cd5397cce26c77b2832b3c4e2e255ea5d9a", size = 204258, upload-time = "2025-11-09T20:48:01.01Z" }, - { url = "https://files.pythonhosted.org/packages/2c/f4/2b4daf99b96bce6fc47971890b14b2a36aef88d7beb9f057fafa032c6141/jiter-0.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:c97b92c54fe6110138c872add030a1f99aea2401ddcdaa21edf74705a646dd60", size = 185503, upload-time = "2025-11-09T20:48:02.35Z" }, - { url = "https://files.pythonhosted.org/packages/39/ca/67bb15a7061d6fe20b9b2a2fd783e296a1e0f93468252c093481a2f00efa/jiter-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53839b35a38f56b8be26a7851a48b89bc47e5d88e900929df10ed93b95fea3d6", size = 317965, upload-time = "2025-11-09T20:48:03.783Z" }, - { url = "https://files.pythonhosted.org/packages/18/af/1788031cd22e29c3b14bc6ca80b16a39a0b10e611367ffd480c06a259831/jiter-0.12.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94f669548e55c91ab47fef8bddd9c954dab1938644e715ea49d7e117015110a4", size = 345831, upload-time = "2025-11-09T20:48:05.55Z" }, - { url = "https://files.pythonhosted.org/packages/05/17/710bf8472d1dff0d3caf4ced6031060091c1320f84ee7d5dcbed1f352417/jiter-0.12.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:351d54f2b09a41600ffea43d081522d792e81dcfb915f6d2d242744c1cc48beb", size = 361272, upload-time = "2025-11-09T20:48:06.951Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/f1/1dcc4618b59761fef92d10bcbb0b038b5160be653b003651566a185f1a5c/jiter-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2a5e90604620f94bf62264e7c2c038704d38217b7465b863896c6d7c902b06c7", size = 204604, upload-time = "2025-11-09T20:48:08.328Z" }, - { url = "https://files.pythonhosted.org/packages/d9/32/63cb1d9f1c5c6632a783c0052cde9ef7ba82688f7065e2f0d5f10a7e3edb/jiter-0.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:88ef757017e78d2860f96250f9393b7b577b06a956ad102c29c8237554380db3", size = 185628, upload-time = "2025-11-09T20:48:09.572Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, - { url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" }, - { url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, -] - -[[package]] -name = 
"jsonschema" -version = "4.25.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "jsonschema-specifications" }, - { name = "referencing" }, - { name = "rpds-py" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, -] - -[[package]] -name = "jsonschema-specifications" -version = "2025.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "referencing" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, -] - -[[package]] -name = "jupyter-client" -version = "8.6.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jupyter-core" }, - { name = "python-dateutil" }, - { name = "pyzmq" }, - { name = "tornado" }, - { name = "traitlets" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019, upload-time = "2024-09-17T10:44:17.613Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105, upload-time = "2024-09-17T10:44:15.218Z" }, -] - -[[package]] -name = "jupyter-core" -version = "5.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "platformdirs" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/02/49/9d1284d0dc65e2c757b74c6687b6d319b02f822ad039e5c512df9194d9dd/jupyter_core-5.9.1.tar.gz", hash = "sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508", size = 89814, upload-time = "2025-10-16T19:19:18.444Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/e7/80988e32bf6f73919a113473a604f5a8f09094de312b9d52b79c2df7612b/jupyter_core-5.9.1-py3-none-any.whl", hash = "sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407", size = 29032, upload-time = "2025-10-16T19:19:16.783Z" }, -] - -[[package]] -name = "jupyterlab-pygments" -version = "0.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900, upload-time = "2023-11-23T09:26:37.44Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = 
"sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884, upload-time = "2023-11-23T09:26:34.325Z" }, -] - -[[package]] -name = "latexcodec" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/27/dd/4270b2c5e2ee49316c3859e62293bd2ea8e382339d63ab7bbe9f39c0ec3b/latexcodec-3.0.1.tar.gz", hash = "sha256:e78a6911cd72f9dec35031c6ec23584de6842bfbc4610a9678868d14cdfb0357", size = 31222, upload-time = "2025-06-17T18:47:34.051Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/40/23569737873cc9637fd488606347e9dd92b9fa37ba4fcda1f98ee5219a97/latexcodec-3.0.1-py3-none-any.whl", hash = "sha256:a9eb8200bff693f0437a69581f7579eb6bca25c4193515c09900ce76451e452e", size = 18532, upload-time = "2025-06-17T18:47:30.726Z" }, -] - -[[package]] -name = "librt" -version = "0.6.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/c3/86e94f888f65ba1731f97c33ef10016c7286e0fa70d4a309eab41937183a/librt-0.6.2.tar.gz", hash = "sha256:3898faf00cada0bf2a97106936e92fe107ee4fbdf4e5ebd922cfd5ee9f052884", size = 53420, upload-time = "2025-11-18T16:51:17.097Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/0c/825aece0e99f1f948e1e423ac443913d753ddbcbc0e48e649f46dd3e6adc/librt-0.6.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29f4e8888de87eb637c1b1c3ca9e97f3d8828e481f5ef0b86bb90ae026215d4c", size = 27842, upload-time = "2025-11-18T16:50:13.751Z" }, - { url = "https://files.pythonhosted.org/packages/2f/64/74190707875d3db4c6e2655dd804577e85bdbb437fdf32206003dda0bb83/librt-0.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5cdacbe18f91741a5f45bb169a92ab5299e0c6a7245798d075885480706c4e5", size = 27841, upload-time = "2025-11-18T16:50:14.74Z" }, - { url = 
"https://files.pythonhosted.org/packages/db/0c/b783a58fc741cf30872a9947f3c777c57c2845e5e805d78c5147bc2c6c06/librt-0.6.2-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:de0461670334c16b76885d8a93a3c1f1b0259fb7d817cec326193325c24898e0", size = 84136, upload-time = "2025-11-18T16:50:16.002Z" }, - { url = "https://files.pythonhosted.org/packages/e5/87/5ad8119cc2128cce01a07198daaff02114b0dffc0951a5577f1980756d22/librt-0.6.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fcddd735029802e9ab56d482f977ca08920c432382c9382334e7cfa9ad0bb0de", size = 88004, upload-time = "2025-11-18T16:50:17.052Z" }, - { url = "https://files.pythonhosted.org/packages/46/96/9f7a25150c54614b756c1e6ae3898a798e665e938df4d5b054299082c5e6/librt-0.6.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:06c82cf56b3c2fab8e19e7415b6eb1b958356f6e6ee082b0077a582356801185", size = 88934, upload-time = "2025-11-18T16:50:18.485Z" }, - { url = "https://files.pythonhosted.org/packages/40/ed/e7da561b2169f02f4281ad806f800f94afa69eaeb994e65b0f178f2be52b/librt-0.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3a426287d679aebd6dd3000192d054cdd2d90ae7612b51d0f4931b2f37dd1d13", size = 90599, upload-time = "2025-11-18T16:50:19.587Z" }, - { url = "https://files.pythonhosted.org/packages/ea/ba/aa06f14eba3d6f19f34ef73d5c0b17b1cdf7543661912a9b9e2e991f4b13/librt-0.6.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:75fa4126883da85600f4763930e8791949f50ab323fa8fc17fb31185b4fd16af", size = 88603, upload-time = "2025-11-18T16:50:20.901Z" }, - { url = "https://files.pythonhosted.org/packages/08/52/56c449119dc3b942d3ff2e985969571819db123f655e3744a08819d1f013/librt-0.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:73cf76b5814d268d777eca17db45a2bdd9c80f50eab01cf8b642f8bf18497358", size = 92112, upload-time = "2025-11-18T16:50:22.064Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/aa/fe6faf84b5cc0ae3001adfe4f23aaa06cf9881965c7d9decce6180605244/librt-0.6.2-cp312-cp312-win32.whl", hash = "sha256:93cd69497046d67f35e1d00cef099bf32f97c277ff950c406e7e062ccf86852e", size = 20128, upload-time = "2025-11-18T16:50:23.182Z" }, - { url = "https://files.pythonhosted.org/packages/08/58/96086add1333d0ca6607b768bbb5633bc7a6265d11fa953be9392e789c46/librt-0.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:2ada7182335b25120ec960fbbf22d8f534bb9bb101f248f849bc977bc51165c8", size = 21547, upload-time = "2025-11-18T16:50:24.157Z" }, - { url = "https://files.pythonhosted.org/packages/71/e6/7e533225c4f05ba03c15e4f1788617539a19a47182cc677bc8b9feaeacf8/librt-0.6.2-cp312-cp312-win_arm64.whl", hash = "sha256:e2deaac245f6ce54caf6ccb5dabeadd35950e669f4ed31addd300ff4eaee981c", size = 20945, upload-time = "2025-11-18T16:50:25.915Z" }, - { url = "https://files.pythonhosted.org/packages/5b/e7/e4ff31452298cda5008dede6d5805921a75f95aaaa2bfd1ac9d547efd47d/librt-0.6.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ad4014a959de1b4c020e0de0b92b637463e80d54fc6f12b8c0a357ef7289190f", size = 27875, upload-time = "2025-11-18T16:50:27.22Z" }, - { url = "https://files.pythonhosted.org/packages/a4/6b/fcbfc8243ff2f207f51566604b7a538ba2ee7c10222a82a827adacdaa9ad/librt-0.6.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1eea7c6633cdd6ee3fd8d1677949c278bd2db9f6f39d2b34affe2d70c8dc0258", size = 27854, upload-time = "2025-11-18T16:50:28.475Z" }, - { url = "https://files.pythonhosted.org/packages/04/32/ff7041ff7d513e195bef955b4d7313ccd41436c539c481e2d28e78fd1581/librt-0.6.2-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:28d159adc310be1aba21480d56a6ebc06b98948fb60e15ccc77a77c6a037cd5f", size = 84321, upload-time = "2025-11-18T16:50:29.463Z" }, - { url = 
"https://files.pythonhosted.org/packages/8f/04/c0935cd6dcad97789d6bf9ae87bb1c98f56c4f237dc3e0cbd0062b893717/librt-0.6.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd85a818a58871a7d3fe3e9821423c06c1d2b5ac6d7ad21f62c28243b858c920", size = 88232, upload-time = "2025-11-18T16:50:30.481Z" }, - { url = "https://files.pythonhosted.org/packages/cb/68/14f2641852fafbeb62a72bd113ad71adc616b961238f96a41c8b6d4b2f39/librt-0.6.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3d58f22191217c6474d1a26269db2347c3862ef9fa379bd0c86bca659fe84145", size = 89113, upload-time = "2025-11-18T16:50:31.613Z" }, - { url = "https://files.pythonhosted.org/packages/5d/84/ebdb7ecfe7f3035dd8dec57c01086f089e255dac828c77535dd90dee3065/librt-0.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6408501b01add8913cfdf795ba57bce7095ac2a2ee170de660d4bff8ad589074", size = 90808, upload-time = "2025-11-18T16:50:32.753Z" }, - { url = "https://files.pythonhosted.org/packages/f8/fc/4445de50cb1445fe2cd013f81cd5b102e9a5d4ae573e567a12de50d5ea89/librt-0.6.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fd1d5b3867feeecf3b627178f43b7bb940e0390e81bafab6b681b17112591198", size = 88891, upload-time = "2025-11-18T16:50:33.812Z" }, - { url = "https://files.pythonhosted.org/packages/c0/dc/ff70e69a9f1001d33ae377bf715b3ca8df0566bdd36317a79e1a8d922793/librt-0.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c2920f525b54cd00adbb0e727d5d3ba6292a2d038788529ad8810a3d77acdf0f", size = 92300, upload-time = "2025-11-18T16:50:34.988Z" }, - { url = "https://files.pythonhosted.org/packages/07/3f/0b7e34d90cf76c617b90811905f4c2d0f46e7f8037817cd9c83279bc5e4a/librt-0.6.2-cp313-cp313-win32.whl", hash = "sha256:74213ad49b127da47a22f2c877be216820215880c527f28df726ad5d505f1239", size = 20162, upload-time = "2025-11-18T16:50:36.001Z" }, - { url = 
"https://files.pythonhosted.org/packages/14/c0/c81266c308e1449ed9197b059feea91205832a1cd37e12443c0f7d3e0743/librt-0.6.2-cp313-cp313-win_amd64.whl", hash = "sha256:778667b8688bbacba06739eb5b0b78d99d2c65a99262dac5ab25eba473b34d5f", size = 21483, upload-time = "2025-11-18T16:50:36.923Z" }, - { url = "https://files.pythonhosted.org/packages/35/8e/9ba1d7e4aedec42bb5384ac68d65745f59a91944c2af16fb264cfd2fe42e/librt-0.6.2-cp313-cp313-win_arm64.whl", hash = "sha256:e787bfcccdf0f25e02310d7f1e2b9bfea714f594cda37a6ce6da84502f14acbf", size = 20937, upload-time = "2025-11-18T16:50:37.905Z" }, -] - -[[package]] -name = "litellm" -version = "1.80.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "aiohttp" }, - { name = "click" }, - { name = "fastuuid" }, - { name = "grpcio" }, - { name = "httpx" }, - { name = "importlib-metadata" }, - { name = "jinja2" }, - { name = "jsonschema" }, - { name = "openai" }, - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "tiktoken" }, - { name = "tokenizers" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a5/3f/af532014449c3931ae6cad2d97d267dd43d0de006060a8cbf0962e004024/litellm-1.80.7.tar.gz", hash = "sha256:3977a8d195aef842d01c18bf9e22984829363c6a4b54daf9a43c9dd9f190b42c", size = 12023127, upload-time = "2025-11-27T23:03:52.474Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/54/e0/2e60a0c09235fd7b55297390c557923f3c35a9cf001914222c26a7857d2b/litellm-1.80.7-py3-none-any.whl", hash = "sha256:f7d993f78c1e0e4e1202b2a925cc6540b55b6e5fb055dd342d88b145ab3102ed", size = 10848321, upload-time = "2025-11-27T23:03:50.002Z" }, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mdurl" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = 
"sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, -] - -[[package]] -name = "markupsafe" -version = "3.0.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, - { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, - { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, - { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, - { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, - { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, - { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, - { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, - { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, - { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, - { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, - { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, - { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, - { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, - { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, - { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, - { url = 
"https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, - { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, - { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, - { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, - { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, - { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, - { url = 
"https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, - { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, - { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, - { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, - { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, - { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, -] - -[[package]] -name = "matplotlib-inline" -version = "0.2.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = 
"traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c7/74/97e72a36efd4ae2bccb3463284300f8953f199b5ffbc04cbbb0ec78f74b1/matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe", size = 8110, upload-time = "2025-10-23T09:00:22.126Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" }, -] - -[[package]] -name = "mdit-py-plugins" -version = "0.5.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markdown-it-py" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" }, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = 
"sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, -] - -[[package]] -name = "mistune" -version = "3.1.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/02/a7fb8b21d4d55ac93cdcde9d3638da5dd0ebdd3a4fed76c7725e10b81cbe/mistune-3.1.4.tar.gz", hash = "sha256:b5a7f801d389f724ec702840c11d8fc48f2b33519102fc7ee739e8177b672164", size = 94588, upload-time = "2025-08-29T07:20:43.594Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/f0/8282d9641415e9e33df173516226b404d367a0fc55e1a60424a152913abc/mistune-3.1.4-py3-none-any.whl", hash = "sha256:93691da911e5d9d2e23bc54472892aff676df27a75274962ff9edc210364266d", size = 53481, upload-time = "2025-08-29T07:20:42.218Z" }, -] - -[[package]] -name = "ml-dtypes" -version = "0.5.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0e/4a/c27b42ed9b1c7d13d9ba8b6905dece787d6259152f2309338aed29b2447b/ml_dtypes-0.5.4.tar.gz", hash = "sha256:8ab06a50fb9bf9666dd0fe5dfb4676fa2b0ac0f31ecff72a6c3af8e22c063453", size = 692314, upload-time = "2025-11-17T22:32:31.031Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/b8/3c70881695e056f8a32f8b941126cf78775d9a4d7feba8abcb52cb7b04f2/ml_dtypes-0.5.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a174837a64f5b16cab6f368171a1a03a27936b31699d167684073ff1c4237dac", size = 676927, upload-time = "2025-11-17T22:31:48.182Z" }, - { url = "https://files.pythonhosted.org/packages/54/0f/428ef6881782e5ebb7eca459689448c0394fa0a80bea3aa9262cba5445ea/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a7f7c643e8b1320fd958bf098aa7ecf70623a42ec5154e3be3be673f4c34d900", size = 5028464, upload-time = "2025-11-17T22:31:50.135Z" }, - { url = 
"https://files.pythonhosted.org/packages/3a/cb/28ce52eb94390dda42599c98ea0204d74799e4d8047a0eb559b6fd648056/ml_dtypes-0.5.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ad459e99793fa6e13bd5b7e6792c8f9190b4e5a1b45c63aba14a4d0a7f1d5ff", size = 5009002, upload-time = "2025-11-17T22:31:52.001Z" }, - { url = "https://files.pythonhosted.org/packages/f5/f0/0cfadd537c5470378b1b32bd859cf2824972174b51b873c9d95cfd7475a5/ml_dtypes-0.5.4-cp312-cp312-win_amd64.whl", hash = "sha256:c1a953995cccb9e25a4ae19e34316671e4e2edaebe4cf538229b1fc7109087b7", size = 212222, upload-time = "2025-11-17T22:31:53.742Z" }, - { url = "https://files.pythonhosted.org/packages/16/2e/9acc86985bfad8f2c2d30291b27cd2bb4c74cea08695bd540906ed744249/ml_dtypes-0.5.4-cp312-cp312-win_arm64.whl", hash = "sha256:9bad06436568442575beb2d03389aa7456c690a5b05892c471215bfd8cf39460", size = 160793, upload-time = "2025-11-17T22:31:55.358Z" }, - { url = "https://files.pythonhosted.org/packages/d9/a1/4008f14bbc616cfb1ac5b39ea485f9c63031c4634ab3f4cf72e7541f816a/ml_dtypes-0.5.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c760d85a2f82e2bed75867079188c9d18dae2ee77c25a54d60e9cc79be1bc48", size = 676888, upload-time = "2025-11-17T22:31:56.907Z" }, - { url = "https://files.pythonhosted.org/packages/d3/b7/dff378afc2b0d5a7d6cd9d3209b60474d9819d1189d347521e1688a60a53/ml_dtypes-0.5.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce756d3a10d0c4067172804c9cc276ba9cc0ff47af9078ad439b075d1abdc29b", size = 5036993, upload-time = "2025-11-17T22:31:58.497Z" }, - { url = "https://files.pythonhosted.org/packages/eb/33/40cd74219417e78b97c47802037cf2d87b91973e18bb968a7da48a96ea44/ml_dtypes-0.5.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:533ce891ba774eabf607172254f2e7260ba5f57bdd64030c9a4fcfbd99815d0d", size = 5010956, upload-time = "2025-11-17T22:31:59.931Z" }, - { url = 
"https://files.pythonhosted.org/packages/e1/8b/200088c6859d8221454825959df35b5244fa9bdf263fd0249ac5fb75e281/ml_dtypes-0.5.4-cp313-cp313-win_amd64.whl", hash = "sha256:f21c9219ef48ca5ee78402d5cc831bd58ea27ce89beda894428bc67a52da5328", size = 212224, upload-time = "2025-11-17T22:32:01.349Z" }, - { url = "https://files.pythonhosted.org/packages/8f/75/dfc3775cb36367816e678f69a7843f6f03bd4e2bcd79941e01ea960a068e/ml_dtypes-0.5.4-cp313-cp313-win_arm64.whl", hash = "sha256:35f29491a3e478407f7047b8a4834e4640a77d2737e0b294d049746507af5175", size = 160798, upload-time = "2025-11-17T22:32:02.864Z" }, - { url = "https://files.pythonhosted.org/packages/4f/74/e9ddb35fd1dd43b1106c20ced3f53c2e8e7fc7598c15638e9f80677f81d4/ml_dtypes-0.5.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:304ad47faa395415b9ccbcc06a0350800bc50eda70f0e45326796e27c62f18b6", size = 702083, upload-time = "2025-11-17T22:32:04.08Z" }, - { url = "https://files.pythonhosted.org/packages/74/f5/667060b0aed1aa63166b22897fdf16dca9eb704e6b4bbf86848d5a181aa7/ml_dtypes-0.5.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a0df4223b514d799b8a1629c65ddc351b3efa833ccf7f8ea0cf654a61d1e35d", size = 5354111, upload-time = "2025-11-17T22:32:05.546Z" }, - { url = "https://files.pythonhosted.org/packages/40/49/0f8c498a28c0efa5f5c95a9e374c83ec1385ca41d0e85e7cf40e5d519a21/ml_dtypes-0.5.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531eff30e4d368cb6255bc2328d070e35836aa4f282a0fb5f3a0cd7260257298", size = 5366453, upload-time = "2025-11-17T22:32:07.115Z" }, - { url = "https://files.pythonhosted.org/packages/8c/27/12607423d0a9c6bbbcc780ad19f1f6baa2b68b18ce4bddcdc122c4c68dc9/ml_dtypes-0.5.4-cp313-cp313t-win_amd64.whl", hash = "sha256:cb73dccfc991691c444acc8c0012bee8f2470da826a92e3a20bb333b1a7894e6", size = 225612, upload-time = "2025-11-17T22:32:08.615Z" }, - { url = 
"https://files.pythonhosted.org/packages/e5/80/5a5929e92c72936d5b19872c5fb8fc09327c1da67b3b68c6a13139e77e20/ml_dtypes-0.5.4-cp313-cp313t-win_arm64.whl", hash = "sha256:3bbbe120b915090d9dd1375e4684dd17a20a2491ef25d640a908281da85e73f1", size = 164145, upload-time = "2025-11-17T22:32:09.782Z" }, -] - -[[package]] -name = "mpmath" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/47/dd32fa426cc72114383ac549964eecb20ecfd886d1e5ccf5340b55b02f57/mpmath-1.3.0.tar.gz", hash = "sha256:7a28eb2a9774d00c7bc92411c19a89209d5da7c4c9a9e227be8330a23a25b91f", size = 508106, upload-time = "2023-03-07T16:47:11.061Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, -] - -[[package]] -name = "multidict" -version = "6.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, - { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = 
"2025-10-06T14:49:22.054Z" }, - { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, - { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, - { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, - { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, - { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, - { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, - { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, - { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, - { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, - { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, - { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, - { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, - { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, - { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, - { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, - { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, - { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, - { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, - { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, - { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, - { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" }, - { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, - { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, - { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, - { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" }, - { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = "2025-10-06T14:50:10.714Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, - { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", 
size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, - { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, - { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, - { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, - { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, - { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, - { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, - { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, - { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, - { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, - { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" }, - { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, - { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, - { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, - { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, - { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = "2025-10-06T14:50:39.574Z" }, - { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, - { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, - { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, - { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, - { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, - { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, -] - -[[package]] -name = "multipledispatch" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/3e/a62c3b824c7dec33c4a1578bcc842e6c30300051033a4e5975ed86cc2536/multipledispatch-1.0.0.tar.gz", hash = "sha256:5c839915465c68206c3e9c473357908216c28383b425361e5d144594bf85a7e0", 
size = 12385, upload-time = "2023-06-27T16:45:11.074Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/c0/00c9809d8b9346eb238a6bbd5f83e846a4ce4503da94a4c08cb7284c325b/multipledispatch-1.0.0-py3-none-any.whl", hash = "sha256:0c53cd8b077546da4e48869f49b13164bebafd0c2a5afceb6bb6a316e7fb46e4", size = 12818, upload-time = "2023-06-27T16:45:09.418Z" }, -] - -[[package]] -name = "mypy" -version = "1.19.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "librt" }, - { name = "mypy-extensions" }, - { name = "pathspec" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f9/b5/b58cdc25fadd424552804bf410855d52324183112aa004f0732c5f6324cf/mypy-1.19.0.tar.gz", hash = "sha256:f6b874ca77f733222641e5c46e4711648c4037ea13646fd0cdc814c2eaec2528", size = 3579025, upload-time = "2025-11-28T15:49:01.26Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/7e/1afa8fb188b876abeaa14460dc4983f909aaacaa4bf5718c00b2c7e0b3d5/mypy-1.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fb3115cb8fa7c5f887c8a8d81ccdcb94cff334684980d847e5a62e926910e1d", size = 13207728, upload-time = "2025-11-28T15:46:26.463Z" }, - { url = "https://files.pythonhosted.org/packages/b2/13/f103d04962bcbefb1644f5ccb235998b32c337d6c13145ea390b9da47f3e/mypy-1.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3e19e3b897562276bb331074d64c076dbdd3e79213f36eed4e592272dabd760", size = 12202945, upload-time = "2025-11-28T15:48:49.143Z" }, - { url = "https://files.pythonhosted.org/packages/e4/93/a86a5608f74a22284a8ccea8592f6e270b61f95b8588951110ad797c2ddd/mypy-1.19.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9d491295825182fba01b6ffe2c6fe4e5a49dbf4e2bb4d1217b6ced3b4797bc6", size = 12718673, upload-time = "2025-11-28T15:47:37.193Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/58/cf08fff9ced0423b858f2a7495001fda28dc058136818ee9dffc31534ea9/mypy-1.19.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6016c52ab209919b46169651b362068f632efcd5eb8ef9d1735f6f86da7853b2", size = 13608336, upload-time = "2025-11-28T15:48:32.625Z" }, - { url = "https://files.pythonhosted.org/packages/64/ed/9c509105c5a6d4b73bb08733102a3ea62c25bc02c51bca85e3134bf912d3/mypy-1.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f188dcf16483b3e59f9278c4ed939ec0254aa8a60e8fc100648d9ab5ee95a431", size = 13833174, upload-time = "2025-11-28T15:45:48.091Z" }, - { url = "https://files.pythonhosted.org/packages/cd/71/01939b66e35c6f8cb3e6fdf0b657f0fd24de2f8ba5e523625c8e72328208/mypy-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:0e3c3d1e1d62e678c339e7ade72746a9e0325de42cd2cccc51616c7b2ed1a018", size = 10112208, upload-time = "2025-11-28T15:46:41.702Z" }, - { url = "https://files.pythonhosted.org/packages/cb/0d/a1357e6bb49e37ce26fcf7e3cc55679ce9f4ebee0cd8b6ee3a0e301a9210/mypy-1.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7686ed65dbabd24d20066f3115018d2dce030d8fa9db01aa9f0a59b6813e9f9e", size = 13191993, upload-time = "2025-11-28T15:47:22.336Z" }, - { url = "https://files.pythonhosted.org/packages/5d/75/8e5d492a879ec4490e6ba664b5154e48c46c85b5ac9785792a5ec6a4d58f/mypy-1.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd4a985b2e32f23bead72e2fb4bbe5d6aceee176be471243bd831d5b2644672d", size = 12174411, upload-time = "2025-11-28T15:44:55.492Z" }, - { url = "https://files.pythonhosted.org/packages/71/31/ad5dcee9bfe226e8eaba777e9d9d251c292650130f0450a280aec3485370/mypy-1.19.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc51a5b864f73a3a182584b1ac75c404396a17eced54341629d8bdcb644a5bba", size = 12727751, upload-time = "2025-11-28T15:44:14.169Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/06/b6b8994ce07405f6039701f4b66e9d23f499d0b41c6dd46ec28f96d57ec3/mypy-1.19.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37af5166f9475872034b56c5efdcf65ee25394e9e1d172907b84577120714364", size = 13593323, upload-time = "2025-11-28T15:46:34.699Z" }, - { url = "https://files.pythonhosted.org/packages/68/b1/126e274484cccdf099a8e328d4fda1c7bdb98a5e888fa6010b00e1bbf330/mypy-1.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:510c014b722308c9bd377993bcbf9a07d7e0692e5fa8fc70e639c1eb19fc6bee", size = 13818032, upload-time = "2025-11-28T15:46:18.286Z" }, - { url = "https://files.pythonhosted.org/packages/f8/56/53a8f70f562dfc466c766469133a8a4909f6c0012d83993143f2a9d48d2d/mypy-1.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:cabbee74f29aa9cd3b444ec2f1e4fa5a9d0d746ce7567a6a609e224429781f53", size = 10120644, upload-time = "2025-11-28T15:47:43.99Z" }, - { url = "https://files.pythonhosted.org/packages/09/0e/fe228ed5aeab470c6f4eb82481837fadb642a5aa95cc8215fd2214822c10/mypy-1.19.0-py3-none-any.whl", hash = "sha256:0c01c99d626380752e527d5ce8e69ffbba2046eb8a060db0329690849cf9b6f9", size = 2469714, upload-time = "2025-11-28T15:45:33.22Z" }, -] - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, -] - -[[package]] -name = 
"myst-parser" -version = "4.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "jinja2" }, - { name = "markdown-it-py" }, - { name = "mdit-py-plugins" }, - { name = "pyyaml" }, - { name = "sphinx" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985, upload-time = "2025-02-12T10:53:03.833Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579, upload-time = "2025-02-12T10:53:02.078Z" }, -] - -[[package]] -name = "nbclient" -version = "0.10.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jupyter-client" }, - { name = "jupyter-core" }, - { name = "nbformat" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/87/66/7ffd18d58eae90d5721f9f39212327695b749e23ad44b3881744eaf4d9e8/nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193", size = 62424, upload-time = "2024-12-19T10:32:27.164Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/34/6d/e7fa07f03a4a7b221d94b4d586edb754a9b0dc3c9e2c93353e9fa4e0d117/nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d", size = 25434, upload-time = "2024-12-19T10:32:24.139Z" }, -] - -[[package]] -name = "nbconvert" -version = "7.16.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "beautifulsoup4" }, - { name = "bleach", extra = ["css"] }, - { name = "defusedxml" }, - { name = "jinja2" }, - { name = "jupyter-core" }, - { name = 
"jupyterlab-pygments" }, - { name = "markupsafe" }, - { name = "mistune" }, - { name = "nbclient" }, - { name = "nbformat" }, - { name = "packaging" }, - { name = "pandocfilters" }, - { name = "pygments" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" }, -] - -[[package]] -name = "nbformat" -version = "5.10.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "fastjsonschema" }, - { name = "jsonschema" }, - { name = "jupyter-core" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" }, -] - -[[package]] -name = "nbqa" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "autopep8" }, - { name = "ipython" }, - { name = "tokenize-rt" }, - { name = "tomli" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/aa/76/62d2609924cf34445148cd6b5de694cf64c179cc416cac93182579620e57/nbqa-1.9.1.tar.gz", hash = "sha256:a1f4bcf587c597302fed295951001fc4e1be4ce0e77e1ab1b25ac2fbe3db0cdd", size = 38348, upload-time = "2024-11-10T12:21:58.333Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/28/88/4789719fbbe166d12d345b3ac66b96105f10001b16e00a9765ba29261a21/nbqa-1.9.1-py3-none-any.whl", hash = "sha256:95552d2f6c2c038136252a805aa78d85018aef922586270c3a074332737282e5", size = 35259, upload-time = "2024-11-10T12:21:56.731Z" }, -] - -[[package]] -name = "nbsphinx" -version = "0.9.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "jinja2" }, - { name = "nbconvert" }, - { name = "nbformat" }, - { name = "sphinx" }, - { name = "traitlets" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e7/d1/82081750f8a78ad0399c6ed831d42623b891904e8e7b8a75878225cf1dce/nbsphinx-0.9.8.tar.gz", hash = "sha256:d0765908399a8ee2b57be7ae881cf2ea58d66db3af7bbf33e6eb48f83bea5495", size = 417469, upload-time = "2025-11-28T17:41:02.336Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/03/78/843bcf0cf31f88d2f8a9a063d2d80817b1901657d83d65b89b3aa835732e/nbsphinx-0.9.8-py3-none-any.whl", hash = "sha256:92d95ee91784e56bc633b60b767a6b6f23a0445f891e24641ce3c3f004759ccf", size = 31961, upload-time = "2025-11-28T17:41:00.796Z" }, -] - -[[package]] -name = "nbval" -version = "0.11.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "coverage" }, - { name = "ipykernel" }, - { name = "jupyter-client" }, - { name = "nbformat" }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/28/be/22bd64d09e0cb53258f83b6fc455f05f18a78e3e5c109ccb6af42f1f49a2/nbval-0.11.0.tar.gz", hash = "sha256:77c95797607b0a968babd2597ee3494102d25c3ad37435debbdac0e46e379094", size = 62718, upload-time = "2024-03-04T14:36:58.256Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2c/5c/eb1e3ce54c4e94c7734b3831756c63f21badb3de91a98d77b9e23c0ca76a/nbval-0.11.0-py2.py3-none-any.whl", hash = "sha256:307aecc866c9a1e8a13bb5bbb008a702bacfda2394dff6fe504a3108a58042a0", size = 24013, upload-time = "2024-03-04T14:36:57.126Z" }, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, -] - -[[package]] -name = "networkx" -version = "3.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/fc/7b6fd4d22c8c4dc5704430140d8b3f520531d4fe7328b8f8d03f5a7950e8/networkx-3.6.tar.gz", hash = "sha256:285276002ad1f7f7da0f7b42f004bcba70d381e936559166363707fdad3d72ad", size = 2511464, upload-time = "2025-11-24T03:03:47.158Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/c7/d64168da60332c17d24c0d2f08bdf3987e8d1ae9d84b5bbd0eec2eb26a55/networkx-3.6-py3-none-any.whl", hash = "sha256:cdb395b105806062473d3be36458d8f1459a4e4b98e236a66c3a48996e07684f", size = 2063713, upload-time = "2025-11-24T03:03:45.21Z" }, -] - -[[package]] -name = "numpy" -version = "2.3.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = 
"sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873, upload-time = "2025-11-16T22:49:49.84Z" }, - { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838, upload-time = "2025-11-16T22:49:52.863Z" }, - { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378, upload-time = "2025-11-16T22:49:55.055Z" }, - { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559, upload-time = "2025-11-16T22:49:57.371Z" }, - { url = "https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702, upload-time = "2025-11-16T22:49:59.632Z" }, - { url = "https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size 
= 16606086, upload-time = "2025-11-16T22:50:02.127Z" }, - { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985, upload-time = "2025-11-16T22:50:04.536Z" }, - { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976, upload-time = "2025-11-16T22:50:07.557Z" }, - { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274, upload-time = "2025-11-16T22:50:10.746Z" }, - { url = "https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922, upload-time = "2025-11-16T22:50:12.811Z" }, - { url = "https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667, upload-time = "2025-11-16T22:50:16.16Z" }, - { url = "https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251, upload-time = "2025-11-16T22:50:19.013Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652, upload-time = "2025-11-16T22:50:21.487Z" }, - { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172, upload-time = "2025-11-16T22:50:24.562Z" }, - { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990, upload-time = "2025-11-16T22:50:26.47Z" }, - { url = "https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902, upload-time = "2025-11-16T22:50:28.861Z" }, - { url = "https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430, upload-time = "2025-11-16T22:50:31.56Z" }, - { url = "https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551, upload-time = "2025-11-16T22:50:34.242Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275, upload-time = "2025-11-16T22:50:37.651Z" }, - { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637, upload-time = "2025-11-16T22:50:40.11Z" }, - { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090, upload-time = "2025-11-16T22:50:42.503Z" }, - { url = "https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710, upload-time = "2025-11-16T22:50:44.971Z" }, - { url = "https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292, upload-time = "2025-11-16T22:50:47.715Z" }, - { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897, upload-time = "2025-11-16T22:50:51.327Z" }, - { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = 
"sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391, upload-time = "2025-11-16T22:50:54.542Z" }, - { url = "https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275, upload-time = "2025-11-16T22:50:56.794Z" }, - { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855, upload-time = "2025-11-16T22:50:59.208Z" }, - { url = "https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359, upload-time = "2025-11-16T22:51:01.991Z" }, - { url = "https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374, upload-time = "2025-11-16T22:51:05.291Z" }, - { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size = 18594587, upload-time = "2025-11-16T22:51:08.585Z" }, - { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940, 
upload-time = "2025-11-16T22:51:11.541Z" }, - { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341, upload-time = "2025-11-16T22:51:14.312Z" }, - { url = "https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507, upload-time = "2025-11-16T22:51:16.846Z" }, -] - -[[package]] -name = "numpyro" -version = "0.19.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jax" }, - { name = "jaxlib" }, - { name = "multipledispatch" }, - { name = "numpy" }, - { name = "tqdm" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/02/7c/5d1b55401b023b58f792483c71690315d4d5cd1653fd3631fa5bcbd68601/numpyro-0.19.0.tar.gz", hash = "sha256:bbf5b772a6ba8b7a79448fa6787afb069e5eb2dff8295078c3ec04d3e6276742", size = 404421, upload-time = "2025-08-05T10:26:33.055Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/31/9b5da5995988437756bc3f1eead2e314d8916259875c6924cb41692f2b41/numpyro-0.19.0-py3-none-any.whl", hash = "sha256:1063a2c131a0785719e13c8e55f1b82e41850d814df149418097531f4dbdeda8", size = 370906, upload-time = "2025-08-05T10:26:31.35Z" }, -] - -[[package]] -name = "nvidia-cublas-cu12" -version = "12.8.4.1" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/61/e24b560ab2e2eaeb3c839129175fb330dfcfc29e5203196e5541a4c44682/nvidia_cublas_cu12-12.8.4.1-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:8ac4e771d5a348c551b2a426eda6193c19aa630236b418086020df5ba9667142", size = 594346921, upload-time = "2025-03-07T01:44:31.254Z" }, -] - -[[package]] -name = "nvidia-cuda-cupti-cu12" 
-version = "12.8.90" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/02/2adcaa145158bf1a8295d83591d22e4103dbfd821bcaf6f3f53151ca4ffa/nvidia_cuda_cupti_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea0cb07ebda26bb9b29ba82cda34849e73c166c18162d3913575b0c9db9a6182", size = 10248621, upload-time = "2025-03-07T01:40:21.213Z" }, -] - -[[package]] -name = "nvidia-cuda-nvrtc-cu12" -version = "12.8.93" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/6b/32f747947df2da6994e999492ab306a903659555dddc0fbdeb9d71f75e52/nvidia_cuda_nvrtc_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:a7756528852ef889772a84c6cd89d41dfa74667e24cca16bb31f8f061e3e9994", size = 88040029, upload-time = "2025-03-07T01:42:13.562Z" }, -] - -[[package]] -name = "nvidia-cuda-runtime-cu12" -version = "12.8.90" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/9b/a997b638fcd068ad6e4d53b8551a7d30fe8b404d6f1804abf1df69838932/nvidia_cuda_runtime_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:adade8dcbd0edf427b7204d480d6066d33902cab2a4707dcfc48a2d0fd44ab90", size = 954765, upload-time = "2025-03-07T01:40:01.615Z" }, -] - -[[package]] -name = "nvidia-cudnn-cu12" -version = "9.10.2.21" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nvidia-cublas-cu12" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/51/e123d997aa098c61d029f76663dedbfb9bc8dcf8c60cbd6adbe42f76d049/nvidia_cudnn_cu12-9.10.2.21-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:949452be657fa16687d0930933f032835951ef0892b37d2d53824d1a84dc97a8", size = 706758467, upload-time = "2025-06-06T21:54:08.597Z" }, -] - -[[package]] -name = "nvidia-cufft-cu12" -version = "11.3.3.83" -source = 
{ registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nvidia-nvjitlink-cu12" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/1f/13/ee4e00f30e676b66ae65b4f08cb5bcbb8392c03f54f2d5413ea99a5d1c80/nvidia_cufft_cu12-11.3.3.83-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d2dd21ec0b88cf61b62e6b43564355e5222e4a3fb394cac0db101f2dd0d4f74", size = 193118695, upload-time = "2025-03-07T01:45:27.821Z" }, -] - -[[package]] -name = "nvidia-cufile-cu12" -version = "1.13.1.3" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/fe/1bcba1dfbfb8d01be8d93f07bfc502c93fa23afa6fd5ab3fc7c1df71038a/nvidia_cufile_cu12-1.13.1.3-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1d069003be650e131b21c932ec3d8969c1715379251f8d23a1860554b1cb24fc", size = 1197834, upload-time = "2025-03-07T01:45:50.723Z" }, -] - -[[package]] -name = "nvidia-curand-cu12" -version = "10.3.9.90" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/aa/6584b56dc84ebe9cf93226a5cde4d99080c8e90ab40f0c27bda7a0f29aa1/nvidia_curand_cu12-10.3.9.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:b32331d4f4df5d6eefa0554c565b626c7216f87a06a4f56fab27c3b68a830ec9", size = 63619976, upload-time = "2025-03-07T01:46:23.323Z" }, -] - -[[package]] -name = "nvidia-cusolver-cu12" -version = "11.7.3.90" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nvidia-cublas-cu12" }, - { name = "nvidia-cusparse-cu12" }, - { name = "nvidia-nvjitlink-cu12" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/48/9a13d2975803e8cf2777d5ed57b87a0b6ca2cc795f9a4f59796a910bfb80/nvidia_cusolver_cu12-11.7.3.90-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:4376c11ad263152bd50ea295c05370360776f8c3427b30991df774f9fb26c450", size = 267506905, upload-time = "2025-03-07T01:47:16.273Z" }, -] - 
-[[package]] -name = "nvidia-cusparse-cu12" -version = "12.5.8.93" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nvidia-nvjitlink-cu12" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/f5/e1854cb2f2bcd4280c44736c93550cc300ff4b8c95ebe370d0aa7d2b473d/nvidia_cusparse_cu12-12.5.8.93-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ec05d76bbbd8b61b06a80e1eaf8cf4959c3d4ce8e711b65ebd0443bb0ebb13b", size = 288216466, upload-time = "2025-03-07T01:48:13.779Z" }, -] - -[[package]] -name = "nvidia-cusparselt-cu12" -version = "0.7.1" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/56/79/12978b96bd44274fe38b5dde5cfb660b1d114f70a65ef962bcbbed99b549/nvidia_cusparselt_cu12-0.7.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f1bb701d6b930d5a7cea44c19ceb973311500847f81b634d802b7b539dc55623", size = 287193691, upload-time = "2025-02-26T00:15:44.104Z" }, -] - -[[package]] -name = "nvidia-nccl-cu12" -version = "2.27.5" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/89/f7a07dc961b60645dbbf42e80f2bc85ade7feb9a491b11a1e973aa00071f/nvidia_nccl_cu12-2.27.5-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ad730cf15cb5d25fe849c6e6ca9eb5b76db16a80f13f425ac68d8e2e55624457", size = 322348229, upload-time = "2025-06-26T04:11:28.385Z" }, -] - -[[package]] -name = "nvidia-nvjitlink-cu12" -version = "12.8.93" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/74/86a07f1d0f42998ca31312f998bd3b9a7eff7f52378f4f270c8679c77fb9/nvidia_nvjitlink_cu12-12.8.93-py3-none-manylinux2010_x86_64.manylinux_2_12_x86_64.whl", hash = "sha256:81ff63371a7ebd6e6451970684f916be2eab07321b73c9d244dc2b4da7f73b88", size = 39254836, upload-time = "2025-03-07T01:49:55.661Z" }, -] - -[[package]] -name = 
"nvidia-nvshmem-cu12" -version = "3.3.20" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/6c/99acb2f9eb85c29fc6f3a7ac4dccfd992e22666dd08a642b303311326a97/nvidia_nvshmem_cu12-3.3.20-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d00f26d3f9b2e3c3065be895e3059d6479ea5c638a3f38c9fec49b1b9dd7c1e5", size = 124657145, upload-time = "2025-08-04T20:25:19.995Z" }, -] - -[[package]] -name = "nvidia-nvtx-cu12" -version = "12.8.90" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/eb/86626c1bbc2edb86323022371c39aa48df6fd8b0a1647bc274577f72e90b/nvidia_nvtx_cu12-12.8.90-py3-none-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5b17e2001cc0d751a5bc2c6ec6d26ad95913324a4adb86788c944f8ce9ba441f", size = 89954, upload-time = "2025-03-07T01:42:44.131Z" }, -] - -[[package]] -name = "openai" -version = "2.8.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "distro" }, - { name = "httpx" }, - { name = "jiter" }, - { name = "pydantic" }, - { name = "sniffio" }, - { name = "tqdm" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d5/e4/42591e356f1d53c568418dc7e30dcda7be31dd5a4d570bca22acb0525862/openai-2.8.1.tar.gz", hash = "sha256:cb1b79eef6e809f6da326a7ef6038719e35aa944c42d081807bfa1be8060f15f", size = 602490, upload-time = "2025-11-17T22:39:59.549Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/55/4f/dbc0c124c40cb390508a82770fb9f6e3ed162560181a85089191a851c59a/openai-2.8.1-py3-none-any.whl", hash = "sha256:c6c3b5a04994734386e8dad3c00a393f56d3b68a27cd2e8acae91a59e4122463", size = 1022688, upload-time = "2025-11-17T22:39:57.675Z" }, -] - -[[package]] -name = "opt-einsum" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/8c/b9/2ac072041e899a52f20cf9510850ff58295003aa75525e58343591b0cbfb/opt_einsum-3.4.0.tar.gz", hash = "sha256:96ca72f1b886d148241348783498194c577fa30a8faac108586b14f1ba4473ac", size = 63004, upload-time = "2024-09-26T14:33:24.483Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/23/cd/066e86230ae37ed0be70aae89aabf03ca8d9f39c8aea0dec8029455b5540/opt_einsum-3.4.0-py3-none-any.whl", hash = "sha256:69bb92469f86a1565195ece4ac0323943e83477171b91d24c35afe028a90d7cd", size = 71932, upload-time = "2024-09-26T14:33:23.039Z" }, -] - -[[package]] -name = "packaging" -version = "25.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, -] - -[[package]] -name = "pandocfilters" -version = "1.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454, upload-time = "2024-01-18T20:08:13.726Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663, upload-time = "2024-01-18T20:08:11.28Z" }, -] - 
-[[package]] -name = "parso" -version = "0.8.5" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" }, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "ptyprocess" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", 
hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" }, -] - -[[package]] -name = "pillow" -version = "12.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/cace85a1b0c9775a9f8f5d5423c8261c858760e2466c79b2dd184638b056/pillow-12.0.0.tar.gz", hash = "sha256:87d4f8125c9988bfbed67af47dd7a953e2fc7b0cc1e7800ec6d2080d490bb353", size = 47008828, upload-time = "2025-10-15T18:24:14.008Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/90/4fcce2c22caf044e660a198d740e7fbc14395619e3cb1abad12192c0826c/pillow-12.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:53561a4ddc36facb432fae7a9d8afbfaf94795414f5cdc5fc52f28c1dca90371", size = 5249377, upload-time = "2025-10-15T18:22:05.993Z" }, - { url = "https://files.pythonhosted.org/packages/fd/e0/ed960067543d080691d47d6938ebccbf3976a931c9567ab2fbfab983a5dd/pillow-12.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:71db6b4c1653045dacc1585c1b0d184004f0d7e694c7b34ac165ca70c0838082", size = 4650343, upload-time = "2025-10-15T18:22:07.718Z" }, - { url = "https://files.pythonhosted.org/packages/e7/a1/f81fdeddcb99c044bf7d6faa47e12850f13cee0849537a7d27eeab5534d4/pillow-12.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2fa5f0b6716fc88f11380b88b31fe591a06c6315e955c096c35715788b339e3f", size = 6232981, upload-time = "2025-10-15T18:22:09.287Z" }, - { url = "https://files.pythonhosted.org/packages/88/e1/9098d3ce341a8750b55b0e00c03f1630d6178f38ac191c81c97a3b047b44/pillow-12.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:82240051c6ca513c616f7f9da06e871f61bfd7805f566275841af15015b8f98d", size = 8041399, upload-time = "2025-10-15T18:22:10.872Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/62/a22e8d3b602ae8cc01446d0c57a54e982737f44b6f2e1e019a925143771d/pillow-12.0.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:55f818bd74fe2f11d4d7cbc65880a843c4075e0ac7226bc1a23261dbea531953", size = 6347740, upload-time = "2025-10-15T18:22:12.769Z" }, - { url = "https://files.pythonhosted.org/packages/4f/87/424511bdcd02c8d7acf9f65caa09f291a519b16bd83c3fb3374b3d4ae951/pillow-12.0.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b87843e225e74576437fd5b6a4c2205d422754f84a06942cfaf1dc32243e45a8", size = 7040201, upload-time = "2025-10-15T18:22:14.813Z" }, - { url = "https://files.pythonhosted.org/packages/dc/4d/435c8ac688c54d11755aedfdd9f29c9eeddf68d150fe42d1d3dbd2365149/pillow-12.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c607c90ba67533e1b2355b821fef6764d1dd2cbe26b8c1005ae84f7aea25ff79", size = 6462334, upload-time = "2025-10-15T18:22:16.375Z" }, - { url = "https://files.pythonhosted.org/packages/2b/f2/ad34167a8059a59b8ad10bc5c72d4d9b35acc6b7c0877af8ac885b5f2044/pillow-12.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21f241bdd5080a15bc86d3466a9f6074a9c2c2b314100dd896ac81ee6db2f1ba", size = 7134162, upload-time = "2025-10-15T18:22:17.996Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b1/a7391df6adacf0a5c2cf6ac1cf1fcc1369e7d439d28f637a847f8803beb3/pillow-12.0.0-cp312-cp312-win32.whl", hash = "sha256:dd333073e0cacdc3089525c7df7d39b211bcdf31fc2824e49d01c6b6187b07d0", size = 6298769, upload-time = "2025-10-15T18:22:19.923Z" }, - { url = "https://files.pythonhosted.org/packages/a2/0b/d87733741526541c909bbf159e338dcace4f982daac6e5a8d6be225ca32d/pillow-12.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe611163f6303d1619bbcb653540a4d60f9e55e622d60a3108be0d5b441017a", size = 7001107, upload-time = "2025-10-15T18:22:21.644Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/96/aaa61ce33cc98421fb6088af2a03be4157b1e7e0e87087c888e2370a7f45/pillow-12.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:7dfb439562f234f7d57b1ac6bc8fe7f838a4bd49c79230e0f6a1da93e82f1fad", size = 2436012, upload-time = "2025-10-15T18:22:23.621Z" }, - { url = "https://files.pythonhosted.org/packages/62/f2/de993bb2d21b33a98d031ecf6a978e4b61da207bef02f7b43093774c480d/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:0869154a2d0546545cde61d1789a6524319fc1897d9ee31218eae7a60ccc5643", size = 4045493, upload-time = "2025-10-15T18:22:25.758Z" }, - { url = "https://files.pythonhosted.org/packages/0e/b6/bc8d0c4c9f6f111a783d045310945deb769b806d7574764234ffd50bc5ea/pillow-12.0.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:a7921c5a6d31b3d756ec980f2f47c0cfdbce0fc48c22a39347a895f41f4a6ea4", size = 4120461, upload-time = "2025-10-15T18:22:27.286Z" }, - { url = "https://files.pythonhosted.org/packages/5d/57/d60d343709366a353dc56adb4ee1e7d8a2cc34e3fbc22905f4167cfec119/pillow-12.0.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:1ee80a59f6ce048ae13cda1abf7fbd2a34ab9ee7d401c46be3ca685d1999a399", size = 3576912, upload-time = "2025-10-15T18:22:28.751Z" }, - { url = "https://files.pythonhosted.org/packages/a4/a4/a0a31467e3f83b94d37568294b01d22b43ae3c5d85f2811769b9c66389dd/pillow-12.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c50f36a62a22d350c96e49ad02d0da41dbd17ddc2e29750dbdba4323f85eb4a5", size = 5249132, upload-time = "2025-10-15T18:22:30.641Z" }, - { url = "https://files.pythonhosted.org/packages/83/06/48eab21dd561de2914242711434c0c0eb992ed08ff3f6107a5f44527f5e9/pillow-12.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5193fde9a5f23c331ea26d0cf171fbf67e3f247585f50c08b3e205c7aeb4589b", size = 4650099, upload-time = "2025-10-15T18:22:32.73Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/bd/69ed99fd46a8dba7c1887156d3572fe4484e3f031405fcc5a92e31c04035/pillow-12.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bde737cff1a975b70652b62d626f7785e0480918dece11e8fef3c0cf057351c3", size = 6230808, upload-time = "2025-10-15T18:22:34.337Z" }, - { url = "https://files.pythonhosted.org/packages/ea/94/8fad659bcdbf86ed70099cb60ae40be6acca434bbc8c4c0d4ef356d7e0de/pillow-12.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a6597ff2b61d121172f5844b53f21467f7082f5fb385a9a29c01414463f93b07", size = 8037804, upload-time = "2025-10-15T18:22:36.402Z" }, - { url = "https://files.pythonhosted.org/packages/20/39/c685d05c06deecfd4e2d1950e9a908aa2ca8bc4e6c3b12d93b9cafbd7837/pillow-12.0.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b817e7035ea7f6b942c13aa03bb554fc44fea70838ea21f8eb31c638326584e", size = 6345553, upload-time = "2025-10-15T18:22:38.066Z" }, - { url = "https://files.pythonhosted.org/packages/38/57/755dbd06530a27a5ed74f8cb0a7a44a21722ebf318edbe67ddbd7fb28f88/pillow-12.0.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f4f1231b7dec408e8670264ce63e9c71409d9583dd21d32c163e25213ee2a344", size = 7037729, upload-time = "2025-10-15T18:22:39.769Z" }, - { url = "https://files.pythonhosted.org/packages/ca/b6/7e94f4c41d238615674d06ed677c14883103dce1c52e4af16f000338cfd7/pillow-12.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e51b71417049ad6ab14c49608b4a24d8fb3fe605e5dfabfe523b58064dc3d27", size = 6459789, upload-time = "2025-10-15T18:22:41.437Z" }, - { url = "https://files.pythonhosted.org/packages/9c/14/4448bb0b5e0f22dd865290536d20ec8a23b64e2d04280b89139f09a36bb6/pillow-12.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d120c38a42c234dc9a8c5de7ceaaf899cf33561956acb4941653f8bdc657aa79", size = 7130917, upload-time = "2025-10-15T18:22:43.152Z" }, - { url = 
"https://files.pythonhosted.org/packages/dd/ca/16c6926cc1c015845745d5c16c9358e24282f1e588237a4c36d2b30f182f/pillow-12.0.0-cp313-cp313-win32.whl", hash = "sha256:4cc6b3b2efff105c6a1656cfe59da4fdde2cda9af1c5e0b58529b24525d0a098", size = 6302391, upload-time = "2025-10-15T18:22:44.753Z" }, - { url = "https://files.pythonhosted.org/packages/6d/2a/dd43dcfd6dae9b6a49ee28a8eedb98c7d5ff2de94a5d834565164667b97b/pillow-12.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:4cf7fed4b4580601c4345ceb5d4cbf5a980d030fd5ad07c4d2ec589f95f09905", size = 7007477, upload-time = "2025-10-15T18:22:46.838Z" }, - { url = "https://files.pythonhosted.org/packages/77/f0/72ea067f4b5ae5ead653053212af05ce3705807906ba3f3e8f58ddf617e6/pillow-12.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:9f0b04c6b8584c2c193babcccc908b38ed29524b29dd464bc8801bf10d746a3a", size = 2435918, upload-time = "2025-10-15T18:22:48.399Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5e/9046b423735c21f0487ea6cb5b10f89ea8f8dfbe32576fe052b5ba9d4e5b/pillow-12.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7fa22993bac7b77b78cae22bad1e2a987ddf0d9015c63358032f84a53f23cdc3", size = 5251406, upload-time = "2025-10-15T18:22:49.905Z" }, - { url = "https://files.pythonhosted.org/packages/12/66/982ceebcdb13c97270ef7a56c3969635b4ee7cd45227fa707c94719229c5/pillow-12.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f135c702ac42262573fe9714dfe99c944b4ba307af5eb507abef1667e2cbbced", size = 4653218, upload-time = "2025-10-15T18:22:51.587Z" }, - { url = "https://files.pythonhosted.org/packages/16/b3/81e625524688c31859450119bf12674619429cab3119eec0e30a7a1029cb/pillow-12.0.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c85de1136429c524e55cfa4e033b4a7940ac5c8ee4d9401cc2d1bf48154bbc7b", size = 6266564, upload-time = "2025-10-15T18:22:53.215Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/59/dfb38f2a41240d2408096e1a76c671d0a105a4a8471b1871c6902719450c/pillow-12.0.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38df9b4bfd3db902c9c2bd369bcacaf9d935b2fff73709429d95cc41554f7b3d", size = 8069260, upload-time = "2025-10-15T18:22:54.933Z" }, - { url = "https://files.pythonhosted.org/packages/dc/3d/378dbea5cd1874b94c312425ca77b0f47776c78e0df2df751b820c8c1d6c/pillow-12.0.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d87ef5795da03d742bf49439f9ca4d027cde49c82c5371ba52464aee266699a", size = 6379248, upload-time = "2025-10-15T18:22:56.605Z" }, - { url = "https://files.pythonhosted.org/packages/84/b0/d525ef47d71590f1621510327acec75ae58c721dc071b17d8d652ca494d8/pillow-12.0.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aff9e4d82d082ff9513bdd6acd4f5bd359f5b2c870907d2b0a9c5e10d40c88fe", size = 7066043, upload-time = "2025-10-15T18:22:58.53Z" }, - { url = "https://files.pythonhosted.org/packages/61/2c/aced60e9cf9d0cde341d54bf7932c9ffc33ddb4a1595798b3a5150c7ec4e/pillow-12.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8d8ca2b210ada074d57fcee40c30446c9562e542fc46aedc19baf758a93532ee", size = 6490915, upload-time = "2025-10-15T18:23:00.582Z" }, - { url = "https://files.pythonhosted.org/packages/ef/26/69dcb9b91f4e59f8f34b2332a4a0a951b44f547c4ed39d3e4dcfcff48f89/pillow-12.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:99a7f72fb6249302aa62245680754862a44179b545ded638cf1fef59befb57ef", size = 7157998, upload-time = "2025-10-15T18:23:02.627Z" }, - { url = "https://files.pythonhosted.org/packages/61/2b/726235842220ca95fa441ddf55dd2382b52ab5b8d9c0596fe6b3f23dafe8/pillow-12.0.0-cp313-cp313t-win32.whl", hash = "sha256:4078242472387600b2ce8d93ade8899c12bf33fa89e55ec89fe126e9d6d5d9e9", size = 6306201, upload-time = "2025-10-15T18:23:04.709Z" }, - { url = 
"https://files.pythonhosted.org/packages/c0/3d/2afaf4e840b2df71344ababf2f8edd75a705ce500e5dc1e7227808312ae1/pillow-12.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2c54c1a783d6d60595d3514f0efe9b37c8808746a66920315bfd34a938d7994b", size = 7013165, upload-time = "2025-10-15T18:23:06.46Z" }, - { url = "https://files.pythonhosted.org/packages/6f/75/3fa09aa5cf6ed04bee3fa575798ddf1ce0bace8edb47249c798077a81f7f/pillow-12.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:26d9f7d2b604cd23aba3e9faf795787456ac25634d82cd060556998e39c6fa47", size = 2437834, upload-time = "2025-10-15T18:23:08.194Z" }, -] - -[[package]] -name = "platformdirs" -version = "4.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, -] - -[[package]] -name = "pluggy" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, 
-] - -[[package]] -name = "prettyprinter" -version = "0.18.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorful" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/97/41/967b5e033b5b50eebe0b8154a9e9827c517e244b9b612ec3357c40a4a33c/prettyprinter-0.18.0.tar.gz", hash = "sha256:9fe5da7ec53510881dd35d7a5c677ba45f34cfe6a8e78d1abd20652cf82139a8", size = 651884, upload-time = "2019-06-22T07:04:40.337Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/d0/9effbeca8f1b8df9d33154de3477a51e55a9c46cb15612dd7791a1624397/prettyprinter-0.18.0-py2.py3-none-any.whl", hash = "sha256:358a58f276cb312e3ca29d7a7f244c91e4e0bda7848249d30e4f36d2eb58b67c", size = 48013, upload-time = "2019-06-22T07:04:43.916Z" }, -] - -[[package]] -name = "prompt-toolkit" -version = "3.0.52" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wcwidth" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, -] - -[[package]] -name = "propcache" -version = "0.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, - { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, - { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, - { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, - { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, - { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = 
"2025-10-08T19:46:53.208Z" }, - { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, - { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, - { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, - { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, - { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, - { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, - { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, - { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, - { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, - { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, - { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, - { url = 
"https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, - { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, - { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, - { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, - { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, - { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size 
= 196920, upload-time = "2025-10-08T19:47:19.355Z" }, - { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, - { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, - { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, - { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, - { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, - { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, - { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, - { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, - { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, - { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, - { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = 
"2025-10-08T19:47:36.338Z" }, - { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, - { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, - { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, - { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, - { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, - { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, - { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, - { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, -] - -[[package]] -name = "psutil" -version = "7.1.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" }, - { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" }, - { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" }, - { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" }, - { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" }, - { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" }, - { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" }, - { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" }, - { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" }, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" } 
-wheels = [ - { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, -] - -[[package]] -name = "py-cpuinfo" -version = "9.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716, upload-time = "2022-10-25T20:38:06.303Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335, upload-time = "2022-10-25T20:38:27.636Z" }, -] - -[[package]] -name = "pybtex" -version = "0.25.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "latexcodec" }, - { name = "pyyaml" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/5f/bc/c2be05ca72f8c103670e983df8be26d1e288bc6556f487fa8cccaa27779f/pybtex-0.25.1.tar.gz", hash = "sha256:9eaf90267c7e83e225af89fea65c370afbf65f458220d3946a9e3049e1eca491", size = 406157, upload-time = "2025-06-26T13:27:41.903Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/25/68/ceb5d6679baa326261f5d3e5113d9cfed6efef2810afd9f18bffb8ed312b/pybtex-0.25.1-py2.py3-none-any.whl", hash = "sha256:9053b0d619409a0a83f38abad5d9921de5f7b3ede00742beafcd9f10ad0d8c5c", size = 127437, upload-time = "2025-06-26T13:27:43.585Z" }, -] - -[[package]] -name = "pybtex-docutils" -version = "1.0.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "pybtex" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7e/84/796ea94d26188a853660f81bded39f8de4cfe595130aef0dea1088705a11/pybtex-docutils-1.0.3.tar.gz", hash = "sha256:3a7ebdf92b593e00e8c1c538aa9a20bca5d92d84231124715acc964d51d93c6b", size = 18348, upload-time = "2023-08-22T18:47:54.833Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/b1/ce1f4596211efb5410e178a803f08e59b20bedb66837dcf41e21c54f9ec1/pybtex_docutils-1.0.3-py3-none-any.whl", hash = "sha256:8fd290d2ae48e32fcb54d86b0efb8d573198653c7e2447d5bec5847095f430b9", size = 6385, upload-time = "2023-08-22T06:43:20.513Z" }, -] - -[[package]] -name = "pycodestyle" -version = "2.14.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/e0/abfd2a0d2efe47670df87f3e3a0e2edda42f055053c85361f19c0e2c1ca8/pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783", size = 39472, upload-time = "2025-06-20T18:49:48.75Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/27/a58ddaf8c588a3ef080db9d0b7e0b97215cee3a45df74f3a94dbbf5c893a/pycodestyle-2.14.0-py2.py3-none-any.whl", hash = 
"sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d", size = 31594, upload-time = "2025-06-20T18:49:47.491Z" }, -] - -[[package]] -name = "pycparser" -version = "2.23" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, -] - -[[package]] -name = "pydantic" -version = "2.12.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, -] - -[[package]] -name = "pydantic-core" -version = "2.41.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, - { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, - { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, - { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, - { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, - { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, - { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, - { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, - { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, - { url = 
"https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, - { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, - { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, - { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, - { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, - { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, - { url = 
"https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, - { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, - { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, - { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = 
"2025-11-04T13:40:44.752Z" }, - { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, - { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, - { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, - { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, - { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, - { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, -] - -[[package]] -name = "pygments" -version = "2.19.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, -] - -[[package]] -name = "pypandoc-binary" -version = "1.15" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a3/59/ad3a065f512c3fc3bdf8d97d30e82fb40a292b3f38b06ef13f9c2f69df29/pypandoc_binary-1.15-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:12a43df66f59cb489a9a2659b6f049ec103132e43e4f284b7e33e7ae4dd9cadb", size = 22145734, upload-time = "2025-01-08T17:39:13.98Z" }, - { url = "https://files.pythonhosted.org/packages/59/d6/ef3b4927797bbf4146fdcb54b12a8ce766d077b44f37bcef0581a931a8a6/pypandoc_binary-1.15-py3-none-macosx_11_0_arm64.whl", hash = "sha256:79b215279a6470d3e2a24135826b446f44259f6abce361e31d713ac1b0b5aa69", size = 22145735, upload-time = "2025-01-08T17:39:21.562Z" }, - { url = "https://files.pythonhosted.org/packages/e1/74/970c50ef0f62b69afbae957d7be92072748b1f2d9b690c763c1afc81e326/pypandoc_binary-1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa29176f4eb9eb93f555dac6926a8d53c6ab4fd2deba3a1edb002e04899b237d", size = 35362526, upload-time = "2025-01-08T17:39:28.48Z" }, - { url = "https://files.pythonhosted.org/packages/cb/00/dc9377725392fa3c7d5f04dc42ab012d1cd40a0a0705b6eb794444559072/pypandoc_binary-1.15-py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df54edb00331a8fc80516c71cd81de69954512163037c3d4577c315091a0d6f6", size = 32649183, upload-time = "2025-01-08T17:39:34.414Z" }, - { url = "https://files.pythonhosted.org/packages/80/8b/341d6a298a97ba66b6a5f93583f03e027cb0e3322ce6d46082eca0e95c93/pypandoc_binary-1.15-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:e465a09e8aac18e8fda37516a6d1a891e51cdd1d066ab585f85a81620de575e2", size = 35362516, upload-time = "2025-01-08T17:39:42.438Z" }, - { url = "https://files.pythonhosted.org/packages/d9/a9/5c3c2fba01a008e7aa784268fa88b612c3ef94b89d60c1e838d96b5a1735/pypandoc_binary-1.15-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f9ebdf92059e9c1ae5231ee193d1fdafbc0188d5ec9d5f53e95fa21a42339481", size = 32649164, upload-time = "2025-01-08T17:39:49.381Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/a7/2295d4f1036cedbd27b4d6c220fe3bc40601b618245bfd5837623ecee4cb/pypandoc_binary-1.15-py3-none-win_amd64.whl", hash = "sha256:de7a234ffb674a4e650490acc7a5986161e2fd8b5bb106f1c9ffc30d76d2cf23", size = 38577212, upload-time = "2025-01-08T17:39:54.85Z" }, -] - -[[package]] -name = "pyro-api" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/25/d7/a0812f5c16b0d4464f80a64a44626c5fe200098070be0f32436dbb662775/pyro-api-0.1.2.tar.gz", hash = "sha256:a1b900d9580aa1c2fab3b123ab7ff33413744da7c5f440bd4aadc4d40d14d920", size = 7349, upload-time = "2020-05-15T16:17:41.501Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/81/957ae78e6398460a7230b0eb9b8f1cb954c5e913e868e48d89324c68cec7/pyro_api-0.1.2-py3-none-any.whl", hash = "sha256:10e0e42e9e4401ce464dab79c870e50dfb4f413d326fa777f3582928ef9caf8f", size = 11981, upload-time = "2020-05-15T16:17:40.492Z" }, -] - -[[package]] -name = "pyro-ppl" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, - { name = "opt-einsum" }, - { name = "pyro-api" }, - { name = "torch" }, - { name = "tqdm" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4c/2e/3bcba8688d58f8dc954cef6831c19d52b6017b035d783685d67cd99fa351/pyro_ppl-1.9.1.tar.gz", hash = "sha256:5e1596de276c038a3f77d2580a90d0a97126e0104900444a088eee620bb0d65e", size = 570861, upload-time = "2024-06-02T00:37:39.688Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/37/def183a2a2c8619d92649d62fe0622c4c6c62f60e4151e8fbaa409e7d5ab/pyro_ppl-1.9.1-py3-none-any.whl", hash = "sha256:91fb2c8740d9d3bd548180ac5ecfa04552ed8c471a1ab66870180663b8f09852", size = 755956, upload-time = "2024-06-02T00:37:37.486Z" }, -] - -[[package]] -name = "pytest" -version = "9.0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = 
"sys_platform == 'win32'" }, - { name = "iniconfig" }, - { name = "packaging" }, - { name = "pluggy" }, - { name = "pygments" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" }, -] - -[[package]] -name = "pytest-benchmark" -version = "5.2.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "py-cpuinfo" }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/24/34/9f732b76456d64faffbef6232f1f9dbec7a7c4999ff46282fa418bd1af66/pytest_benchmark-5.2.3.tar.gz", hash = "sha256:deb7317998a23c650fd4ff76e1230066a76cb45dcece0aca5607143c619e7779", size = 341340, upload-time = "2025-11-09T18:48:43.215Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/29/e756e715a48959f1c0045342088d7ca9762a2f509b945f362a316e9412b7/pytest_benchmark-5.2.3-py3-none-any.whl", hash = "sha256:bc839726ad20e99aaa0d11a127445457b4219bdb9e80a1afc4b51da7f96b0803", size = 45255, upload-time = "2025-11-09T18:48:39.765Z" }, -] - -[[package]] -name = "pytest-cov" -version = "7.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "coverage" }, - { name = "pluggy" }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = 
"2025-09-09T10:57:02.113Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, -] - -[[package]] -name = "pytest-xdist" -version = "3.8.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "execnet" }, - { name = "pytest" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, -] - -[[package]] -name = "python-dotenv" -version = "1.2.1" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, -] - -[[package]] -name = "pyyaml" -version = "6.0.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, - { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = 
"2025-09-25T21:32:13.652Z" }, - { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, - { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, - { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, - { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, - { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, - { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, - { url = 
"https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, - { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, - { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, - { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, - { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, - { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, - { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, - { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, - { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, - { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, -] - -[[package]] -name = "pyzmq" -version = "27.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "implementation_name == 'pypy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" } -wheels = [ - { 
url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" }, - { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" }, - { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" }, - { url = "https://files.pythonhosted.org/packages/f8/9b/c108cdb55560eaf253f0cbdb61b29971e9fb34d9c3499b0e96e4e60ed8a5/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31", size = 840995, upload-time = "2025-09-08T23:08:08.396Z" }, - { url = "https://files.pythonhosted.org/packages/c2/bb/b79798ca177b9eb0825b4c9998c6af8cd2a7f15a6a1a4272c1d1a21d382f/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28", size = 1642070, upload-time = "2025-09-08T23:08:09.989Z" }, - { url = "https://files.pythonhosted.org/packages/9c/80/2df2e7977c4ede24c79ae39dcef3899bfc5f34d1ca7a5b24f182c9b7a9ca/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856", size = 2021121, upload-time = "2025-09-08T23:08:11.907Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/bd/2d45ad24f5f5ae7e8d01525eb76786fa7557136555cac7d929880519e33a/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496", size = 1878550, upload-time = "2025-09-08T23:08:13.513Z" }, - { url = "https://files.pythonhosted.org/packages/e6/2f/104c0a3c778d7c2ab8190e9db4f62f0b6957b53c9d87db77c284b69f33ea/pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd", size = 559184, upload-time = "2025-09-08T23:08:15.163Z" }, - { url = "https://files.pythonhosted.org/packages/fc/7f/a21b20d577e4100c6a41795842028235998a643b1ad406a6d4163ea8f53e/pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf", size = 619480, upload-time = "2025-09-08T23:08:17.192Z" }, - { url = "https://files.pythonhosted.org/packages/78/c2/c012beae5f76b72f007a9e91ee9401cb88c51d0f83c6257a03e785c81cc2/pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f", size = 552993, upload-time = "2025-09-08T23:08:18.926Z" }, - { url = "https://files.pythonhosted.org/packages/60/cb/84a13459c51da6cec1b7b1dc1a47e6db6da50b77ad7fd9c145842750a011/pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5", size = 1122436, upload-time = "2025-09-08T23:08:20.801Z" }, - { url = "https://files.pythonhosted.org/packages/dc/b6/94414759a69a26c3dd674570a81813c46a078767d931a6c70ad29fc585cb/pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6", size = 1156301, upload-time = "2025-09-08T23:08:22.47Z" }, - { url = "https://files.pythonhosted.org/packages/a5/ad/15906493fd40c316377fd8a8f6b1f93104f97a752667763c9b9c1b71d42d/pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = 
"sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7", size = 1341197, upload-time = "2025-09-08T23:08:24.286Z" }, - { url = "https://files.pythonhosted.org/packages/14/1d/d343f3ce13db53a54cb8946594e567410b2125394dafcc0268d8dda027e0/pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05", size = 897275, upload-time = "2025-09-08T23:08:26.063Z" }, - { url = "https://files.pythonhosted.org/packages/69/2d/d83dd6d7ca929a2fc67d2c3005415cdf322af7751d773524809f9e585129/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9", size = 660469, upload-time = "2025-09-08T23:08:27.623Z" }, - { url = "https://files.pythonhosted.org/packages/3e/cd/9822a7af117f4bc0f1952dbe9ef8358eb50a24928efd5edf54210b850259/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128", size = 847961, upload-time = "2025-09-08T23:08:29.672Z" }, - { url = "https://files.pythonhosted.org/packages/9a/12/f003e824a19ed73be15542f172fd0ec4ad0b60cf37436652c93b9df7c585/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39", size = 1650282, upload-time = "2025-09-08T23:08:31.349Z" }, - { url = "https://files.pythonhosted.org/packages/d5/4a/e82d788ed58e9a23995cee70dbc20c9aded3d13a92d30d57ec2291f1e8a3/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97", size = 2024468, upload-time = "2025-09-08T23:08:33.543Z" }, - { url = "https://files.pythonhosted.org/packages/d9/94/2da0a60841f757481e402b34bf4c8bf57fa54a5466b965de791b1e6f747d/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db", size = 1885394, upload-time = "2025-09-08T23:08:35.51Z" }, - { url = "https://files.pythonhosted.org/packages/4f/6f/55c10e2e49ad52d080dc24e37adb215e5b0d64990b57598abc2e3f01725b/pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c", size = 574964, upload-time = "2025-09-08T23:08:37.178Z" }, - { url = "https://files.pythonhosted.org/packages/87/4d/2534970ba63dd7c522d8ca80fb92777f362c0f321900667c615e2067cb29/pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2", size = 641029, upload-time = "2025-09-08T23:08:40.595Z" }, - { url = "https://files.pythonhosted.org/packages/f6/fa/f8aea7a28b0641f31d40dea42d7ef003fded31e184ef47db696bc74cd610/pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e", size = 561541, upload-time = "2025-09-08T23:08:42.668Z" }, -] - -[[package]] -name = "referencing" -version = "0.37.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "rpds-py" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, -] - -[[package]] -name = "regex" -version = "2025.11.3" -source = { registry = "https://pypi.org/simple" } 
-sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" }, - { url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" }, - { url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" }, - { url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" }, - { url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" }, - { url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842, upload-time = "2025-11-03T21:31:45.353Z" }, - { url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519, upload-time = "2025-11-03T21:31:46.814Z" }, - { url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" }, - { url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" }, - { url = 
"https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" }, - { url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" }, - { url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time = "2025-11-03T21:31:54.514Z" }, - { url = "https://files.pythonhosted.org/packages/e1/a7/dda24ebd49da46a197436ad96378f17df30ceb40e52e859fc42cac45b850/regex-2025.11.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c1e448051717a334891f2b9a620fe36776ebf3dd8ec46a0b877c8ae69575feb4", size = 489081, upload-time = "2025-11-03T21:31:55.9Z" }, - { url = "https://files.pythonhosted.org/packages/19/22/af2dc751aacf88089836aa088a1a11c4f21a04707eb1b0478e8e8fb32847/regex-2025.11.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9b5aca4d5dfd7fbfbfbdaf44850fcc7709a01146a797536a8f84952e940cca76", size = 291123, upload-time = "2025-11-03T21:31:57.758Z" }, - { url = "https://files.pythonhosted.org/packages/a3/88/1a3ea5672f4b0a84802ee9891b86743438e7c04eb0b8f8c4e16a42375327/regex-2025.11.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:04d2765516395cf7dda331a244a3282c0f5ae96075f728629287dfa6f76ba70a", size = 288814, upload-time = "2025-11-03T21:32:01.12Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/8c/f5987895bf42b8ddeea1b315c9fedcfe07cadee28b9c98cf50d00adcb14d/regex-2025.11.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d9903ca42bfeec4cebedba8022a7c97ad2aab22e09573ce9976ba01b65e4361", size = 798592, upload-time = "2025-11-03T21:32:03.006Z" }, - { url = "https://files.pythonhosted.org/packages/99/2a/6591ebeede78203fa77ee46a1c36649e02df9eaa77a033d1ccdf2fcd5d4e/regex-2025.11.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:639431bdc89d6429f6721625e8129413980ccd62e9d3f496be618a41d205f160", size = 864122, upload-time = "2025-11-03T21:32:04.553Z" }, - { url = "https://files.pythonhosted.org/packages/94/d6/be32a87cf28cf8ed064ff281cfbd49aefd90242a83e4b08b5a86b38e8eb4/regex-2025.11.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f117efad42068f9715677c8523ed2be1518116d1c49b1dd17987716695181efe", size = 912272, upload-time = "2025-11-03T21:32:06.148Z" }, - { url = "https://files.pythonhosted.org/packages/62/11/9bcef2d1445665b180ac7f230406ad80671f0fc2a6ffb93493b5dd8cd64c/regex-2025.11.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4aecb6f461316adf9f1f0f6a4a1a3d79e045f9b71ec76055a791affa3b285850", size = 803497, upload-time = "2025-11-03T21:32:08.162Z" }, - { url = "https://files.pythonhosted.org/packages/e5/a7/da0dc273d57f560399aa16d8a68ae7f9b57679476fc7ace46501d455fe84/regex-2025.11.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3b3a5f320136873cc5561098dfab677eea139521cb9a9e8db98b7e64aef44cbc", size = 787892, upload-time = "2025-11-03T21:32:09.769Z" }, - { url = "https://files.pythonhosted.org/packages/da/4b/732a0c5a9736a0b8d6d720d4945a2f1e6f38f87f48f3173559f53e8d5d82/regex-2025.11.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:75fa6f0056e7efb1f42a1c34e58be24072cb9e61a601340cc1196ae92326a4f9", 
size = 858462, upload-time = "2025-11-03T21:32:11.769Z" }, - { url = "https://files.pythonhosted.org/packages/0c/f5/a2a03df27dc4c2d0c769220f5110ba8c4084b0bfa9ab0f9b4fcfa3d2b0fc/regex-2025.11.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:dbe6095001465294f13f1adcd3311e50dd84e5a71525f20a10bd16689c61ce0b", size = 850528, upload-time = "2025-11-03T21:32:13.906Z" }, - { url = "https://files.pythonhosted.org/packages/d6/09/e1cd5bee3841c7f6eb37d95ca91cdee7100b8f88b81e41c2ef426910891a/regex-2025.11.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:454d9b4ae7881afbc25015b8627c16d88a597479b9dea82b8c6e7e2e07240dc7", size = 789866, upload-time = "2025-11-03T21:32:15.748Z" }, - { url = "https://files.pythonhosted.org/packages/eb/51/702f5ea74e2a9c13d855a6a85b7f80c30f9e72a95493260193c07f3f8d74/regex-2025.11.3-cp313-cp313-win32.whl", hash = "sha256:28ba4d69171fc6e9896337d4fc63a43660002b7da53fc15ac992abcf3410917c", size = 266189, upload-time = "2025-11-03T21:32:17.493Z" }, - { url = "https://files.pythonhosted.org/packages/8b/00/6e29bb314e271a743170e53649db0fdb8e8ff0b64b4f425f5602f4eb9014/regex-2025.11.3-cp313-cp313-win_amd64.whl", hash = "sha256:bac4200befe50c670c405dc33af26dad5a3b6b255dd6c000d92fe4629f9ed6a5", size = 277054, upload-time = "2025-11-03T21:32:19.042Z" }, - { url = "https://files.pythonhosted.org/packages/25/f1/b156ff9f2ec9ac441710764dda95e4edaf5f36aca48246d1eea3f1fd96ec/regex-2025.11.3-cp313-cp313-win_arm64.whl", hash = "sha256:2292cd5a90dab247f9abe892ac584cb24f0f54680c73fcb4a7493c66c2bf2467", size = 270325, upload-time = "2025-11-03T21:32:21.338Z" }, - { url = "https://files.pythonhosted.org/packages/20/28/fd0c63357caefe5680b8ea052131acbd7f456893b69cc2a90cc3e0dc90d4/regex-2025.11.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:1eb1ebf6822b756c723e09f5186473d93236c06c579d2cc0671a722d2ab14281", size = 491984, upload-time = "2025-11-03T21:32:23.466Z" }, - { url = 
"https://files.pythonhosted.org/packages/df/ec/7014c15626ab46b902b3bcc4b28a7bae46d8f281fc7ea9c95e22fcaaa917/regex-2025.11.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1e00ec2970aab10dc5db34af535f21fcf32b4a31d99e34963419636e2f85ae39", size = 292673, upload-time = "2025-11-03T21:32:25.034Z" }, - { url = "https://files.pythonhosted.org/packages/23/ab/3b952ff7239f20d05f1f99e9e20188513905f218c81d52fb5e78d2bf7634/regex-2025.11.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a4cb042b615245d5ff9b3794f56be4138b5adc35a4166014d31d1814744148c7", size = 291029, upload-time = "2025-11-03T21:32:26.528Z" }, - { url = "https://files.pythonhosted.org/packages/21/7e/3dc2749fc684f455f162dcafb8a187b559e2614f3826877d3844a131f37b/regex-2025.11.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44f264d4bf02f3176467d90b294d59bf1db9fe53c141ff772f27a8b456b2a9ed", size = 807437, upload-time = "2025-11-03T21:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/1b/0b/d529a85ab349c6a25d1ca783235b6e3eedf187247eab536797021f7126c6/regex-2025.11.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7be0277469bf3bd7a34a9c57c1b6a724532a0d235cd0dc4e7f4316f982c28b19", size = 873368, upload-time = "2025-11-03T21:32:30.4Z" }, - { url = "https://files.pythonhosted.org/packages/7d/18/2d868155f8c9e3e9d8f9e10c64e9a9f496bb8f7e037a88a8bed26b435af6/regex-2025.11.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0d31e08426ff4b5b650f68839f5af51a92a5b51abd8554a60c2fbc7c71f25d0b", size = 914921, upload-time = "2025-11-03T21:32:32.123Z" }, - { url = "https://files.pythonhosted.org/packages/2d/71/9d72ff0f354fa783fe2ba913c8734c3b433b86406117a8db4ea2bf1c7a2f/regex-2025.11.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e43586ce5bd28f9f285a6e729466841368c4a0353f6fd08d4ce4630843d3648a", size 
= 812708, upload-time = "2025-11-03T21:32:34.305Z" }, - { url = "https://files.pythonhosted.org/packages/e7/19/ce4bf7f5575c97f82b6e804ffb5c4e940c62609ab2a0d9538d47a7fdf7d4/regex-2025.11.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f9397d561a4c16829d4e6ff75202c1c08b68a3bdbfe29dbfcdb31c9830907c6", size = 795472, upload-time = "2025-11-03T21:32:36.364Z" }, - { url = "https://files.pythonhosted.org/packages/03/86/fd1063a176ffb7b2315f9a1b08d17b18118b28d9df163132615b835a26ee/regex-2025.11.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:dd16e78eb18ffdb25ee33a0682d17912e8cc8a770e885aeee95020046128f1ce", size = 868341, upload-time = "2025-11-03T21:32:38.042Z" }, - { url = "https://files.pythonhosted.org/packages/12/43/103fb2e9811205e7386366501bc866a164a0430c79dd59eac886a2822950/regex-2025.11.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:ffcca5b9efe948ba0661e9df0fa50d2bc4b097c70b9810212d6b62f05d83b2dd", size = 854666, upload-time = "2025-11-03T21:32:40.079Z" }, - { url = "https://files.pythonhosted.org/packages/7d/22/e392e53f3869b75804762c7c848bd2dd2abf2b70fb0e526f58724638bd35/regex-2025.11.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c56b4d162ca2b43318ac671c65bd4d563e841a694ac70e1a976ac38fcf4ca1d2", size = 799473, upload-time = "2025-11-03T21:32:42.148Z" }, - { url = "https://files.pythonhosted.org/packages/4f/f9/8bd6b656592f925b6845fcbb4d57603a3ac2fb2373344ffa1ed70aa6820a/regex-2025.11.3-cp313-cp313t-win32.whl", hash = "sha256:9ddc42e68114e161e51e272f667d640f97e84a2b9ef14b7477c53aac20c2d59a", size = 268792, upload-time = "2025-11-03T21:32:44.13Z" }, - { url = "https://files.pythonhosted.org/packages/e5/87/0e7d603467775ff65cd2aeabf1b5b50cc1c3708556a8b849a2fa4dd1542b/regex-2025.11.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7a7c7fdf755032ffdd72c77e3d8096bdcb0eb92e89e17571a196f03d88b11b3c", size = 280214, upload-time = "2025-11-03T21:32:45.853Z" }, - { url = 
"https://files.pythonhosted.org/packages/8d/d0/2afc6f8e94e2b64bfb738a7c2b6387ac1699f09f032d363ed9447fd2bb57/regex-2025.11.3-cp313-cp313t-win_arm64.whl", hash = "sha256:df9eb838c44f570283712e7cff14c16329a9f0fb19ca492d21d4b7528ee6821e", size = 271469, upload-time = "2025-11-03T21:32:48.026Z" }, -] - -[[package]] -name = "requests" -version = "2.32.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, -] - -[[package]] -name = "roman-numerals-py" -version = "3.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" }, -] - -[[package]] -name = "rpds-py" -version = "0.29.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/98/33/23b3b3419b6a3e0f559c7c0d2ca8fc1b9448382b25245033788785921332/rpds_py-0.29.0.tar.gz", hash = "sha256:fe55fe686908f50154d1dc599232016e50c243b438c3b7432f24e2895b0e5359", size = 69359, upload-time = "2025-11-16T14:50:39.532Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/50/bc0e6e736d94e420df79be4deb5c9476b63165c87bb8f19ef75d100d21b3/rpds_py-0.29.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a0891cfd8db43e085c0ab93ab7e9b0c8fee84780d436d3b266b113e51e79f954", size = 376000, upload-time = "2025-11-16T14:48:19.141Z" }, - { url = "https://files.pythonhosted.org/packages/3e/3a/46676277160f014ae95f24de53bed0e3b7ea66c235e7de0b9df7bd5d68ba/rpds_py-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3897924d3f9a0361472d884051f9a2460358f9a45b1d85a39a158d2f8f1ad71c", size = 360575, upload-time = "2025-11-16T14:48:20.443Z" }, - { url = "https://files.pythonhosted.org/packages/75/ba/411d414ed99ea1afdd185bbabeeaac00624bd1e4b22840b5e9967ade6337/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21deb8e0d1571508c6491ce5ea5e25669b1dd4adf1c9d64b6314842f708b5d", size = 392159, upload-time = "2025-11-16T14:48:22.12Z" }, - { url = "https://files.pythonhosted.org/packages/8f/b1/e18aa3a331f705467a48d0296778dc1fea9d7f6cf675bd261f9a846c7e90/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9efe71687d6427737a0a2de9ca1c0a216510e6cd08925c44162be23ed7bed2d5", size = 410602, upload-time = "2025-11-16T14:48:23.563Z" }, - { url = "https://files.pythonhosted.org/packages/2f/6c/04f27f0c9f2299274c76612ac9d2c36c5048bb2c6c2e52c38c60bf3868d9/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f65470919dc189c833e86b2c4bd21bd355f98436a2cef9e0a9a92aebc8e57e", size = 515808, upload-time = "2025-11-16T14:48:24.949Z" }, - { url = 
"https://files.pythonhosted.org/packages/83/56/a8412aa464fb151f8bc0d91fb0bb888adc9039bd41c1c6ba8d94990d8cf8/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:def48ff59f181130f1a2cb7c517d16328efac3ec03951cca40c1dc2049747e83", size = 416015, upload-time = "2025-11-16T14:48:26.782Z" }, - { url = "https://files.pythonhosted.org/packages/04/4c/f9b8a05faca3d9e0a6397c90d13acb9307c9792b2bff621430c58b1d6e76/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7bd570be92695d89285a4b373006930715b78d96449f686af422debb4d3949", size = 395325, upload-time = "2025-11-16T14:48:28.055Z" }, - { url = "https://files.pythonhosted.org/packages/34/60/869f3bfbf8ed7b54f1ad9a5543e0fdffdd40b5a8f587fe300ee7b4f19340/rpds_py-0.29.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:5a572911cd053137bbff8e3a52d31c5d2dba51d3a67ad902629c70185f3f2181", size = 410160, upload-time = "2025-11-16T14:48:29.338Z" }, - { url = "https://files.pythonhosted.org/packages/91/aa/e5b496334e3aba4fe4c8a80187b89f3c1294c5c36f2a926da74338fa5a73/rpds_py-0.29.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d583d4403bcbf10cffc3ab5cee23d7643fcc960dff85973fd3c2d6c86e8dbb0c", size = 425309, upload-time = "2025-11-16T14:48:30.691Z" }, - { url = "https://files.pythonhosted.org/packages/85/68/4e24a34189751ceb6d66b28f18159922828dd84155876551f7ca5b25f14f/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:070befbb868f257d24c3bb350dbd6e2f645e83731f31264b19d7231dd5c396c7", size = 574644, upload-time = "2025-11-16T14:48:31.964Z" }, - { url = "https://files.pythonhosted.org/packages/8c/cf/474a005ea4ea9c3b4f17b6108b6b13cebfc98ebaff11d6e1b193204b3a93/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fc935f6b20b0c9f919a8ff024739174522abd331978f750a74bb68abd117bd19", size = 601605, upload-time = "2025-11-16T14:48:33.252Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/b1/c56f6a9ab8c5f6bb5c65c4b5f8229167a3a525245b0773f2c0896686b64e/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c5a8ecaa44ce2d8d9d20a68a2483a74c07f05d72e94a4dff88906c8807e77b0", size = 564593, upload-time = "2025-11-16T14:48:34.643Z" }, - { url = "https://files.pythonhosted.org/packages/b3/13/0494cecce4848f68501e0a229432620b4b57022388b071eeff95f3e1e75b/rpds_py-0.29.0-cp312-cp312-win32.whl", hash = "sha256:ba5e1aeaf8dd6d8f6caba1f5539cddda87d511331714b7b5fc908b6cfc3636b7", size = 223853, upload-time = "2025-11-16T14:48:36.419Z" }, - { url = "https://files.pythonhosted.org/packages/1f/6a/51e9aeb444a00cdc520b032a28b07e5f8dc7bc328b57760c53e7f96997b4/rpds_py-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f6134faf54b3cb83375db0f113506f8b7770785be1f95a631e7e2892101977", size = 239895, upload-time = "2025-11-16T14:48:37.956Z" }, - { url = "https://files.pythonhosted.org/packages/d1/d4/8bce56cdad1ab873e3f27cb31c6a51d8f384d66b022b820525b879f8bed1/rpds_py-0.29.0-cp312-cp312-win_arm64.whl", hash = "sha256:b016eddf00dca7944721bf0cd85b6af7f6c4efaf83ee0b37c4133bd39757a8c7", size = 230321, upload-time = "2025-11-16T14:48:39.71Z" }, - { url = "https://files.pythonhosted.org/packages/fd/d9/c5de60d9d371bbb186c3e9bf75f4fc5665e11117a25a06a6b2e0afb7380e/rpds_py-0.29.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1585648d0760b88292eecab5181f5651111a69d90eff35d6b78aa32998886a61", size = 375710, upload-time = "2025-11-16T14:48:41.063Z" }, - { url = "https://files.pythonhosted.org/packages/b3/b3/0860cdd012291dc21272895ce107f1e98e335509ba986dd83d72658b82b9/rpds_py-0.29.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:521807963971a23996ddaf764c682b3e46459b3c58ccd79fefbe16718db43154", size = 360582, upload-time = "2025-11-16T14:48:42.423Z" }, - { url = 
"https://files.pythonhosted.org/packages/92/8a/a18c2f4a61b3407e56175f6aab6deacdf9d360191a3d6f38566e1eaf7266/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8896986efaa243ab713c69e6491a4138410f0fe36f2f4c71e18bd5501e8014", size = 391172, upload-time = "2025-11-16T14:48:43.75Z" }, - { url = "https://files.pythonhosted.org/packages/fd/49/e93354258508c50abc15cdcd5fcf7ac4117f67bb6233ad7859f75e7372a0/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d24564a700ef41480a984c5ebed62b74e6ce5860429b98b1fede76049e953e6", size = 409586, upload-time = "2025-11-16T14:48:45.498Z" }, - { url = "https://files.pythonhosted.org/packages/5a/8d/a27860dae1c19a6bdc901f90c81f0d581df1943355802961a57cdb5b6cd1/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6596b93c010d386ae46c9fba9bfc9fc5965fa8228edeac51576299182c2e31c", size = 516339, upload-time = "2025-11-16T14:48:47.308Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ad/a75e603161e79b7110c647163d130872b271c6b28712c803c65d492100f7/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5cc58aac218826d054c7da7f95821eba94125d88be673ff44267bb89d12a5866", size = 416201, upload-time = "2025-11-16T14:48:48.615Z" }, - { url = "https://files.pythonhosted.org/packages/b9/42/555b4ee17508beafac135c8b450816ace5a96194ce97fefc49d58e5652ea/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de73e40ebc04dd5d9556f50180395322193a78ec247e637e741c1b954810f295", size = 395095, upload-time = "2025-11-16T14:48:50.027Z" }, - { url = "https://files.pythonhosted.org/packages/cd/f0/c90b671b9031e800ec45112be42ea9f027f94f9ac25faaac8770596a16a1/rpds_py-0.29.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:295ce5ac7f0cf69a651ea75c8f76d02a31f98e5698e82a50a5f4d4982fbbae3b", size = 410077, upload-time = "2025-11-16T14:48:51.515Z" }, - { url = 
"https://files.pythonhosted.org/packages/3d/80/9af8b640b81fe21e6f718e9dec36c0b5f670332747243130a5490f292245/rpds_py-0.29.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ea59b23ea931d494459c8338056fe7d93458c0bf3ecc061cd03916505369d55", size = 424548, upload-time = "2025-11-16T14:48:53.237Z" }, - { url = "https://files.pythonhosted.org/packages/e4/0b/b5647446e991736e6a495ef510e6710df91e880575a586e763baeb0aa770/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f49d41559cebd608042fdcf54ba597a4a7555b49ad5c1c0c03e0af82692661cd", size = 573661, upload-time = "2025-11-16T14:48:54.769Z" }, - { url = "https://files.pythonhosted.org/packages/f7/b3/1b1c9576839ff583d1428efbf59f9ee70498d8ce6c0b328ac02f1e470879/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:05a2bd42768ea988294ca328206efbcc66e220d2d9b7836ee5712c07ad6340ea", size = 600937, upload-time = "2025-11-16T14:48:56.247Z" }, - { url = "https://files.pythonhosted.org/packages/6c/7b/b6cfca2f9fee4c4494ce54f7fb1b9f578867495a9aa9fc0d44f5f735c8e0/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33ca7bdfedd83339ca55da3a5e1527ee5870d4b8369456b5777b197756f3ca22", size = 564496, upload-time = "2025-11-16T14:48:57.691Z" }, - { url = "https://files.pythonhosted.org/packages/b9/fb/ba29ec7f0f06eb801bac5a23057a9ff7670623b5e8013bd59bec4aa09de8/rpds_py-0.29.0-cp313-cp313-win32.whl", hash = "sha256:20c51ae86a0bb9accc9ad4e6cdeec58d5ebb7f1b09dd4466331fc65e1766aae7", size = 223126, upload-time = "2025-11-16T14:48:59.058Z" }, - { url = "https://files.pythonhosted.org/packages/3c/6b/0229d3bed4ddaa409e6d90b0ae967ed4380e4bdd0dad6e59b92c17d42457/rpds_py-0.29.0-cp313-cp313-win_amd64.whl", hash = "sha256:6410e66f02803600edb0b1889541f4b5cc298a5ccda0ad789cc50ef23b54813e", size = 239771, upload-time = "2025-11-16T14:49:00.872Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/38/d2868f058b164f8efd89754d85d7b1c08b454f5c07ac2e6cc2e9bd4bd05b/rpds_py-0.29.0-cp313-cp313-win_arm64.whl", hash = "sha256:56838e1cd9174dc23c5691ee29f1d1be9eab357f27efef6bded1328b23e1ced2", size = 229994, upload-time = "2025-11-16T14:49:02.673Z" }, - { url = "https://files.pythonhosted.org/packages/52/91/5de91c5ec7d41759beec9b251630824dbb8e32d20c3756da1a9a9d309709/rpds_py-0.29.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:37d94eadf764d16b9a04307f2ab1d7af6dc28774bbe0535c9323101e14877b4c", size = 365886, upload-time = "2025-11-16T14:49:04.133Z" }, - { url = "https://files.pythonhosted.org/packages/85/7c/415d8c1b016d5f47ecec5145d9d6d21002d39dce8761b30f6c88810b455a/rpds_py-0.29.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d472cf73efe5726a067dce63eebe8215b14beabea7c12606fd9994267b3cfe2b", size = 355262, upload-time = "2025-11-16T14:49:05.543Z" }, - { url = "https://files.pythonhosted.org/packages/3d/14/bf83e2daa4f980e4dc848aed9299792a8b84af95e12541d9e7562f84a6ef/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72fdfd5ff8992e4636621826371e3ac5f3e3b8323e9d0e48378e9c13c3dac9d0", size = 384826, upload-time = "2025-11-16T14:49:07.301Z" }, - { url = "https://files.pythonhosted.org/packages/33/b8/53330c50a810ae22b4fbba5e6cf961b68b9d72d9bd6780a7c0a79b070857/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2549d833abdf8275c901313b9e8ff8fba57e50f6a495035a2a4e30621a2f7cc4", size = 394234, upload-time = "2025-11-16T14:49:08.782Z" }, - { url = "https://files.pythonhosted.org/packages/cc/32/01e2e9645cef0e584f518cfde4567563e57db2257244632b603f61b40e50/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4448dad428f28a6a767c3e3b80cde3446a22a0efbddaa2360f4bb4dc836d0688", size = 520008, upload-time = "2025-11-16T14:49:10.253Z" }, - { url = 
"https://files.pythonhosted.org/packages/98/c3/0d1b95a81affae2b10f950782e33a1fd2edd6ce2a479966cac98c9a66f57/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:115f48170fd4296a33938d8c11f697f5f26e0472e43d28f35624764173a60e4d", size = 409569, upload-time = "2025-11-16T14:49:12.478Z" }, - { url = "https://files.pythonhosted.org/packages/fa/60/aa3b8678f3f009f675b99174fa2754302a7fbfe749162e8043d111de2d88/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5bb73ffc029820f4348e9b66b3027493ae00bca6629129cd433fd7a76308ee", size = 385188, upload-time = "2025-11-16T14:49:13.88Z" }, - { url = "https://files.pythonhosted.org/packages/92/02/5546c1c8aa89c18d40c1fcffdcc957ba730dee53fb7c3ca3a46f114761d2/rpds_py-0.29.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:b1581fcde18fcdf42ea2403a16a6b646f8eb1e58d7f90a0ce693da441f76942e", size = 398587, upload-time = "2025-11-16T14:49:15.339Z" }, - { url = "https://files.pythonhosted.org/packages/6c/e0/ad6eeaf47e236eba052fa34c4073078b9e092bd44da6bbb35aaae9580669/rpds_py-0.29.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16e9da2bda9eb17ea318b4c335ec9ac1818e88922cbe03a5743ea0da9ecf74fb", size = 416641, upload-time = "2025-11-16T14:49:16.832Z" }, - { url = "https://files.pythonhosted.org/packages/1a/93/0acedfd50ad9cdd3879c615a6dc8c5f1ce78d2fdf8b87727468bb5bb4077/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:28fd300326dd21198f311534bdb6d7e989dd09b3418b3a91d54a0f384c700967", size = 566683, upload-time = "2025-11-16T14:49:18.342Z" }, - { url = "https://files.pythonhosted.org/packages/62/53/8c64e0f340a9e801459fc6456821abc15b3582cb5dc3932d48705a9d9ac7/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2aba991e041d031c7939e1358f583ae405a7bf04804ca806b97a5c0e0af1ea5e", size = 592730, upload-time = "2025-11-16T14:49:19.767Z" }, - { url = 
"https://files.pythonhosted.org/packages/85/ef/3109b6584f8c4b0d2490747c916df833c127ecfa82be04d9a40a376f2090/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f437026dbbc3f08c99cc41a5b2570c6e1a1ddbe48ab19a9b814254128d4ea7a", size = 557361, upload-time = "2025-11-16T14:49:21.574Z" }, - { url = "https://files.pythonhosted.org/packages/ff/3b/61586475e82d57f01da2c16edb9115a618afe00ce86fe1b58936880b15af/rpds_py-0.29.0-cp313-cp313t-win32.whl", hash = "sha256:6e97846e9800a5d0fe7be4d008f0c93d0feeb2700da7b1f7528dabafb31dfadb", size = 211227, upload-time = "2025-11-16T14:49:23.03Z" }, - { url = "https://files.pythonhosted.org/packages/3b/3a/12dc43f13594a54ea0c9d7e9d43002116557330e3ad45bc56097ddf266e2/rpds_py-0.29.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f49196aec7c4b406495f60e6f947ad71f317a765f956d74bbd83996b9edc0352", size = 225248, upload-time = "2025-11-16T14:49:24.841Z" }, -] - -[[package]] -name = "ruff" -version = "0.14.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/f0/62b5a1a723fe183650109407fa56abb433b00aa1c0b9ba555f9c4efec2c6/ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc", size = 5669501, upload-time = "2025-11-21T14:26:17.903Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/67/d2/7dd544116d107fffb24a0064d41a5d2ed1c9d6372d142f9ba108c8e39207/ruff-0.14.6-py3-none-linux_armv6l.whl", hash = "sha256:d724ac2f1c240dbd01a2ae98db5d1d9a5e1d9e96eba999d1c48e30062df578a3", size = 13326119, upload-time = "2025-11-21T14:25:24.2Z" }, - { url = "https://files.pythonhosted.org/packages/36/6a/ad66d0a3315d6327ed6b01f759d83df3c4d5f86c30462121024361137b6a/ruff-0.14.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9f7539ea257aa4d07b7ce87aed580e485c40143f2473ff2f2b75aee003186004", size = 13526007, upload-time = "2025-11-21T14:25:26.906Z" }, - { url = 
"https://files.pythonhosted.org/packages/a3/9d/dae6db96df28e0a15dea8e986ee393af70fc97fd57669808728080529c37/ruff-0.14.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7f6007e55b90a2a7e93083ba48a9f23c3158c433591c33ee2e99a49b889c6332", size = 12676572, upload-time = "2025-11-21T14:25:29.826Z" }, - { url = "https://files.pythonhosted.org/packages/76/a4/f319e87759949062cfee1b26245048e92e2acce900ad3a909285f9db1859/ruff-0.14.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8e7b9d73d8728b68f632aa8e824ef041d068d231d8dbc7808532d3629a6bef", size = 13140745, upload-time = "2025-11-21T14:25:32.788Z" }, - { url = "https://files.pythonhosted.org/packages/95/d3/248c1efc71a0a8ed4e8e10b4b2266845d7dfc7a0ab64354afe049eaa1310/ruff-0.14.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d50d45d4553a3ebcbd33e7c5e0fe6ca4aafd9a9122492de357205c2c48f00775", size = 13076486, upload-time = "2025-11-21T14:25:35.601Z" }, - { url = "https://files.pythonhosted.org/packages/a5/19/b68d4563fe50eba4b8c92aa842149bb56dd24d198389c0ed12e7faff4f7d/ruff-0.14.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:118548dd121f8a21bfa8ab2c5b80e5b4aed67ead4b7567790962554f38e598ce", size = 13727563, upload-time = "2025-11-21T14:25:38.514Z" }, - { url = "https://files.pythonhosted.org/packages/47/ac/943169436832d4b0e867235abbdb57ce3a82367b47e0280fa7b4eabb7593/ruff-0.14.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:57256efafbfefcb8748df9d1d766062f62b20150691021f8ab79e2d919f7c11f", size = 15199755, upload-time = "2025-11-21T14:25:41.516Z" }, - { url = "https://files.pythonhosted.org/packages/c9/b9/288bb2399860a36d4bb0541cb66cce3c0f4156aaff009dc8499be0c24bf2/ruff-0.14.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff18134841e5c68f8e5df1999a64429a02d5549036b394fafbe410f886e1989d", size = 14850608, upload-time = "2025-11-21T14:25:44.428Z" }, - { url = 
"https://files.pythonhosted.org/packages/ee/b1/a0d549dd4364e240f37e7d2907e97ee80587480d98c7799d2d8dc7a2f605/ruff-0.14.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c4b7ec1e66a105d5c27bd57fa93203637d66a26d10ca9809dc7fc18ec58440", size = 14118754, upload-time = "2025-11-21T14:25:47.214Z" }, - { url = "https://files.pythonhosted.org/packages/13/ac/9b9fe63716af8bdfddfacd0882bc1586f29985d3b988b3c62ddce2e202c3/ruff-0.14.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167843a6f78680746d7e226f255d920aeed5e4ad9c03258094a2d49d3028b105", size = 13949214, upload-time = "2025-11-21T14:25:50.002Z" }, - { url = "https://files.pythonhosted.org/packages/12/27/4dad6c6a77fede9560b7df6802b1b697e97e49ceabe1f12baf3ea20862e9/ruff-0.14.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:16a33af621c9c523b1ae006b1b99b159bf5ac7e4b1f20b85b2572455018e0821", size = 14106112, upload-time = "2025-11-21T14:25:52.841Z" }, - { url = "https://files.pythonhosted.org/packages/6a/db/23e322d7177873eaedea59a7932ca5084ec5b7e20cb30f341ab594130a71/ruff-0.14.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1432ab6e1ae2dc565a7eea707d3b03a0c234ef401482a6f1621bc1f427c2ff55", size = 13035010, upload-time = "2025-11-21T14:25:55.536Z" }, - { url = "https://files.pythonhosted.org/packages/a8/9c/20e21d4d69dbb35e6a1df7691e02f363423658a20a2afacf2a2c011800dc/ruff-0.14.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c55cfbbe7abb61eb914bfd20683d14cdfb38a6d56c6c66efa55ec6570ee4e71", size = 13054082, upload-time = "2025-11-21T14:25:58.625Z" }, - { url = "https://files.pythonhosted.org/packages/66/25/906ee6a0464c3125c8d673c589771a974965c2be1a1e28b5c3b96cb6ef88/ruff-0.14.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:efea3c0f21901a685fff4befda6d61a1bf4cb43de16da87e8226a281d614350b", size = 13303354, upload-time = "2025-11-21T14:26:01.816Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/58/60577569e198d56922b7ead07b465f559002b7b11d53f40937e95067ca1c/ruff-0.14.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:344d97172576d75dc6afc0e9243376dbe1668559c72de1864439c4fc95f78185", size = 14054487, upload-time = "2025-11-21T14:26:05.058Z" }, - { url = "https://files.pythonhosted.org/packages/67/0b/8e4e0639e4cc12547f41cb771b0b44ec8225b6b6a93393176d75fe6f7d40/ruff-0.14.6-py3-none-win32.whl", hash = "sha256:00169c0c8b85396516fdd9ce3446c7ca20c2a8f90a77aa945ba6b8f2bfe99e85", size = 13013361, upload-time = "2025-11-21T14:26:08.152Z" }, - { url = "https://files.pythonhosted.org/packages/fb/02/82240553b77fd1341f80ebb3eaae43ba011c7a91b4224a9f317d8e6591af/ruff-0.14.6-py3-none-win_amd64.whl", hash = "sha256:390e6480c5e3659f8a4c8d6a0373027820419ac14fa0d2713bd8e6c3e125b8b9", size = 14432087, upload-time = "2025-11-21T14:26:10.891Z" }, - { url = "https://files.pythonhosted.org/packages/a5/1f/93f9b0fad9470e4c829a5bb678da4012f0c710d09331b860ee555216f4ea/ruff-0.14.6-py3-none-win_arm64.whl", hash = "sha256:d43c81fbeae52cfa8728d8766bbf46ee4298c888072105815b392da70ca836b2", size = 13520930, upload-time = "2025-11-21T14:26:13.951Z" }, -] - -[[package]] -name = "scipy" -version = "1.16.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043, upload-time = "2025-10-28T17:32:40.285Z" }, - { url = 
"https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986, upload-time = "2025-10-28T17:32:45.325Z" }, - { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814, upload-time = "2025-10-28T17:32:49.277Z" }, - { url = "https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795, upload-time = "2025-10-28T17:32:53.337Z" }, - { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476, upload-time = "2025-10-28T17:32:58.353Z" }, - { url = "https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692, upload-time = "2025-10-28T17:33:03.88Z" }, - { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345, upload-time = "2025-10-28T17:33:09.773Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 38678975, upload-time = "2025-10-28T17:33:15.809Z" }, - { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926, upload-time = "2025-10-28T17:33:21.388Z" }, - { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014, upload-time = "2025-10-28T17:33:25.975Z" }, - { url = "https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856, upload-time = "2025-10-28T17:33:31.375Z" }, - { url = "https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306, upload-time = "2025-10-28T17:33:36.516Z" }, - { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371, upload-time = "2025-10-28T17:33:42.094Z" }, - { url = 
"https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877, upload-time = "2025-10-28T17:33:48.483Z" }, - { url = "https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103, upload-time = "2025-10-28T17:33:56.495Z" }, - { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297, upload-time = "2025-10-28T17:34:04.722Z" }, - { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756, upload-time = "2025-10-28T17:34:13.482Z" }, - { url = "https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566, upload-time = "2025-10-28T17:34:22.384Z" }, - { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877, upload-time = "2025-10-28T17:35:51.076Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366, upload-time = "2025-10-28T17:35:59.014Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931, upload-time = "2025-10-28T17:34:31.451Z" }, - { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081, upload-time = "2025-10-28T17:34:39.087Z" }, - { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244, upload-time = "2025-10-28T17:34:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753, upload-time = "2025-10-28T17:34:51.793Z" }, - { url = "https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912, upload-time = "2025-10-28T17:34:59.8Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371, upload-time = "2025-10-28T17:35:08.173Z" }, - { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477, upload-time = "2025-10-28T17:35:16.7Z" }, - { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678, upload-time = "2025-10-28T17:35:26.354Z" }, - { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178, upload-time = "2025-10-28T17:35:35.304Z" }, - { url = "https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246, upload-time = "2025-10-28T17:35:42.155Z" }, -] - -[[package]] -name = "setuptools" -version = "80.9.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, -] - -[[package]] -name = "shellingham" -version = "1.5.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, -] - -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = 
"sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, -] - -[[package]] -name = "snowballstemmer" -version = "3.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, -] - -[[package]] -name = "soupsieve" -version = "2.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, -] - -[[package]] -name = "sphinx" -version = "8.2.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alabaster" 
}, - { name = "babel" }, - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "docutils" }, - { name = "imagesize" }, - { name = "jinja2" }, - { name = "packaging" }, - { name = "pygments" }, - { name = "requests" }, - { name = "roman-numerals-py" }, - { name = "snowballstemmer" }, - { name = "sphinxcontrib-applehelp" }, - { name = "sphinxcontrib-devhelp" }, - { name = "sphinxcontrib-htmlhelp" }, - { name = "sphinxcontrib-jsmath" }, - { name = "sphinxcontrib-qthelp" }, - { name = "sphinxcontrib-serializinghtml" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" }, -] - -[[package]] -name = "sphinx-autodoc-typehints" -version = "3.5.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "sphinx" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/34/4f/4fd5583678bb7dc8afa69e9b309e6a99ee8d79ad3a4728f4e52fd7cb37c7/sphinx_autodoc_typehints-3.5.2.tar.gz", hash = "sha256:5fcd4a3eb7aa89424c1e2e32bedca66edc38367569c9169a80f4b3e934171fdb", size = 37839, upload-time = "2025-10-16T00:50:15.743Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/05/f2/9657c98a66973b7c35bfd48ba65d1922860de9598fbb535cd96e3f58a908/sphinx_autodoc_typehints-3.5.2-py3-none-any.whl", hash = "sha256:0accd043619f53c86705958e323b419e41667917045ac9215d7be1b493648d8c", size = 21184, upload-time = "2025-10-16T00:50:13.973Z" }, -] - -[[package]] -name = "sphinx-rtd-theme" -version = "3.0.2" -source = 
{ registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "sphinx" }, - { name = "sphinxcontrib-jquery" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463, upload-time = "2024-11-13T11:06:04.545Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561, upload-time = "2024-11-13T11:06:02.094Z" }, -] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, -] - -[[package]] -name = "sphinxcontrib-bibtex" -version = "2.6.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "docutils" }, - { name = "pybtex" }, - { name = "pybtex-docutils" }, - { name = "sphinx" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/de/83/1488c9879f2fa3c2cbd6f666c7a3a42a1fa9e08462bec73281fa6c092cba/sphinxcontrib_bibtex-2.6.5.tar.gz", hash = "sha256:9b3224dd6fece9268ebd8c905dc0a83ff2f6c54148a9235fe70e9d1e9ff149c0", size = 
118462, upload-time = "2025-06-27T10:40:14.061Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/a0/3a612da94f828f26cabb247817393e79472c32b12c49222bf85fb6d7b6c8/sphinxcontrib_bibtex-2.6.5-py3-none-any.whl", hash = "sha256:455ea4509642ea0b28ede3721550273626f85af65af01f161bfd8e19dc1edd7d", size = 40410, upload-time = "2025-06-27T10:40:12.274Z" }, -] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, -] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, -] - -[[package]] -name = "sphinxcontrib-jquery" -version = "4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = 
"sphinx" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331, upload-time = "2023-03-14T15:01:01.944Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104, upload-time = "2023-03-14T15:01:00.356Z" }, -] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, -] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = 
"sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, -] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "asttokens" }, - { name = "executing" }, - { name = "pure-eval" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" }, -] - -[[package]] -name = "sympy" -version = "1.14.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mpmath" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/83/d3/803453b36afefb7c2bb238361cd4ae6125a569b4db67cd9e79846ba2d68c/sympy-1.14.0.tar.gz", hash 
= "sha256:d3d3fe8df1e5a0b42f0e7bdf50541697dbe7d23746e894990c030e2b05e72517", size = 7793921, upload-time = "2025-04-27T18:05:01.611Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/09/77d55d46fd61b4a135c444fc97158ef34a095e5681d0a6c10b75bf356191/sympy-1.14.0-py3-none-any.whl", hash = "sha256:e091cc3e99d2141a0ba2847328f5479b05d94a6635cb96148ccb3f34671bd8f5", size = 6299353, upload-time = "2025-04-27T18:04:59.103Z" }, -] - -[[package]] -name = "tiktoken" -version = "0.12.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "regex" }, - { name = "requests" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, - { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = "2025-10-06T20:21:53.782Z" }, - { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, - { url = 
"https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, - { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, - { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, - { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, - { url = "https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, - { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, - { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, - { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, - { url = "https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, - { url = 
"https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, - { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, - { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, - { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, - { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, - { url = "https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, -] - -[[package]] -name = "tinycss2" -version = "1.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ 
- { name = "webencodings" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" }, -] - -[[package]] -name = "tokenize-rt" -version = "6.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/69/ed/8f07e893132d5051d86a553e749d5c89b2a4776eb3a579b72ed61f8559ca/tokenize_rt-6.2.0.tar.gz", hash = "sha256:8439c042b330c553fdbe1758e4a05c0ed460dbbbb24a606f11f0dee75da4cad6", size = 5476, upload-time = "2025-05-23T23:48:00.035Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/33/f0/3fe8c6e69135a845f4106f2ff8b6805638d4e85c264e70114e8126689587/tokenize_rt-6.2.0-py2.py3-none-any.whl", hash = "sha256:a152bf4f249c847a66497a4a95f63376ed68ac6abf092a2f7cfb29d044ecff44", size = 6004, upload-time = "2025-05-23T23:47:58.812Z" }, -] - -[[package]] -name = "tokenizers" -version = "0.22.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "huggingface-hub" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = 
"sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, - { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, - { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, - { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, - { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, - { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, - { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, - { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, - { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, - { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, - { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, - { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time 
= "2025-09-19T09:49:27.089Z" }, - { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, -] - -[[package]] -name = "tomli" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, - { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, - { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, - { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, - { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, - { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, - { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, - { url = 
"https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, - { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, - { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, - { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, - { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, - { url = 
"https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, -] - -[[package]] -name = "torch" -version = "2.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "filelock" }, - { name = "fsspec" }, - { name = "jinja2" }, - { name = "networkx" }, - { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cufft-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cufile-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "nvidia-nvshmem-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = 
"nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "setuptools" }, - { name = "sympy" }, - { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "typing-extensions" }, -] -wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/27/07c645c7673e73e53ded71705045d6cb5bae94c4b021b03aa8d03eee90ab/torch-2.9.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:da5f6f4d7f4940a173e5572791af238cb0b9e21b1aab592bd8b26da4c99f1cd6", size = 104126592, upload-time = "2025-11-12T15:20:41.62Z" }, - { url = "https://files.pythonhosted.org/packages/19/17/e377a460603132b00760511299fceba4102bd95db1a0ee788da21298ccff/torch-2.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:27331cd902fb4322252657f3902adf1c4f6acad9dcad81d8df3ae14c7c4f07c4", size = 899742281, upload-time = "2025-11-12T15:22:17.602Z" }, - { url = "https://files.pythonhosted.org/packages/b1/1a/64f5769025db846a82567fa5b7d21dba4558a7234ee631712ee4771c436c/torch-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:81a285002d7b8cfd3fdf1b98aa8df138d41f1a8334fd9ea37511517cedf43083", size = 110940568, upload-time = "2025-11-12T15:21:18.689Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ab/07739fd776618e5882661d04c43f5b5586323e2f6a2d7d84aac20d8f20bd/torch-2.9.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:c0d25d1d8e531b8343bea0ed811d5d528958f1dcbd37e7245bc686273177ad7e", size = 74479191, upload-time = "2025-11-12T15:21:25.816Z" }, - { url = "https://files.pythonhosted.org/packages/20/60/8fc5e828d050bddfab469b3fe78e5ab9a7e53dda9c3bdc6a43d17ce99e63/torch-2.9.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c29455d2b910b98738131990394da3e50eea8291dfeb4b12de71ecf1fdeb21cb", size = 104135743, upload-time = "2025-11-12T15:21:34.936Z" }, - { url = 
"https://files.pythonhosted.org/packages/f2/b7/6d3f80e6918213babddb2a37b46dbb14c15b14c5f473e347869a51f40e1f/torch-2.9.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:524de44cd13931208ba2c4bde9ec7741fd4ae6bfd06409a604fc32f6520c2bc9", size = 899749493, upload-time = "2025-11-12T15:24:36.356Z" }, - { url = "https://files.pythonhosted.org/packages/a6/47/c7843d69d6de8938c1cbb1eba426b1d48ddf375f101473d3e31a5fc52b74/torch-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:545844cc16b3f91e08ce3b40e9c2d77012dd33a48d505aed34b7740ed627a1b2", size = 110944162, upload-time = "2025-11-12T15:21:53.151Z" }, - { url = "https://files.pythonhosted.org/packages/28/0e/2a37247957e72c12151b33a01e4df651d9d155dd74d8cfcbfad15a79b44a/torch-2.9.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5be4bf7496f1e3ffb1dd44b672adb1ac3f081f204c5ca81eba6442f5f634df8e", size = 74830751, upload-time = "2025-11-12T15:21:43.792Z" }, - { url = "https://files.pythonhosted.org/packages/4b/f7/7a18745edcd7b9ca2381aa03353647bca8aace91683c4975f19ac233809d/torch-2.9.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:30a3e170a84894f3652434b56d59a64a2c11366b0ed5776fab33c2439396bf9a", size = 104142929, upload-time = "2025-11-12T15:21:48.319Z" }, - { url = "https://files.pythonhosted.org/packages/f4/dd/f1c0d879f2863ef209e18823a988dc7a1bf40470750e3ebe927efdb9407f/torch-2.9.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:8301a7b431e51764629208d0edaa4f9e4c33e6df0f2f90b90e261d623df6a4e2", size = 899748978, upload-time = "2025-11-12T15:23:04.568Z" }, - { url = "https://files.pythonhosted.org/packages/1f/9f/6986b83a53b4d043e36f3f898b798ab51f7f20fdf1a9b01a2720f445043d/torch-2.9.1-cp313-cp313t-win_amd64.whl", hash = "sha256:2e1c42c0ae92bf803a4b2409fdfed85e30f9027a66887f5e7dcdbc014c7531db", size = 111176995, upload-time = "2025-11-12T15:22:01.618Z" }, - { url = 
"https://files.pythonhosted.org/packages/40/60/71c698b466dd01e65d0e9514b5405faae200c52a76901baf6906856f17e4/torch-2.9.1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:2c14b3da5df416cf9cb5efab83aa3056f5b8cd8620b8fde81b4987ecab730587", size = 74480347, upload-time = "2025-11-12T15:21:57.648Z" }, -] - -[[package]] -name = "tornado" -version = "6.5.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821, upload-time = "2025-08-08T18:27:00.78Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563, upload-time = "2025-08-08T18:26:42.945Z" }, - { url = "https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729, upload-time = "2025-08-08T18:26:44.473Z" }, - { url = "https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295, upload-time = "2025-08-08T18:26:46.021Z" }, - { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644, upload-time = 
"2025-08-08T18:26:47.625Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878, upload-time = "2025-08-08T18:26:50.599Z" }, - { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549, upload-time = "2025-08-08T18:26:51.864Z" }, - { url = "https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973, upload-time = "2025-08-08T18:26:53.625Z" }, - { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954, upload-time = "2025-08-08T18:26:55.072Z" }, - { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023, upload-time = "2025-08-08T18:26:56.677Z" }, - { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427, upload-time = "2025-08-08T18:26:57.91Z" }, - { url = 
"https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456, upload-time = "2025-08-08T18:26:59.207Z" }, -] - -[[package]] -name = "tqdm" -version = "4.67.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, -] - -[[package]] -name = "traitlets" -version = "5.14.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, -] - -[[package]] -name = "triton" -version = "3.5.1" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/f2/50/9a8358d3ef58162c0a415d173cfb45b67de60176e1024f71fbc4d24c0b6d/triton-3.5.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d2c6b915a03888ab931a9fd3e55ba36785e1fe70cbea0b40c6ef93b20fc85232", size = 170470207, upload-time = "2025-11-11T17:41:00.253Z" }, - { url = "https://files.pythonhosted.org/packages/27/46/8c3bbb5b0a19313f50edcaa363b599e5a1a5ac9683ead82b9b80fe497c8d/triton-3.5.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3f4346b6ebbd4fad18773f5ba839114f4826037c9f2f34e0148894cd5dd3dba", size = 170470410, upload-time = "2025-11-11T17:41:06.319Z" }, - { url = "https://files.pythonhosted.org/packages/37/92/e97fcc6b2c27cdb87ce5ee063d77f8f26f19f06916aa680464c8104ef0f6/triton-3.5.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0b4d2c70127fca6a23e247f9348b8adde979d2e7a20391bfbabaac6aebc7e6a8", size = 170579924, upload-time = "2025-11-11T17:41:12.455Z" }, -] - -[[package]] -name = "typer-slim" -version = "0.20.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/8e/45/81b94a52caed434b94da65729c03ad0fb7665fab0f7db9ee54c94e541403/typer_slim-0.20.0.tar.gz", hash = "sha256:9fc6607b3c6c20f5c33ea9590cbeb17848667c51feee27d9e314a579ab07d1a3", size = 106561, upload-time = "2025-10-20T17:03:46.642Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/dd/5cbf31f402f1cc0ab087c94d4669cfa55bd1e818688b910631e131d74e75/typer_slim-0.20.0-py3-none-any.whl", hash = "sha256:f42a9b7571a12b97dddf364745d29f12221865acef7a2680065f9bb29c7dc89d", size = 47087, upload-time = "2025-10-20T17:03:44.546Z" }, -] - -[[package]] -name = "typing-extensions" -version = "4.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, -] - -[[package]] -name = "typing-inspection" -version = "0.4.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, -] - -[[package]] -name = "urllib3" -version = "2.5.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = 
"sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, -] - -[[package]] -name = "wcwidth" -version = "0.2.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" }, -] - -[[package]] -name = "webencodings" -version = "0.5.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, -] - -[[package]] -name = "wrapt" -version = "2.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/2a/6de8a50cb435b7f42c46126cf1a54b2aab81784e74c8595c8e025e8f36d3/wrapt-2.0.1.tar.gz", hash = "sha256:9c9c635e78497cacb81e84f8b11b23e0aacac7a136e73b8e5b2109a1d9fc468f", size = 82040, upload-time = "2025-11-07T00:45:33.312Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/cb/73/8cb252858dc8254baa0ce58ce382858e3a1cf616acebc497cb13374c95c6/wrapt-2.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1fdbb34da15450f2b1d735a0e969c24bdb8d8924892380126e2a293d9902078c", size = 78129, upload-time = "2025-11-07T00:43:48.852Z" }, - { url = "https://files.pythonhosted.org/packages/19/42/44a0db2108526ee6e17a5ab72478061158f34b08b793df251d9fbb9a7eb4/wrapt-2.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3d32794fe940b7000f0519904e247f902f0149edbe6316c710a8562fb6738841", size = 61205, upload-time = "2025-11-07T00:43:50.402Z" }, - { url = "https://files.pythonhosted.org/packages/4d/8a/5b4b1e44b791c22046e90d9b175f9a7581a8cc7a0debbb930f81e6ae8e25/wrapt-2.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:386fb54d9cd903ee0012c09291336469eb7b244f7183d40dc3e86a16a4bace62", size = 61692, upload-time = "2025-11-07T00:43:51.678Z" }, - { url = "https://files.pythonhosted.org/packages/11/53/3e794346c39f462bcf1f58ac0487ff9bdad02f9b6d5ee2dc84c72e0243b2/wrapt-2.0.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7b219cb2182f230676308cdcacd428fa837987b89e4b7c5c9025088b8a6c9faf", size = 121492, upload-time = "2025-11-07T00:43:55.017Z" }, - { url = "https://files.pythonhosted.org/packages/c6/7e/10b7b0e8841e684c8ca76b462a9091c45d62e8f2de9c4b1390b690eadf16/wrapt-2.0.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:641e94e789b5f6b4822bb8d8ebbdfc10f4e4eae7756d648b717d980f657a9eb9", size = 123064, upload-time = "2025-11-07T00:43:56.323Z" }, - { url = "https://files.pythonhosted.org/packages/0e/d1/3c1e4321fc2f5ee7fd866b2d822aa89b84495f28676fd976c47327c5b6aa/wrapt-2.0.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe21b118b9f58859b5ebaa4b130dee18669df4bd111daad082b7beb8799ad16b", size = 117403, upload-time = "2025-11-07T00:43:53.258Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/b0/d2f0a413cf201c8c2466de08414a15420a25aa83f53e647b7255cc2fab5d/wrapt-2.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:17fb85fa4abc26a5184d93b3efd2dcc14deb4b09edcdb3535a536ad34f0b4dba", size = 121500, upload-time = "2025-11-07T00:43:57.468Z" }, - { url = "https://files.pythonhosted.org/packages/bd/45/bddb11d28ca39970a41ed48a26d210505120f925918592283369219f83cc/wrapt-2.0.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:b89ef9223d665ab255ae42cc282d27d69704d94be0deffc8b9d919179a609684", size = 116299, upload-time = "2025-11-07T00:43:58.877Z" }, - { url = "https://files.pythonhosted.org/packages/81/af/34ba6dd570ef7a534e7eec0c25e2615c355602c52aba59413411c025a0cb/wrapt-2.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a453257f19c31b31ba593c30d997d6e5be39e3b5ad9148c2af5a7314061c63eb", size = 120622, upload-time = "2025-11-07T00:43:59.962Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3e/693a13b4146646fb03254636f8bafd20c621955d27d65b15de07ab886187/wrapt-2.0.1-cp312-cp312-win32.whl", hash = "sha256:3e271346f01e9c8b1130a6a3b0e11908049fe5be2d365a5f402778049147e7e9", size = 58246, upload-time = "2025-11-07T00:44:03.169Z" }, - { url = "https://files.pythonhosted.org/packages/a7/36/715ec5076f925a6be95f37917b66ebbeaa1372d1862c2ccd7a751574b068/wrapt-2.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:2da620b31a90cdefa9cd0c2b661882329e2e19d1d7b9b920189956b76c564d75", size = 60492, upload-time = "2025-11-07T00:44:01.027Z" }, - { url = "https://files.pythonhosted.org/packages/ef/3e/62451cd7d80f65cc125f2b426b25fbb6c514bf6f7011a0c3904fc8c8df90/wrapt-2.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:aea9c7224c302bc8bfc892b908537f56c430802560e827b75ecbde81b604598b", size = 58987, upload-time = "2025-11-07T00:44:02.095Z" }, - { url = "https://files.pythonhosted.org/packages/ad/fe/41af4c46b5e498c90fc87981ab2972fbd9f0bccda597adb99d3d3441b94b/wrapt-2.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:47b0f8bafe90f7736151f61482c583c86b0693d80f075a58701dd1549b0010a9", size = 78132, upload-time = "2025-11-07T00:44:04.628Z" }, - { url = "https://files.pythonhosted.org/packages/1c/92/d68895a984a5ebbbfb175512b0c0aad872354a4a2484fbd5552e9f275316/wrapt-2.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cbeb0971e13b4bd81d34169ed57a6dda017328d1a22b62fda45e1d21dd06148f", size = 61211, upload-time = "2025-11-07T00:44:05.626Z" }, - { url = "https://files.pythonhosted.org/packages/e8/26/ba83dc5ae7cf5aa2b02364a3d9cf74374b86169906a1f3ade9a2d03cf21c/wrapt-2.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb7cffe572ad0a141a7886a1d2efa5bef0bf7fe021deeea76b3ab334d2c38218", size = 61689, upload-time = "2025-11-07T00:44:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/cf/67/d7a7c276d874e5d26738c22444d466a3a64ed541f6ef35f740dbd865bab4/wrapt-2.0.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c8d60527d1ecfc131426b10d93ab5d53e08a09c5fa0175f6b21b3252080c70a9", size = 121502, upload-time = "2025-11-07T00:44:09.557Z" }, - { url = "https://files.pythonhosted.org/packages/0f/6b/806dbf6dd9579556aab22fc92908a876636e250f063f71548a8660382184/wrapt-2.0.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c654eafb01afac55246053d67a4b9a984a3567c3808bb7df2f8de1c1caba2e1c", size = 123110, upload-time = "2025-11-07T00:44:10.64Z" }, - { url = "https://files.pythonhosted.org/packages/e5/08/cdbb965fbe4c02c5233d185d070cabed2ecc1f1e47662854f95d77613f57/wrapt-2.0.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:98d873ed6c8b4ee2418f7afce666751854d6d03e3c0ec2a399bb039cd2ae89db", size = 117434, upload-time = "2025-11-07T00:44:08.138Z" }, - { url = "https://files.pythonhosted.org/packages/2d/d1/6aae2ce39db4cb5216302fa2e9577ad74424dfbe315bd6669725569e048c/wrapt-2.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:c9e850f5b7fc67af856ff054c71690d54fa940c3ef74209ad9f935b4f66a0233", size = 121533, upload-time = "2025-11-07T00:44:12.142Z" }, - { url = "https://files.pythonhosted.org/packages/79/35/565abf57559fbe0a9155c29879ff43ce8bd28d2ca61033a3a3dd67b70794/wrapt-2.0.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e505629359cb5f751e16e30cf3f91a1d3ddb4552480c205947da415d597f7ac2", size = 116324, upload-time = "2025-11-07T00:44:13.28Z" }, - { url = "https://files.pythonhosted.org/packages/e1/e0/53ff5e76587822ee33e560ad55876d858e384158272cd9947abdd4ad42ca/wrapt-2.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2879af909312d0baf35f08edeea918ee3af7ab57c37fe47cb6a373c9f2749c7b", size = 120627, upload-time = "2025-11-07T00:44:14.431Z" }, - { url = "https://files.pythonhosted.org/packages/7c/7b/38df30fd629fbd7612c407643c63e80e1c60bcc982e30ceeae163a9800e7/wrapt-2.0.1-cp313-cp313-win32.whl", hash = "sha256:d67956c676be5a24102c7407a71f4126d30de2a569a1c7871c9f3cabc94225d7", size = 58252, upload-time = "2025-11-07T00:44:17.814Z" }, - { url = "https://files.pythonhosted.org/packages/85/64/d3954e836ea67c4d3ad5285e5c8fd9d362fd0a189a2db622df457b0f4f6a/wrapt-2.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9ca66b38dd642bf90c59b6738af8070747b610115a39af2498535f62b5cdc1c3", size = 60500, upload-time = "2025-11-07T00:44:15.561Z" }, - { url = "https://files.pythonhosted.org/packages/89/4e/3c8b99ac93527cfab7f116089db120fef16aac96e5f6cdb724ddf286086d/wrapt-2.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:5a4939eae35db6b6cec8e7aa0e833dcca0acad8231672c26c2a9ab7a0f8ac9c8", size = 58993, upload-time = "2025-11-07T00:44:16.65Z" }, - { url = "https://files.pythonhosted.org/packages/f9/f4/eff2b7d711cae20d220780b9300faa05558660afb93f2ff5db61fe725b9a/wrapt-2.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a52f93d95c8d38fed0669da2ebdb0b0376e895d84596a976c15a9eb45e3eccb3", size = 82028, upload-time = "2025-11-07T00:44:18.944Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/67/cb945563f66fd0f61a999339460d950f4735c69f18f0a87ca586319b1778/wrapt-2.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e54bbf554ee29fcceee24fa41c4d091398b911da6e7f5d7bffda963c9aed2e1", size = 62949, upload-time = "2025-11-07T00:44:20.074Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ca/f63e177f0bbe1e5cf5e8d9b74a286537cd709724384ff20860f8f6065904/wrapt-2.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:908f8c6c71557f4deaa280f55d0728c3bca0960e8c3dd5ceeeafb3c19942719d", size = 63681, upload-time = "2025-11-07T00:44:21.345Z" }, - { url = "https://files.pythonhosted.org/packages/39/a1/1b88fcd21fd835dca48b556daef750952e917a2794fa20c025489e2e1f0f/wrapt-2.0.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e2f84e9af2060e3904a32cea9bb6db23ce3f91cfd90c6b426757cf7cc01c45c7", size = 152696, upload-time = "2025-11-07T00:44:24.318Z" }, - { url = "https://files.pythonhosted.org/packages/62/1c/d9185500c1960d9f5f77b9c0b890b7fc62282b53af7ad1b6bd779157f714/wrapt-2.0.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e3612dc06b436968dfb9142c62e5dfa9eb5924f91120b3c8ff501ad878f90eb3", size = 158859, upload-time = "2025-11-07T00:44:25.494Z" }, - { url = "https://files.pythonhosted.org/packages/91/60/5d796ed0f481ec003220c7878a1d6894652efe089853a208ea0838c13086/wrapt-2.0.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d2d947d266d99a1477cd005b23cbd09465276e302515e122df56bb9511aca1b", size = 146068, upload-time = "2025-11-07T00:44:22.81Z" }, - { url = "https://files.pythonhosted.org/packages/04/f8/75282dd72f102ddbfba137e1e15ecba47b40acff32c08ae97edbf53f469e/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:7d539241e87b650cbc4c3ac9f32c8d1ac8a54e510f6dca3f6ab60dcfd48c9b10", size = 155724, upload-time = "2025-11-07T00:44:26.634Z" }, - { url = 
"https://files.pythonhosted.org/packages/5a/27/fe39c51d1b344caebb4a6a9372157bdb8d25b194b3561b52c8ffc40ac7d1/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4811e15d88ee62dbf5c77f2c3ff3932b1e3ac92323ba3912f51fc4016ce81ecf", size = 144413, upload-time = "2025-11-07T00:44:27.939Z" }, - { url = "https://files.pythonhosted.org/packages/83/2b/9f6b643fe39d4505c7bf926d7c2595b7cb4b607c8c6b500e56c6b36ac238/wrapt-2.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c1c91405fcf1d501fa5d55df21e58ea49e6b879ae829f1039faaf7e5e509b41e", size = 150325, upload-time = "2025-11-07T00:44:29.29Z" }, - { url = "https://files.pythonhosted.org/packages/bb/b6/20ffcf2558596a7f58a2e69c89597128781f0b88e124bf5a4cadc05b8139/wrapt-2.0.1-cp313-cp313t-win32.whl", hash = "sha256:e76e3f91f864e89db8b8d2a8311d57df93f01ad6bb1e9b9976d1f2e83e18315c", size = 59943, upload-time = "2025-11-07T00:44:33.211Z" }, - { url = "https://files.pythonhosted.org/packages/87/6a/0e56111cbb3320151eed5d3821ee1373be13e05b376ea0870711f18810c3/wrapt-2.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:83ce30937f0ba0d28818807b303a412440c4b63e39d3d8fc036a94764b728c92", size = 63240, upload-time = "2025-11-07T00:44:30.935Z" }, - { url = "https://files.pythonhosted.org/packages/1d/54/5ab4c53ea1f7f7e5c3e7c1095db92932cc32fd62359d285486d00c2884c3/wrapt-2.0.1-cp313-cp313t-win_arm64.whl", hash = "sha256:4b55cacc57e1dc2d0991dbe74c6419ffd415fb66474a02335cb10efd1aa3f84f", size = 60416, upload-time = "2025-11-07T00:44:32.002Z" }, - { url = "https://files.pythonhosted.org/packages/15/d1/b51471c11592ff9c012bd3e2f7334a6ff2f42a7aed2caffcf0bdddc9cb89/wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca", size = 44046, upload-time = "2025-11-07T00:45:32.116Z" }, -] - -[[package]] -name = "yarl" -version = "1.22.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "idna" }, - { name = "multidict" }, - { name = "propcache" }, -] -sdist 
= { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, - { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, - { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, - { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, - { url = 
"https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, - { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, - { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, - { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, - { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, - { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, - { url = 
"https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, - { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, - { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, - { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, - { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, - { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, - { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, - { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, - { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, - { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, - { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, - { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, - { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, - { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, - { url = 
"https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, - { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, - { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, - { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, - { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, - { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, - { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" }, - { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, - { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, - { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, - { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, - { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, - { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, - { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, - { url = 
"https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, - { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, -] - -[[package]] -name = "zipp" -version = "3.23.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, -] From c754b3e4ea0b0896aa37683c1a7f76b65fe642c5 Mon Sep 17 00:00:00 2001 From: datvo06 Date: Thu, 26 Feb 2026 12:43:25 -0500 Subject: [PATCH 35/39] Update mypy type check to support type check for type --- effectful/handlers/llm/encoding.py | 3 + effectful/handlers/llm/evaluation.py | 50 ++++--- tests/test_handlers_llm_encoding.py | 190 +++++++++++++++++++++++++++ 3 files changed, 222 insertions(+), 21 deletions(-) diff --git a/effectful/handlers/llm/encoding.py b/effectful/handlers/llm/encoding.py index dac57a1c..76c8008d 100644 --- a/effectful/handlers/llm/encoding.py +++ b/effectful/handlers/llm/encoding.py @@ -682,6 +682,9 @@ def decode(self, encoded_value: SynthesizedType) -> type: # Parse via evaluation effect (also 
registers source in linecache) tree = evaluation.parse(module_code, filename) + # Type-check the synthesized module + evaluation.type_check(tree, self.ctx, None, type) + # Inject __firstlineno__ into class bodies for Python 3.13+ compatibility # inspect.getsource() looks for __firstlineno__ in vars(cls), # which requires it to be in the class's __dict__. diff --git a/effectful/handlers/llm/evaluation.py b/effectful/handlers/llm/evaluation.py index 07348cc9..1624d405 100644 --- a/effectful/handlers/llm/evaluation.py +++ b/effectful/handlers/llm/evaluation.py @@ -564,12 +564,11 @@ def mypy_type_check( if not module.body: raise TypeError("mypy_type_check: module.body is empty") last = module.body[-1] - if not isinstance(last, ast.FunctionDef): + if not isinstance(last, ast.FunctionDef | ast.ClassDef): raise TypeError( - f"mypy_type_check: last statement must be a function definition, " + f"mypy_type_check: last statement must be a function or class definition, " f"got {type(last).__name__}" ) - func_name = last.name imports = collect_imports(ctx) # Ensure annotations in the postlude can be resolved (e.g. 
collections.abc.Callable, typing) @@ -614,33 +613,42 @@ def mypy_type_check( stub_module_body = ast.Module(body=module_body, type_ignores=[]) _RenameTransformer(rename_map).visit(stub_module_body) module_body = stub_module_body.body - tc_func_name = rename_map.get(func_name, func_name) else: module_body = list(module.body) - tc_func_name = func_name - - param_types = expected_params - expected_callable_type: type = typing.cast( - type, - collections.abc.Callable[param_types, expected_return] - if expected_params is not None - else collections.abc.Callable[..., expected_return], - ) - expected_callable_ast = type_to_ast(expected_callable_type) - postlude = ast.AnnAssign( - target=ast.Name(id="_synthesized_check", ctx=ast.Store()), - annotation=expected_callable_ast, - value=ast.Name(id=tc_func_name, ctx=ast.Load()), - simple=1, - ) + postlude: list[ast.stmt] = [] + if isinstance(last, ast.FunctionDef): + func_name = last.name + tc_func_name = ( + rename_map.get(func_name, func_name) + if colliding_names + else func_name + ) + param_types = expected_params + expected_callable_type: type = typing.cast( + type, + collections.abc.Callable[param_types, expected_return] + if expected_params is not None + else collections.abc.Callable[..., expected_return], + ) + expected_callable_ast = type_to_ast(expected_callable_type) + postlude = [ + ast.AnnAssign( + target=ast.Name(id="_synthesized_check", ctx=ast.Store()), + annotation=expected_callable_ast, + value=ast.Name(id=tc_func_name, ctx=ast.Load()), + simple=1, + ) + ] + # For ClassDef: no postlude needed, mypy checks the class body directly. 
+ full_body = ( baseline_imports + list(imports) + list(stubs) + list(variables) + module_body - + [postlude] + + postlude ) stub_module = ast.Module(body=full_body, type_ignores=[]) source = ast.unparse(ast.fix_missing_locations(stub_module)) diff --git a/tests/test_handlers_llm_encoding.py b/tests/test_handlers_llm_encoding.py index bbcda789..eeafe4dc 100644 --- a/tests/test_handlers_llm_encoding.py +++ b/tests/test_handlers_llm_encoding.py @@ -21,7 +21,9 @@ from effectful.handlers.llm.encoding import ( DecodedToolCall, Encodable, + SynthesisError, SynthesizedFunction, + SynthesizedType, ) from effectful.handlers.llm.evaluation import RestrictedEvalProvider, UnsafeEvalProvider from effectful.handlers.llm.template import Tool @@ -680,6 +682,194 @@ def __call__(self): enc.encode(_NoDocCallable()) +# ============================================================================ +# Type: roundtrip, type_check pass/fail, serialize/deserialize +# ============================================================================ + +# NOTE: RestrictedPython rejects __firstlineno__ injection and underscore-prefixed +# names used in class synthesis. Type synthesis tests use UnsafeEvalProvider only. 
+ +_UNSAFE_PROVIDER = pytest.param(UnsafeEvalProvider(), id="unsafe") + + +class SimplePoint: + x: int + y: int + + def __init__(self, x: int, y: int): + self.x = x + self.y = y + + +class Greeter: + def hello(self) -> str: + return "world" + + +# --- pass cases: type_check should succeed --- + + +def test_type_encode_decode_simple_class(): + """Roundtrip encode/decode of a simple class with typed attributes.""" + enc = Encodable.define(type) + with handler(UnsafeEvalProvider()): + decoded = enc.decode(enc.encode(SimplePoint)) + assert isinstance(decoded, type) + assert decoded.__name__ == "SimplePoint" + obj = decoded(1, 2) + assert obj.x == 1 + assert obj.y == 2 + + +def test_type_decode_valid_class_code(): + """Decode hand-crafted SynthesizedType with valid class code.""" + code = SynthesizedType( + type_name="Adder", + module_code="class Adder:\n def add(self, a: int, b: int) -> int:\n return a + b\n", + ) + enc = Encodable.define(type) + with handler(UnsafeEvalProvider()): + decoded = enc.decode(code) + assert decoded.__name__ == "Adder" + assert decoded().add(3, 4) == 7 + + +def test_type_decode_class_with_context(): + """Decode a class that references a type from lexical context.""" + code = SynthesizedType( + type_name="ChildPoint", + module_code=( + "class ChildPoint(BasePoint):\n" + " z: int\n" + " def __init__(self, x: int, y: int, z: int):\n" + " super().__init__(x, y)\n" + " self.z = z\n" + ), + ) + + class BasePoint: + x: int + y: int + + def __init__(self, x: int, y: int): + self.x = x + self.y = y + + ctx = {"BasePoint": BasePoint} + enc = Encodable.define(type, ctx) + with handler(UnsafeEvalProvider()): + decoded = enc.decode(code) + obj = decoded(1, 2, 3) + assert obj.x == 1 and obj.y == 2 and obj.z == 3 + assert isinstance(obj, BasePoint) + + +def test_type_full_pipeline(): + """Full encode->serialize->deserialize->decode pipeline.""" + enc = Encodable.define(type) + encoded = enc.encode(Greeter) + serialized = enc.serialize(encoded) + 
deserialized = enc.deserialize(serialized[0]["text"]) + with handler(UnsafeEvalProvider()): + decoded = enc.decode(deserialized) + assert isinstance(decoded, type) + assert decoded().hello() == "world" + + +def test_type_inspect_getsource_works(): + """inspect.getsource() works on decoded synthesized types.""" + code = SynthesizedType( + type_name="Greeter", + module_code="class Greeter:\n def hello(self) -> str:\n return 'world'\n", + ) + enc = Encodable.define(type) + with handler(UnsafeEvalProvider()): + decoded = enc.decode(code) + source = inspect.getsource(decoded) + assert "class Greeter" in source + assert "hello" in source + + +# --- fail cases: type_check should reject / decode should raise --- + + +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_decode_syntax_error(eval_provider): + """Syntax error in module_code raises SynthesisError.""" + code = SynthesizedType( + type_name="Bad", + module_code="class Bad:\n def __init__(self) # missing colon\n pass\n", + ) + enc = Encodable.define(type) + with pytest.raises(SynthesisError): + with handler(eval_provider): + enc.decode(code) + + +def test_type_decode_missing_type_name(): + """Code executes but doesn't define the expected type name.""" + code = SynthesizedType( + type_name="Expected", + module_code="class Actual:\n pass\n", + ) + enc = Encodable.define(type) + with pytest.raises(SynthesisError, match="Expected"): + with handler(UnsafeEvalProvider()): + enc.decode(code) + + +def test_type_decode_not_a_type(): + """Code that doesn't define a class is rejected by type_check or post-exec check.""" + code = SynthesizedType( + type_name="MyType", + module_code="MyType = 42\n", + ) + enc = Encodable.define(type) + with pytest.raises(SynthesisError): + with handler(UnsafeEvalProvider()): + enc.decode(code) + + +def test_type_decode_undefined_base_class(): + """Code references an undefined base class not in context.""" + code = SynthesizedType( + type_name="Child", + module_code="class 
Child(UndefinedBase):\n pass\n", + ) + enc = Encodable.define(type, {}) + with pytest.raises(SynthesisError): + with handler(UnsafeEvalProvider()): + enc.decode(code) + + +def test_type_decode_runtime_error_in_class_body(): + """Class body raises an error during execution.""" + code = SynthesizedType( + type_name="Broken", + module_code="class Broken:\n x = 1 / 0\n", + ) + enc = Encodable.define(type) + with pytest.raises(SynthesisError, match="Evaluation failed"): + with handler(UnsafeEvalProvider()): + enc.decode(code) + + +def test_type_decode_type_check_catches_bad_method_types(): + """type_check rejects a class with mistyped method (returns str, annotation says int).""" + code = SynthesizedType( + type_name="BadTypes", + module_code=( + "class BadTypes:\n" + " def compute(self) -> int:\n" + ' return "not an int"\n' + ), + ) + enc = Encodable.define(type) + with pytest.raises((TypeError, SynthesisError)): + with handler(UnsafeEvalProvider()): + enc.decode(code) + + # --------------------------------------------------------------------------- # Provider integration tests # --------------------------------------------------------------------------- From fe7e41cc2e076753c7dd52b7329dea860e47fa80 Mon Sep 17 00:00:00 2001 From: datvo06 Date: Thu, 26 Feb 2026 12:47:17 -0500 Subject: [PATCH 36/39] Lint --- effectful/handlers/llm/encoding.py | 24 ++-------- effectful/handlers/llm/evaluation.py | 4 +- tests/test_handlers_llm_encoding.py | 72 +++++++++++++++------------- 3 files changed, 44 insertions(+), 56 deletions(-) diff --git a/effectful/handlers/llm/encoding.py b/effectful/handlers/llm/encoding.py index 76c8008d..ec0960b7 100644 --- a/effectful/handlers/llm/encoding.py +++ b/effectful/handlers/llm/encoding.py @@ -597,14 +597,6 @@ def deserialize(self, serialized_value: str) -> SynthesizedFunction: return SynthesizedFunction.model_validate_json(serialized_value) -class SynthesisError(Exception): - """Raised when type synthesis fails.""" - - def __init__(self, 
message: str, code: str | None = None): - super().__init__(message) - self.code = code - - class _PyMappingProxyObject(ctypes.Structure): """Internal ctypes structure to access the underlying dict of a mappingproxy.""" @@ -704,25 +696,19 @@ def decode(self, encoded_value: SynthesizedType) -> type: code_obj = evaluation.compile(tree, filename) evaluation.exec(code_obj, g) except SyntaxError as exc: - raise SynthesisError( - f"Syntax error in generated code: {exc}", module_code - ) from exc - except Exception as exc: - raise SynthesisError(f"Evaluation failed: {exc!r}", module_code) from exc + raise ValueError(f"Syntax error in generated code: {exc}") from exc if type_name not in g: - raise SynthesisError( + raise ValueError( f"Type '{type_name}' not found after execution. " - f"Available names: {[k for k in g.keys() if not k.startswith('_')]}", - module_code, + f"Available names: {[k for k in g.keys() if not k.startswith('_')]}" ) synthesized_type = g[type_name] if not isinstance(synthesized_type, type): - raise SynthesisError( - f"'{type_name}' is not a type, got {type(synthesized_type).__name__}", - module_code, + raise ValueError( + f"'{type_name}' is not a type, got {type(synthesized_type).__name__}" ) # Attach source code and module name diff --git a/effectful/handlers/llm/evaluation.py b/effectful/handlers/llm/evaluation.py index 1624d405..e7042486 100644 --- a/effectful/handlers/llm/evaluation.py +++ b/effectful/handlers/llm/evaluation.py @@ -620,9 +620,7 @@ def mypy_type_check( if isinstance(last, ast.FunctionDef): func_name = last.name tc_func_name = ( - rename_map.get(func_name, func_name) - if colliding_names - else func_name + rename_map.get(func_name, func_name) if colliding_names else func_name ) param_types = expected_params expected_callable_type: type = typing.cast( diff --git a/tests/test_handlers_llm_encoding.py b/tests/test_handlers_llm_encoding.py index eeafe4dc..4491b38f 100644 --- a/tests/test_handlers_llm_encoding.py +++ 
b/tests/test_handlers_llm_encoding.py @@ -21,7 +21,6 @@ from effectful.handlers.llm.encoding import ( DecodedToolCall, Encodable, - SynthesisError, SynthesizedFunction, SynthesizedType, ) @@ -686,11 +685,6 @@ def __call__(self): # Type: roundtrip, type_check pass/fail, serialize/deserialize # ============================================================================ -# NOTE: RestrictedPython rejects __firstlineno__ injection and underscore-prefixed -# names used in class synthesis. Type synthesis tests use UnsafeEvalProvider only. - -_UNSAFE_PROVIDER = pytest.param(UnsafeEvalProvider(), id="unsafe") - class SimplePoint: x: int @@ -709,10 +703,11 @@ def hello(self) -> str: # --- pass cases: type_check should succeed --- -def test_type_encode_decode_simple_class(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_encode_decode_simple_class(eval_provider): """Roundtrip encode/decode of a simple class with typed attributes.""" enc = Encodable.define(type) - with handler(UnsafeEvalProvider()): + with handler(eval_provider): decoded = enc.decode(enc.encode(SimplePoint)) assert isinstance(decoded, type) assert decoded.__name__ == "SimplePoint" @@ -721,20 +716,22 @@ def test_type_encode_decode_simple_class(): assert obj.y == 2 -def test_type_decode_valid_class_code(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_decode_valid_class_code(eval_provider): """Decode hand-crafted SynthesizedType with valid class code.""" code = SynthesizedType( type_name="Adder", module_code="class Adder:\n def add(self, a: int, b: int) -> int:\n return a + b\n", ) enc = Encodable.define(type) - with handler(UnsafeEvalProvider()): + with handler(eval_provider): decoded = enc.decode(code) assert decoded.__name__ == "Adder" assert decoded().add(3, 4) == 7 -def test_type_decode_class_with_context(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_decode_class_with_context(eval_provider): """Decode a class that 
references a type from lexical context.""" code = SynthesizedType( type_name="ChildPoint", @@ -757,33 +754,35 @@ def __init__(self, x: int, y: int): ctx = {"BasePoint": BasePoint} enc = Encodable.define(type, ctx) - with handler(UnsafeEvalProvider()): + with handler(eval_provider): decoded = enc.decode(code) obj = decoded(1, 2, 3) assert obj.x == 1 and obj.y == 2 and obj.z == 3 assert isinstance(obj, BasePoint) -def test_type_full_pipeline(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_full_pipeline(eval_provider): """Full encode->serialize->deserialize->decode pipeline.""" enc = Encodable.define(type) encoded = enc.encode(Greeter) serialized = enc.serialize(encoded) deserialized = enc.deserialize(serialized[0]["text"]) - with handler(UnsafeEvalProvider()): + with handler(eval_provider): decoded = enc.decode(deserialized) assert isinstance(decoded, type) assert decoded().hello() == "world" -def test_type_inspect_getsource_works(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_inspect_getsource_works(eval_provider): """inspect.getsource() works on decoded synthesized types.""" code = SynthesizedType( type_name="Greeter", module_code="class Greeter:\n def hello(self) -> str:\n return 'world'\n", ) enc = Encodable.define(type) - with handler(UnsafeEvalProvider()): + with handler(eval_provider): decoded = enc.decode(code) source = inspect.getsource(decoded) assert "class Greeter" in source @@ -795,66 +794,71 @@ def test_type_inspect_getsource_works(): @pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) def test_type_decode_syntax_error(eval_provider): - """Syntax error in module_code raises SynthesisError.""" + """Syntax error in module_code raises ValueError.""" code = SynthesizedType( type_name="Bad", module_code="class Bad:\n def __init__(self) # missing colon\n pass\n", ) enc = Encodable.define(type) - with pytest.raises(SynthesisError): + with pytest.raises(ValueError): with handler(eval_provider): 
enc.decode(code) -def test_type_decode_missing_type_name(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_decode_missing_type_name(eval_provider): """Code executes but doesn't define the expected type name.""" code = SynthesizedType( type_name="Expected", module_code="class Actual:\n pass\n", ) enc = Encodable.define(type) - with pytest.raises(SynthesisError, match="Expected"): - with handler(UnsafeEvalProvider()): + with pytest.raises(ValueError, match="Expected"): + with handler(eval_provider): enc.decode(code) -def test_type_decode_not_a_type(): - """Code that doesn't define a class is rejected by type_check or post-exec check.""" +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_decode_not_a_type(eval_provider): + """Code that doesn't define a class is rejected by type_check.""" code = SynthesizedType( type_name="MyType", module_code="MyType = 42\n", ) enc = Encodable.define(type) - with pytest.raises(SynthesisError): - with handler(UnsafeEvalProvider()): + with pytest.raises(TypeError): + with handler(eval_provider): enc.decode(code) -def test_type_decode_undefined_base_class(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_decode_undefined_base_class(eval_provider): """Code references an undefined base class not in context.""" code = SynthesizedType( type_name="Child", module_code="class Child(UndefinedBase):\n pass\n", ) enc = Encodable.define(type, {}) - with pytest.raises(SynthesisError): - with handler(UnsafeEvalProvider()): + with pytest.raises((ValueError, TypeError)): + with handler(eval_provider): enc.decode(code) -def test_type_decode_runtime_error_in_class_body(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_decode_runtime_error_in_class_body(eval_provider): """Class body raises an error during execution.""" code = SynthesizedType( type_name="Broken", module_code="class Broken:\n x = 1 / 0\n", ) enc = Encodable.define(type) - with 
pytest.raises(SynthesisError, match="Evaluation failed"): - with handler(UnsafeEvalProvider()): + with pytest.raises((ValueError, ZeroDivisionError)): + with handler(eval_provider): enc.decode(code) -def test_type_decode_type_check_catches_bad_method_types(): +@pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) +def test_type_decode_type_check_catches_bad_method_types(eval_provider): """type_check rejects a class with mistyped method (returns str, annotation says int).""" code = SynthesizedType( type_name="BadTypes", @@ -865,8 +869,8 @@ def test_type_decode_type_check_catches_bad_method_types(): ), ) enc = Encodable.define(type) - with pytest.raises((TypeError, SynthesisError)): - with handler(UnsafeEvalProvider()): + with pytest.raises(TypeError): + with handler(eval_provider): enc.decode(code) From c940b7fb0e47cdc009259915ff53d794c4ab1157 Mon Sep 17 00:00:00 2001 From: datvo06 Date: Thu, 26 Feb 2026 13:57:17 -0500 Subject: [PATCH 37/39] Fix tests --- effectful/handlers/llm/encoding.py | 43 ----------------------------- tests/test_handlers_llm_encoding.py | 39 ++++++++++---------------- 2 files changed, 15 insertions(+), 67 deletions(-) diff --git a/effectful/handlers/llm/encoding.py b/effectful/handlers/llm/encoding.py index ec0960b7..ef4098fa 100644 --- a/effectful/handlers/llm/encoding.py +++ b/effectful/handlers/llm/encoding.py @@ -1,7 +1,6 @@ import ast import base64 import collections -import ctypes import functools import inspect import io @@ -597,16 +596,6 @@ def deserialize(self, serialized_value: str) -> SynthesizedFunction: return SynthesizedFunction.model_validate_json(serialized_value) -class _PyMappingProxyObject(ctypes.Structure): - """Internal ctypes structure to access the underlying dict of a mappingproxy.""" - - _fields_ = [ - ("ob_refcnt", ctypes.c_ssize_t), - ("ob_type", ctypes.py_object), - ("mapping", ctypes.py_object), - ] - - class SynthesizedType(pydantic.BaseModel): """Structured output for type/class synthesis. 
@@ -677,21 +666,6 @@ def decode(self, encoded_value: SynthesizedType) -> type: # Type-check the synthesized module evaluation.type_check(tree, self.ctx, None, type) - # Inject __firstlineno__ into class bodies for Python 3.13+ compatibility - # inspect.getsource() looks for __firstlineno__ in vars(cls), - # which requires it to be in the class's __dict__. - for node in ast.walk(tree): - if isinstance(node, ast.ClassDef): - assign = ast.Assign( - targets=[ast.Name(id="__firstlineno__", ctx=ast.Store())], - value=ast.Constant(value=node.lineno), - lineno=node.lineno, - col_offset=0, - ) - ast.fix_missing_locations(assign) - node.body.insert(0, assign) - ast.fix_missing_locations(tree) - # Compile and execute via evaluation effects code_obj = evaluation.compile(tree, filename) evaluation.exec(code_obj, g) @@ -716,23 +690,6 @@ def decode(self, encoded_value: SynthesizedType) -> type: synthesized_type.__synthesized__ = encoded_value # type: ignore[attr-defined] synthesized_type.__module__ = module_name - # NOTE: Set __firstlineno__ AFTER __module__ assignment! - # In Python 3.13, setting __module__ clears __firstlineno__ from vars(). - # We use ctypes to directly inject it into __dict__ for inspect.getsource(). 
- if "__firstlineno__" not in vars(synthesized_type): - firstlineno = next( - ( - n.lineno - for n in ast.walk(ast.parse(module_code)) - if isinstance(n, ast.ClassDef) and n.name == type_name - ), - 1, - ) - inner_dict = _PyMappingProxyObject.from_address( - id(vars(synthesized_type)) - ).mapping - inner_dict["__firstlineno__"] = firstlineno - return synthesized_type def serialize( diff --git a/tests/test_handlers_llm_encoding.py b/tests/test_handlers_llm_encoding.py index 4491b38f..c961d6ed 100644 --- a/tests/test_handlers_llm_encoding.py +++ b/tests/test_handlers_llm_encoding.py @@ -685,14 +685,11 @@ def __call__(self): # Type: roundtrip, type_check pass/fail, serialize/deserialize # ============================================================================ - class SimplePoint: - x: int - y: int - - def __init__(self, x: int, y: int): + def make(self, x: int, y: int) -> "SimplePoint": self.x = x self.y = y + return self class Greeter: @@ -705,13 +702,13 @@ def hello(self) -> str: @pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) def test_type_encode_decode_simple_class(eval_provider): - """Roundtrip encode/decode of a simple class with typed attributes.""" + """Roundtrip encode/decode of a simple class.""" enc = Encodable.define(type) with handler(eval_provider): decoded = enc.decode(enc.encode(SimplePoint)) assert isinstance(decoded, type) assert decoded.__name__ == "SimplePoint" - obj = decoded(1, 2) + obj = decoded().make(1, 2) assert obj.x == 1 assert obj.y == 2 @@ -734,31 +731,25 @@ def test_type_decode_valid_class_code(eval_provider): def test_type_decode_class_with_context(eval_provider): """Decode a class that references a type from lexical context.""" code = SynthesizedType( - type_name="ChildPoint", + type_name="ChildGreeter", module_code=( - "class ChildPoint(BasePoint):\n" - " z: int\n" - " def __init__(self, x: int, y: int, z: int):\n" - " super().__init__(x, y)\n" - " self.z = z\n" + "class ChildGreeter(BaseGreeter):\n" + " def greet(self) 
-> str:\n" + " return 'child'\n" ), ) - class BasePoint: - x: int - y: int - - def __init__(self, x: int, y: int): - self.x = x - self.y = y + class BaseGreeter: + def greet(self) -> str: + return "base" - ctx = {"BasePoint": BasePoint} + ctx = {"BaseGreeter": BaseGreeter} enc = Encodable.define(type, ctx) with handler(eval_provider): decoded = enc.decode(code) - obj = decoded(1, 2, 3) - assert obj.x == 1 and obj.y == 2 and obj.z == 3 - assert isinstance(obj, BasePoint) + obj = decoded() + assert obj.greet() == "child" + assert isinstance(obj, BaseGreeter) @pytest.mark.parametrize("eval_provider", EVAL_PROVIDERS) From db938957384c2295271f14bbcee18885fb154b27 Mon Sep 17 00:00:00 2001 From: datvo06 Date: Thu, 26 Feb 2026 14:10:56 -0500 Subject: [PATCH 38/39] Lint --- tests/test_handlers_llm_encoding.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_handlers_llm_encoding.py b/tests/test_handlers_llm_encoding.py index c961d6ed..2afb829c 100644 --- a/tests/test_handlers_llm_encoding.py +++ b/tests/test_handlers_llm_encoding.py @@ -685,6 +685,7 @@ def __call__(self): # Type: roundtrip, type_check pass/fail, serialize/deserialize # ============================================================================ + class SimplePoint: def make(self, x: int, y: int) -> "SimplePoint": self.x = x From ec17e457d128131ac0303a6ab780f6a0e245e88d Mon Sep 17 00:00:00 2001 From: datvo06 Date: Thu, 26 Feb 2026 15:12:58 -0500 Subject: [PATCH 39/39] Minor fix for python 3.13 --- effectful/handlers/llm/encoding.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/effectful/handlers/llm/encoding.py b/effectful/handlers/llm/encoding.py index ef4098fa..841df00c 100644 --- a/effectful/handlers/llm/encoding.py +++ b/effectful/handlers/llm/encoding.py @@ -690,6 +690,18 @@ def decode(self, encoded_value: SynthesizedType) -> type: synthesized_type.__synthesized__ = encoded_value # type: ignore[attr-defined] synthesized_type.__module__ = module_name + # Set __firstlineno__ 
for Python 3.13+ (inspect.getsource requires it). + # Must be set AFTER __module__ since __module__ assignment can clear it. + firstlineno = next( + ( + n.lineno + for n in ast.walk(ast.parse(module_code)) + if isinstance(n, ast.ClassDef) and n.name == type_name + ), + 1, + ) + synthesized_type.__firstlineno__ = firstlineno # type: ignore[attr-defined] + return synthesized_type def serialize(