diff --git a/canopy-chat/README.md b/canopy-chat/README.md
new file mode 100644
index 000000000..05c0316ff
--- /dev/null
+++ b/canopy-chat/README.md
@@ -0,0 +1,22 @@
+---
+title: 'Chat with Canopy'
+tags: ['pinecone', 'chainlit', 'canopy']
+---
+
+# Chat with your Canopy Server
+
+This folder contains an example application that demonstrates how to:
+- Connect to a running Canopy server.
+- Answer user queries using the data stored in the Pinecone database, and provide the sources for the answers.
+
+## High-Level Description
+
+The application connects to the Canopy API server. Once your data is vectorised into a Pinecone index and a Canopy server is running, you can start chatting with your Pinecone RAG pipeline, using OpenAI as the LLM.
+
+## Quickstart
+
+To run the example, ensure you have a running Canopy server, then follow these steps:
+
+1. Install the required dependencies by running `pip install -r requirements.txt` in your terminal.
+2. Set your Canopy API URL as the `base_url` in `app.py`.
+3. Run the application with `chainlit run app.py --port 8081`.
\ No newline at end of file
diff --git a/canopy-chat/app.py b/canopy-chat/app.py
new file mode 100644
index 000000000..472ba0bf3
--- /dev/null
+++ b/canopy-chat/app.py
@@ -0,0 +1,42 @@
+from openai import AsyncOpenAI
+import chainlit as cl
+
+client = AsyncOpenAI(base_url="http://localhost:8000/v1", api_key=None)
+
+
+settings = {
+    "model": "gpt-3.5-turbo",
+    "temperature": 0.7,
+    "max_tokens": 500,
+    "top_p": 1,
+    "frequency_penalty": 0,
+    "presence_penalty": 0,
+}
+
+
+@cl.on_chat_start
+def start_chat():
+    cl.user_session.set(
+        "message_history",
+        [{"role": "system", "content": "You are a helpful assistant."}],
+    )
+
+
+@cl.on_message
+async def main(message: cl.Message):
+    message_history = cl.user_session.get("message_history")
+    message_history.append({"role": "user", "content": message.content})
+
+    msg = cl.Message(content="")
+    await msg.send()
+
+    stream = await client.chat.completions.create(
+        messages=message_history, stream=True, **settings
+    )
+
+    async for part in stream:
+        if token := part.choices[0].delta.content or "":
+            await msg.stream_token(token)
+
+    message_history.append({"role": "assistant", "content": msg.content})
+    await msg.update()
diff --git a/canopy-chat/chainlit.md b/canopy-chat/chainlit.md
new file mode 100644
index 000000000..e69de29bb
diff --git a/canopy-chat/requirements.txt b/canopy-chat/requirements.txt
new file mode 100644
index 000000000..fbf6fabd4
--- /dev/null
+++ b/canopy-chat/requirements.txt
@@ -0,0 +1,5 @@
+pinecone-client==2.2.1
+tiktoken==0.3.3
+langchain
+chainlit
+openai
\ No newline at end of file
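
Before wiring the chat UI to the server, it can help to confirm that the Canopy server is actually reachable. The sketch below sends a single non-streaming request with the same OpenAI client that `app.py` uses; the `base_url` (`http://localhost:8000/v1`) and the model name are taken from that file, and it assumes `OPENAI_API_KEY` (or whatever placeholder key your Canopy deployment accepts) is set in the environment, so adjust these to your own setup.

```python
# Sanity-check sketch: one non-streaming chat completion against the Canopy server.
# Assumes the server listens at http://localhost:8000/v1 (as in app.py) and that
# OPENAI_API_KEY is set in the environment; swap in your own URL/key if they differ.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8000/v1")

response = client.chat.completions.create(
    model="gpt-3.5-turbo",  # same model configured in app.py's settings
    messages=[{"role": "user", "content": "What data do you have access to?"}],
)
print(response.choices[0].message.content)
```

If this prints a grounded answer, `chainlit run app.py --port 8081` should work against the same server.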