Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions k8s/projects-frontend/values.dev.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ config:
NUXT_PUBLIC_APP_ENVIRONMENT: dev
NUXT_APP_GOTENBERG_SERVER_URL: 'http://projects-gotenberg'
NUXT_PUBLIC_APP_GOTENBERG_ENABLED: 'true'
NUXT_APP_LANGCHAIN_PROMPT: ''

domain: k8s.lp-i.dev

Expand Down
1 change: 1 addition & 0 deletions k8s/projects-frontend/values.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ config:
NUXT_APP_SORBOBOT_API_TRACE: ''
NUXT_APP_GOTENBERG_SERVER_URL: ''
NUXT_PUBLIC_APP_GOTENBERG_ENABLED: ''
NUXT_APP_LANGCHAIN_PROMPT: ''
e2eEnv:
nonSensitive:
USER_ADMIN_EMAIL: testautomatatiquedministrateur1@outlook.fr
Expand Down
1 change: 1 addition & 0 deletions nuxt.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,7 @@ export default defineNuxtConfig({
appMcpServerTrace: 0,
appSorbobotApiTrace: 0,
appGotenbergServerUrl: '',
appLangchainPrompt: '',
public: {
appVersion: '',
appApiOrgCode: '',
Expand Down
5 changes: 5 additions & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,9 @@
"@ckpack/vue-color": "^1.5.0",
"@hocuspocus/provider": "^2.15.0",
"@intlify/vue-i18n-loader": "^4.2.0",
"@langchain/core": "^1.1.17",
"@langchain/langgraph": "^1.1.2",
"@langchain/openai": "^1.2.3",
"@mdi/font": "^6",
"@modelcontextprotocol/sdk": "^1.20.2",
"@nuxt/test-utils": "^3.19.2",
Expand Down Expand Up @@ -81,6 +84,7 @@
"deep-chat": "^2.3.0",
"es-toolkit": "^1.40.0",
"highlight.js": "^11",
"langchain": "^1.2.14",
"leaflet": "^1",
"leaflet.markercluster": "^1.5.3",
"lowlight": "^3.3.0",
Expand All @@ -97,6 +101,7 @@
"remixicon": "^2",
"sortablejs": "^1.15.2",
"sortablejs-vue3": "^1.2.11",
"uuid": "^13.0.0",
"vue": "^3.5.13",
"vue-chart-3": "^3.1.8",
"vue-drag-resize": "^1",
Expand Down
7 changes: 5 additions & 2 deletions src/components/app/ChatBotDrawer.vue
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,8 @@ const emit = defineEmits(['close'])
const IS_STREAMED = ref(true)

const connectOptions = {
url: IS_STREAMED.value ? '/api/chat-stream' : '/api/chat',
// url: IS_STREAMED.value ? '/api/chat-stream' : '/api/chat',
url: '/api/chat-lg-stream',
stream: IS_STREAMED.value,
}
const usersStore = useUsersStore()
Expand Down Expand Up @@ -127,6 +128,7 @@ const spinnerMD = `![](data:image/svg+xml;base64,${btoa(spinner)}) `
// to handle 'meta' messages get replaced by next message
let replacedByNext = false
const responseInterceptor = (response) => {
console.log('ChatBotDrawer responseInterceptor', response)
if (response.role === 'meta') {
let text = spinnerMD + t(`chatbot.${response.text}`)
if (response.is_done) {
Expand All @@ -145,9 +147,10 @@ const responseInterceptor = (response) => {
role: 'assistant',
text: IS_STREAMED.value ? response.done_text : response.text,
})
conversationId.value = response.conversationId
analytics.chatbot.receive(response)
}
// console.log('Updated conversation', response.conversationId)
conversationId.value = response.conversationId
const overwrite = replacedByNext
// no way to know when a true message begin, so just assume next is not overwrite
replacedByNext = false
Expand Down
206 changes: 206 additions & 0 deletions src/server/routes/api/chat-lg-stream.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,206 @@
// import OpenAI from 'openai'
import { ChatOpenAI } from '@langchain/openai'
import { createAgent } from 'langchain'
import { MemorySaver } from '@langchain/langgraph'
import { SystemMessage, HumanMessage } from '@langchain/core/messages'
import { v4 as uuidv4 } from 'uuid'
// Server-side runtime config (Nitro). Values come from nuxt.config.ts
// `runtimeConfig` and can be overridden via NUXT_* environment variables
// (e.g. NUXT_APP_LANGCHAIN_PROMPT, see k8s values files).
const runtimeConfig = useRuntimeConfig()
const {
  // appOpenaiApiPromptId,
  // appOpenaiApiPromptVersion,
  appOpenaiApiKey,
  appOpenaiApiVectorStoreId,
  appMcpServerUrl,
  appMcpServerTrace,
  appSorbobotApiTrace,
  appLangchainPrompt,
} = runtimeConfig
// Public (client-exposed) config: feature flag gating the whole chatbot route.
const { appChatbotEnabled } = runtimeConfig.public

// Map conversationId -> { date, token } so the MCP server can make authed API
// requests on behalf of the user. Entries older than 1h are evicted lazily by
// the request handler below.
// NOTE(review): this Map and the checkpointer are in-process memory only —
// conversations will not survive a restart nor be shared across replicas;
// confirm this is acceptable for the deployment topology.
export const tokenMap = new Map<string, { date: Date; token: string }>()
export const checkpointer = new MemorySaver()

/**
 * Log MCP-related diagnostics to the console, but only when tracing is
 * enabled via the `appMcpServerTrace` runtime-config flag.
 */
export const traceMcp = (...args: unknown[]) => {
  if (appMcpServerTrace) {
    console.log('[MCP TRACE]', ...args)
  }
}

/**
 * Log Sorbobot-API diagnostics to the console, but only when tracing is
 * enabled via the `appSorbobotApiTrace` runtime-config flag.
 */
export const traceSorbobot = (...args: unknown[]) => {
  if (appSorbobotApiTrace) {
    console.log('[Sorbobot TRACE]', ...args)
  }
}

/**
 * SSE chat endpoint backed by a LangChain agent (LangGraph checkpointer for
 * conversation memory, optional MCP + file-search tools).
 *
 * Request body: { messages: [{ role, text }], conversationId? }
 * Response: `text/event-stream` of `data: {text, role, is_done, conversationId}` events.
 * Returns 404 when the chatbot is disabled or no OpenAI API key is configured.
 */
export default defineLazyEventHandler(() => {
  console.log('appChatbotEnabled:', appChatbotEnabled, 'openaiApiKey:', !!appOpenaiApiKey)
  return defineEventHandler(async (event) => {
    // return 404 if not configured
    if (!appOpenaiApiKey || !appChatbotEnabled) {
      setResponseStatus(event, 404)
      return
    }

    // Lazily evict token-map entries older than one hour (best-effort cleanup
    // piggybacking on each request; Map deletion during iteration is safe).
    const now = new Date()
    for (const [key, value] of tokenMap.entries()) {
      const ageMs = now.getTime() - value.date.getTime()
      if (ageMs > 60 * 60 * 1000) {
        tokenMap.delete(key)
      }
    }

    const tokenHeader = getRequestHeader(event, 'authorization') || ''
    if (tokenHeader) {
      traceMcp('chat-stream: got Authorization header provided')
    } else {
      traceMcp('chat-stream: no Authorization header provided')
    }

    // Server-sent events: headers must be set before the first write.
    setResponseHeader(event, 'Content-Type', 'text/event-stream')
    setResponseHeader(event, 'Cache-Control', 'no-cache')
    setResponseHeader(event, 'Connection', 'keep-alive')
    setResponseStatus(event, 200)

    const body = await readBody<{
      messages: Array<{ role: string; text: string }>
      conversationId?: string
    }>(event)

    const messages = body.messages || []

    // TODO handle conversation on our side
    // const openai = new OpenAI({
    //   apiKey: appOpenaiApiKey,
    // })
    // The conversationId doubles as the LangGraph thread id and as the
    // MCP authorization key into tokenMap.
    let conversationId = body.conversationId || null
    if (!conversationId) {
      // if no conversationId, we start a new conversation
      conversationId = uuidv4()
      console.log('Starting new conversation with id:', conversationId)
    }

    // Tool definitions forwarded to the model provider.
    // NOTE(review): these are OpenAI Responses-API style tool payloads —
    // confirm createAgent passes them through to ChatOpenAI unmodified.
    const tools: Array<Record<string, unknown>> = []

    if (appMcpServerUrl) {
      traceMcp('Adding MCP tool with server URL:', appMcpServerUrl)
      tools.push({
        type: 'mcp',
        server_label: 'projects-local-mcp',
        server_description:
          'A MCP to fetch information about projects, people and groups on this Projects platform.',
        server_url: appMcpServerUrl,
        require_approval: 'never',
        // The conversationId is used by the MCP server to look up the user
        // token stored in tokenMap below.
        authorization: conversationId,
      })
    }

    if (appOpenaiApiVectorStoreId) {
      tools.push({
        type: 'file_search',
        vector_store_ids: [appOpenaiApiVectorStoreId],
      })
    }

    // appOpenaiApiKey is guaranteed truthy here (404 guard above), so the
    // model can be constructed unconditionally.
    const model = new ChatOpenAI({
      apiKey: appOpenaiApiKey,
      model: 'gpt-4o-mini',
      temperature: 0,
    })

    const agent = createAgent({
      model,
      tools,
      checkpointer,
      systemPrompt: new SystemMessage({
        content: [
          {
            type: 'text',
            // BUGFIX: text content blocks use the `text` key; the previous
            // `content` key made the configured prompt silently ignored.
            text: appLangchainPrompt,
          },
        ],
      }),
    })

    traceMcp(
      `Starting chat stream for conversation ${conversationId} with ${messages.length} messages`
    )

    // Store the caller's bearer token so the MCP server can replay it.
    tokenMap.set(conversationId, {
      date: new Date(),
      token: ('' + tokenHeader).replace('Bearer ', ''),
    })
    const config = {
      configurable: { thread_id: conversationId },
    }

    /*
    EXAMPLE OUTPUT TOKENS:

    {
      "type": "text",
      "index": 0,
      "text": " learning"
    }
    ][
    {
      "type": "text",
      "index": 0,
      "text": " environments"
    }
    ][
    {
      "type": "text",
      "index": 0,
      "text": "",
      "annotations": [
        {
          "type": "citation",
          "source": "file_citation",
          "title": "FAQ - THE PROVEST PROJECT CONTEXT.txt",
          "startIndex": 522,
          "file_id": "file-R1phfjqpukKyqdBHFNRhm2"
        }
      ]
    }
    ]
    */
    // TODO: fix typescript mess with agent.stream return type
    for await (const [token, metadata] of (await agent.stream(
      { messages: messages.map((msg) => new HumanMessage(msg.text)) } as any,
      { ...config, streamMode: 'messages' }
      // ,{ options: { stream: true }, previous_response_id: conversationId,}
    )) as AsyncIterableIterator<
      [
        {
          contentBlocks?: Array<{ type: string; index: number; text: string }>
        },
        { status: string; langgraph_node?: any },
      ]
    >) {
      // TODO: handle tools and reasoning chunks
      // console.log('chunk from lg node', metadata.langgraph_node)
      const content = token.contentBlocks || []
      // sort in ascending index order and join texts (is it really necessary ?)
      const orderedContent = [...content].sort((a, b) => a.index - b.index)
      const text = orderedContent
        .filter((part) => part.type === 'text')
        .map((part) => part.text)
        .join('')
      const is_done = metadata.status === 'completed'
      const role = 'ai'
      event.node.res.write(
        `data: ${JSON.stringify({
          text,
          role,
          is_done,
          conversationId,
        })}\n\n`
      )
    }

    event.node.res.end()
  })
})
Loading