Commit 861a169

feat: enable content generation without source for FAQs (#347) (#23)
1 parent 298948f commit 861a169

File tree: 2 files changed, +139 −20 lines changed

frontend/src/app/(app)/workspace/faq/page.tsx

Lines changed: 31 additions & 2 deletions

@@ -35,6 +35,8 @@ import { Slider } from '@/components/ui/slider'
 import { useContextAvailability } from '@/lib/hooks/use-context-availability'
 import { getSelectContextDescription } from '@/lib/utils/context-messages'
 import { ContextRequirementMessage } from '@/components/context-requirement-message'
+import { useCourses } from '@/lib/hooks/use-courses'
+import { usePersonaStore } from '@/lib/store/persona-store'
 import { Progress } from '@/components/ui/progress'
 import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
 import { Switch } from '@/components/ui/switch'
@@ -55,6 +57,8 @@ export default function FAQComponent() {
   const [useReranker, setUseReranker] = useState(true)
   const { getActiveContextModelName, getContextTypeLabel } = useContextAvailability()
   const selectedSources = useSourcesStore((state) => state.selectedSources)
+  const { data: coursesData } = useCourses()
+  const { selectedCourseId } = usePersonaStore()

   // Function to trigger the API and fetch initial FAQs
   const fetchAPI = async () => {
@@ -66,8 +70,11 @@ export default function FAQComponent() {
       return false
     }
     const selectedSourcesCount = selectedSources.filter((source) => source.selected).length
-    if (selectedSourcesCount !== 1) {
-      toast.error('Please select exactly one source.')
+    // Allow generation with no sources or exactly one source, but not multiple sources
+    if (selectedSourcesCount > 1) {
+      toast.error(
+        'Multiple sources selected. Please select only one source or none to use course context.',
+      )
       return
     }

@@ -82,6 +89,16 @@ export default function FAQComponent() {
     try {
       const modelName = getActiveContextModelName()

+      // Get course information from context - using proven method from assessment page
+      const selectedCourse = coursesData?.docs.find((course) => course.id === selectedCourseId)
+      const courseDescription = selectedCourse?.description || ''
+      const courseInfo = selectedCourse
+        ? {
+            courseName: selectedCourse.name,
+            courseDescription: courseDescription,
+          }
+        : undefined
+
       const response = await fetch('/api/faq', {
         method: 'POST',
         headers: {
@@ -95,6 +112,7 @@ export default function FAQComponent() {
           multiPassState: null, // No state for initial request
           continueFaqs: false,
           useReranker: useReranker, // Add this line
+          courseInfo, // Add course info
         }),
       })

@@ -129,6 +147,16 @@ export default function FAQComponent() {
     try {
       const modelName = getActiveContextModelName()

+      // Get course information from context (same as initial call) - using proven method
+      const selectedCourse = coursesData?.docs.find((course) => course.id === selectedCourseId)
+      const courseDescription = selectedCourse?.description || ''
+      const courseInfo = selectedCourse
+        ? {
+            courseName: selectedCourse.name,
+            courseDescription: courseDescription,
+          }
+        : undefined
+
       const response = await fetch('/api/faq', {
         method: 'POST',
         headers: {
@@ -142,6 +170,7 @@ export default function FAQComponent() {
           multiPassState: multiPassState, // Pass the state for continuation
           continueFaqs: true, // Flag that this is a continuation request
           useReranker: useReranker, // Add this line
+          courseInfo, // Add course info
         }),
       })

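For reference, a minimal sketch (not part of the commit) of the payload the updated page now posts to /api/faq when no source is selected. Field names mirror the diff above; the sample course object and the Content-Type header are assumptions, and other fields the page also sends (model name, search query, FAQ count) are omitted for brevity.

// Sketch only: shows the shape of the new courseInfo field alongside the existing flags.
const selectedCourse = { name: 'Intro to Databases', description: 'Relational models and SQL' } // sample data
const courseInfo = selectedCourse
  ? { courseName: selectedCourse.name, courseDescription: selectedCourse.description || '' }
  : undefined

await fetch('/api/faq', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' }, // assumed header
  body: JSON.stringify({
    multiPassState: null, // no state for the initial request
    continueFaqs: false,
    useReranker: true,
    courseInfo, // new field: lets the API fall back to course context when no source is selected
  }),
})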

frontend/src/app/api/faq/route.ts

Lines changed: 108 additions & 18 deletions

@@ -13,6 +13,7 @@ import {
   SystemPromptGenerator,
   ContentProcessor,
   GenerationFunction,
+  ProcessResult,
 } from '@/lib/rag/multi-pass'
 import { ContextChunk } from '@/lib/types/context-chunk'

@@ -44,9 +45,19 @@ const CONTENT_TYPE_FAQ = 'faq'
  */
 const faqSystemPromptGenerator: SystemPromptGenerator = (isFirstPass, query, options) => {
   const faqCount = options.faqCount || 5
+  const hasValidSources = options.hasValidSources || false
+  const courseInfo = options.courseInfo as
+    | { courseName?: string; courseDescription?: string }
+    | undefined
+
+  const contextInstruction = hasValidSources
+    ? 'based on the context provided'
+    : courseInfo?.courseName
+      ? `for the course "${courseInfo.courseName}"${courseInfo.courseDescription ? ` (${courseInfo.courseDescription})` : ''}. Use general academic knowledge relevant to this course`
+      : 'using general academic knowledge'

   return `
-  Your job is to generate diverse and interesting FAQs with question answer pairs based on the context provided.
+  Your job is to generate diverse and interesting FAQs with question answer pairs ${contextInstruction}.

   Format ALL FAQs as a JSON object with this structure:
   {
@@ -65,7 +76,8 @@ const faqSystemPromptGenerator: SystemPromptGenerator = (isFirstPass, query, opt
   4. Focus on different aspects of the content - find unique angles and insights.
   5. Ensure all quotes and special characters in the JSON are properly escaped.
   6. The JSON must be valid and parsable without errors.
-  7. Answers should be detailed, descriptive, and provide clear explanations based on the context.
+  7. Answers should be detailed, descriptive, and provide clear explanations${hasValidSources ? ' based on the context' : ''}.
+  ${!hasValidSources && courseInfo?.courseName ? `8. Focus on questions and answers that would be relevant for students in ${courseInfo.courseName}.` : ''}
   `
 }

@@ -233,8 +245,13 @@ export async function POST(req: Request) {
     continueFaqs = false,
     useReranker, // Add this line with default value true
     _recursionDepth = 0,
+    courseInfo, // Add courseInfo parameter
   } = await req.json()

+  // Debug logging
+  console.log('DEBUG FAQ API: courseInfo received:', courseInfo)
+  console.log('DEBUG FAQ API: selectedSources length:', selectedSources?.length || 0)
+
   // Safety check to prevent infinite loops
   if (_recursionDepth > 10) {
     return NextResponse.json({
@@ -253,15 +270,18 @@ export async function POST(req: Request) {
   }
   const ollama = createOllama({ baseURL: ollamaUrl + '/api' })

+  // Check if we have valid sources
+  const hasValidSources = Array.isArray(selectedSources) && selectedSources.length > 0
+
   // Process user query
   const safeSearchQuery = typeof searchQuery === 'string' ? searchQuery : ''
   const hasUserQuery = safeSearchQuery.trim() !== ''
   const userQuery = hasUserQuery ? safeSearchQuery.trim() : ''
   let retrievedChunks: ContextChunk[] = []
   let usedHybridSearch = false

-  // Only retrieve chunks on initial request
-  if (!continueFaqs) {
+  // Only retrieve chunks on initial request and if we have valid sources
+  if (!continueFaqs && hasValidSources) {
     if (hasUserQuery) {
       retrievedChunks = await hybridSearch(
         userQuery,
@@ -278,6 +298,10 @@ export async function POST(req: Request) {
     }
   }

+  // Additional debug logging
+  console.log('DEBUG FAQ API: hasValidSources:', hasValidSources)
+  console.log('DEBUG FAQ API: retrievedChunks length:', retrievedChunks.length)
+
   // Set up processing options
   const actualFaqCount = faqCount || 5
   const faqContentProcessor = createFaqContentProcessor(actualFaqCount)
@@ -289,6 +313,8 @@ export async function POST(req: Request) {
     temperature: TEMPERATURE + 0.1,
     faqCount: actualFaqCount,
     preserveOrder: !hasUserQuery,
+    hasValidSources, // Add this for system prompt
+    courseInfo, // Add this for system prompt
   }

   // Start processing timer
@@ -297,20 +323,84 @@ export async function POST(req: Request) {
   // Create the FAQ generation function
   const faqGenerationFunction = createFaqGenerationFunction()

-  // Process chunks using multi-pass approach
-  const processResult = await processChunksMultiPass<FaqResult>(
-    userQuery,
-    continueFaqs ? [] : retrievedChunks,
-    faqGenerationFunction,
-    ollama(selectedModel, { numCtx: TOKEN_RESPONSE_BUDGET }),
-    options,
-    CONTENT_TYPE_FAQ,
-    faqSystemPromptGenerator,
-    (query) =>
-      `Generate FAQs for the following query: "${query}". Use the provided context to answer.`,
-    faqContentProcessor,
-    multiPassState,
-  )
+  let processResult: ProcessResult<FaqResult>
+
+  // Handle no-sources case differently
+  if (!hasValidSources && !continueFaqs) {
+    // Direct generation without chunks for course context
+    console.log('DEBUG FAQ API: Generating FAQs using course context only')
+    const systemPrompt = faqSystemPromptGenerator(true, userQuery, options)
+    const userPrompt = courseInfo?.courseName
+      ? `Generate FAQs for the course "${courseInfo.courseName}"${userQuery ? ` related to: "${userQuery}"` : ''}. Use general academic knowledge relevant to this course.`
+      : `Generate FAQs${userQuery ? ` for the topic: "${userQuery}"` : ''}. Use general academic knowledge to provide comprehensive answers.`
+
+    const messages = [
+      { role: 'system' as const, content: systemPrompt },
+      { role: 'user' as const, content: userPrompt },
+    ]
+
+    try {
+      const { object: rawResult, usage } = await faqGenerationFunction({
+        model: ollama(selectedModel, { numCtx: TOKEN_RESPONSE_BUDGET }),
+        output: 'no-schema',
+        messages: messages,
+        temperature: TEMPERATURE + 0.1,
+        maxTokens: TOKEN_RESPONSE_BUDGET,
+      })
+
+      const processedResult = faqContentProcessor(rawResult, [])
+
+      processResult = {
+        result: processedResult,
+        state: {
+          chunks: [],
+          processedChunkIds: [],
+          currentIndex: 0,
+          isComplete: true,
+          generatedContent: [processedResult],
+          progress: 100,
+          lastGenerated: processedResult,
+          contentType: CONTENT_TYPE_FAQ,
+        },
+        debug: {
+          chunksProcessed: 0,
+          totalChunks: 0,
+          remainingChunks: 0,
+          tokenUsage: {
+            prompt: usage?.promptTokens || 0,
+            completion: usage?.completionTokens || 0,
+            total: usage?.totalTokens || 0,
+          },
+          timeTaken: Date.now() - startTime,
+        },
+      }
+    } catch (error) {
+      console.error('DEBUG FAQ API: Error in direct generation:', error)
+      throw error
+    }
+  } else {
+    // Use multi-pass approach for sources
+    processResult = await processChunksMultiPass<FaqResult>(
+      userQuery,
+      continueFaqs ? [] : retrievedChunks,
+      faqGenerationFunction,
+      ollama(selectedModel, { numCtx: TOKEN_RESPONSE_BUDGET }),
+      options,
+      CONTENT_TYPE_FAQ,
+      faqSystemPromptGenerator,
+      (query) => {
+        if (hasValidSources) {
+          return `Generate FAQs for the following query: "${query}". Use the provided context to answer.`
+        } else if (courseInfo?.courseName) {
+          return `Generate FAQs for the course "${courseInfo.courseName}"${query ? ` related to: "${query}"` : ''}. Use general academic knowledge relevant to this course.`
+        } else {
+          return `Generate FAQs${query ? ` for the topic: "${query}"` : ''}. Use general academic knowledge to provide comprehensive answers.`
+        }
+      },
+      faqContentProcessor,
+      multiPassState,
+    )
+  }

   // Calculate processing time
   const timeTakenSeconds = (Date.now() - startTime) / 1000
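
To make the new prompt behavior easier to follow, here is a condensed, standalone restatement of the context-instruction branching that faqSystemPromptGenerator now performs (the helper name contextInstructionFor is hypothetical; the three returned strings are taken from the diff above).

// Condensed restatement of the branching added to faqSystemPromptGenerator; not part of the commit.
function contextInstructionFor(
  hasValidSources: boolean,
  courseInfo?: { courseName?: string; courseDescription?: string },
): string {
  if (hasValidSources) return 'based on the context provided'
  if (courseInfo?.courseName) {
    const description = courseInfo.courseDescription ? ` (${courseInfo.courseDescription})` : ''
    return `for the course "${courseInfo.courseName}"${description}. Use general academic knowledge relevant to this course`
  }
  return 'using general academic knowledge'
}

// Example: no sources selected, but a course is in context
// contextInstructionFor(false, { courseName: 'Operating Systems' })
// -> 'for the course "Operating Systems". Use general academic knowledge relevant to this course'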
