@@ -4,7 +4,7 @@ import { Request, Response } from 'express';
 import * as configModule from '../../config';
 import { generateText } from '../../services/llmWrapper';
 import { initializedRagService } from '../../services/ragService';
-import { GeminiChatCompletionResponse, GeminiStreamChunk } from '../../types';
+import { GeminiChatCompletionResponse } from '../../types';
 import { handleGeminiBatch, handleGeminiStream } from '../geminiQuery';
 
 // Mock src/config
@@ -19,12 +19,12 @@ jest.mock('../../config', () => ({
 
   chromaPort: '8000',
 
+  chromaUrl: 'http://localhost',
+
   geminiApiKey: 'test-api-key',
 
   geminiChatModel: 'gemini-pro',
 
-  chromaUrl: 'http://localhost',
-
   // Default mock, can be overridden per test
   geminiEmbeddingModel: 'text-embedding-004',
   // Default for most tests
@@ -291,10 +291,10 @@ describe('Gemini RAG Query Controllers', () => {
     const res = mockResponse() as Response;
     mockRagService.queryChunks.mockResolvedValue([]); // No RAG context
 
-    const streamChunk1: GeminiStreamChunk = {
+    const streamChunk1 = {
       candidates: [{ content: { parts: [{ text: 'Stream chunk 1' }], role: 'model' }, index: 0 }],
     };
-    const streamChunk2: GeminiStreamChunk = {
+    const streamChunk2 = {
       candidates: [{ content: { parts: [{ text: 'Stream chunk 2' }], role: 'model' }, index: 0 }],
     };
 
0 commit comments