@@ -303,11 +303,12 @@ func TestBatchClassificationConfiguration(t *testing.T) {
 }
 
 func TestOpenAIModelsEndpoint(t *testing.T) {
+	// Test with default config (IncludeConfigModelsInList = false)
 	cfg := &config.RouterConfig{
 		VLLMEndpoints: []config.VLLMEndpoint{
 			{
 				Name:    "primary",
-				Address: "127.0.0.1", // Changed from localhost to IP address
+				Address: "127.0.0.1",
 				Port:    8000,
 				Weight:  1,
 			},
@@ -320,6 +321,7 @@ func TestOpenAIModelsEndpoint(t *testing.T) {
 				PreferredEndpoints: []string{"primary"},
 			},
 		},
+		IncludeConfigModelsInList: false,
 	}
 
 	apiServer := &ClassificationAPIServer{
@@ -357,13 +359,82 @@ func TestOpenAIModelsEndpoint(t *testing.T) {
 		}
 	}
 
-	// Must contain 'auto' and the configured models
-	if !got["auto"] {
-		t.Errorf("expected list to contain 'auto'")
+	// Must contain only 'MoM' (default auto model name) when IncludeConfigModelsInList is false
+	if !got["MoM"] {
+		t.Errorf("expected list to contain 'MoM', got: %v", got)
+	}
+	if len(resp.Data) != 1 {
+		t.Errorf("expected only 1 model (MoM), got %d: %v", len(resp.Data), got)
+	}
+}
+
+func TestOpenAIModelsEndpointWithConfigModels(t *testing.T) {
+	// Test with IncludeConfigModelsInList = true
+	cfg := &config.RouterConfig{
+		VLLMEndpoints: []config.VLLMEndpoint{
+			{
+				Name:    "primary",
+				Address: "127.0.0.1",
+				Port:    8000,
+				Weight:  1,
+			},
+		},
+		ModelConfig: map[string]config.ModelParams{
+			"gpt-4o-mini": {
+				PreferredEndpoints: []string{"primary"},
+			},
+			"llama-3.1-8b-instruct": {
+				PreferredEndpoints: []string{"primary"},
+			},
+		},
+		IncludeConfigModelsInList: true,
+	}
+
+	apiServer := &ClassificationAPIServer{
+		classificationSvc: services.NewPlaceholderClassificationService(),
+		config:            cfg,
+	}
+
+	req := httptest.NewRequest("GET", "/v1/models", nil)
+	rr := httptest.NewRecorder()
+
+	apiServer.handleOpenAIModels(rr, req)
+
+	if rr.Code != http.StatusOK {
+		t.Fatalf("expected 200 OK, got %d", rr.Code)
+	}
+
+	var resp OpenAIModelList
+	if err := json.Unmarshal(rr.Body.Bytes(), &resp); err != nil {
+		t.Fatalf("failed to parse response: %v", err)
+	}
+
+	if resp.Object != "list" {
+		t.Errorf("expected object 'list', got %s", resp.Object)
+	}
+
+	// Build a set for easy lookup
+	got := map[string]bool{}
+	for _, m := range resp.Data {
+		got[m.ID] = true
+		if m.Object != "model" {
+			t.Errorf("expected each item.object to be 'model', got %s", m.Object)
+		}
+		if m.Created == 0 {
+			t.Errorf("expected created timestamp to be non-zero")
+		}
+	}
+
+	// Must contain 'MoM' (default auto model name) and the configured models when IncludeConfigModelsInList is true
+	if !got["MoM"] {
+		t.Errorf("expected list to contain 'MoM', got: %v", got)
 	}
 	if !got["gpt-4o-mini"] || !got["llama-3.1-8b-instruct"] {
 		t.Errorf("expected configured models to be present, got=%v", got)
 	}
+	if len(resp.Data) != 3 {
+		t.Errorf("expected 3 models, got %d", len(resp.Data))
+	}
 }
 
 // TestSystemPromptEndpointSecurity tests that system prompt endpoints are only accessible when explicitly enabled
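For context on what the two tests expect from the handler, here is a minimal, self-contained sketch of how a `/v1/models` response with this shape could be assembled: always advertise the "MoM" auto model, and append the `ModelConfig` entries only when `IncludeConfigModelsInList` is set. The type and function names below (`routerConfig`, `modelEntry`, `buildModelList`) are illustrative stand-ins, not the package's actual types or handler code.

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// modelEntry and modelList mirror only the fields the tests check (id, object,
// created); they are stand-ins for the real OpenAIModel/OpenAIModelList types.
type modelEntry struct {
	ID      string `json:"id"`
	Object  string `json:"object"`
	Created int64  `json:"created"`
}

type modelList struct {
	Object string       `json:"object"`
	Data   []modelEntry `json:"data"`
}

// routerConfig is a reduced stand-in for config.RouterConfig.
type routerConfig struct {
	ModelConfig               map[string]struct{}
	IncludeConfigModelsInList bool
}

// buildModelList always includes the "MoM" auto model and, only when
// IncludeConfigModelsInList is true, appends every configured model.
func buildModelList(cfg routerConfig) modelList {
	now := time.Now().Unix()
	list := modelList{
		Object: "list",
		Data:   []modelEntry{{ID: "MoM", Object: "model", Created: now}},
	}
	if cfg.IncludeConfigModelsInList {
		for name := range cfg.ModelConfig {
			list.Data = append(list.Data, modelEntry{ID: name, Object: "model", Created: now})
		}
	}
	return list
}

func main() {
	cfg := routerConfig{
		ModelConfig: map[string]struct{}{
			"gpt-4o-mini":           {},
			"llama-3.1-8b-instruct": {},
		},
		IncludeConfigModelsInList: true,
	}
	out, _ := json.MarshalIndent(buildModelList(cfg), "", "  ")
	fmt.Println(string(out)) // three entries: MoM plus the two configured models
}
```

With `IncludeConfigModelsInList` left false (the default exercised by the first test), only the single MoM entry would be returned, which is what the `len(resp.Data) != 1` assertion checks.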