1
+ #pragma warning disable OPENAI001
1
2
using BotSharp . Abstraction . Conversations . Enums ;
2
3
using BotSharp . Abstraction . Files . Utilities ;
3
4
using BotSharp . Abstraction . Hooks ;
@@ -347,17 +348,7 @@ public async Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent,
347
348
renderedInstructions = [ ] ;
348
349
349
350
var messages = new List < ChatMessage > ( ) ;
350
-
351
- var temperature = float . Parse ( state . GetState ( "temperature" , "0.0" ) ) ;
352
- var maxTokens = int . TryParse ( state . GetState ( "max_tokens" ) , out var tokens )
353
- ? tokens
354
- : agent . LlmConfig ? . MaxOutputTokens ?? LlmConstant . DEFAULT_MAX_OUTPUT_TOKEN ;
355
-
356
- var options = new ChatCompletionOptions ( )
357
- {
358
- Temperature = temperature ,
359
- MaxOutputTokenCount = maxTokens
360
- } ;
351
+ var options = InitChatCompletionOption ( agent ) ;
361
352
362
353
// Prepare instruction and functions
363
354
var ( instruction , functions ) = agentService . PrepareInstructionAndFunctions ( agent ) ;
@@ -367,6 +358,22 @@ public async Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent,
367
358
messages . Add ( new SystemChatMessage ( instruction ) ) ;
368
359
}
369
360
361
+ // Render functions
362
+ if ( options . WebSearchOptions == null )
363
+ {
364
+ foreach ( var function in functions )
365
+ {
366
+ if ( ! agentService . RenderFunction ( agent , function ) ) continue ;
367
+
368
+ var property = agentService . RenderFunctionProperty ( agent , function ) ;
369
+
370
+ options . Tools . Add ( ChatTool . CreateFunctionTool (
371
+ functionName : function . Name ,
372
+ functionDescription : function . Description ,
373
+ functionParameters : BinaryData . FromObjectAsJson ( property ) ) ) ;
374
+ }
375
+ }
376
+
370
377
foreach ( var function in functions )
371
378
{
372
379
if ( ! agentService . RenderFunction ( agent , function ) ) continue ;
@@ -397,6 +404,12 @@ public async Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent,
397
404
filteredMessages = filteredMessages . Where ( ( _ , idx ) => idx >= firstUserMsgIdx ) . ToList ( ) ;
398
405
}
399
406
407
+ var imageDetailLevel = ChatImageDetailLevel . Auto ;
408
+ if ( allowMultiModal )
409
+ {
410
+ imageDetailLevel = ParseChatImageDetailLevel ( state . GetState ( "chat_image_detail_level" ) ) ;
411
+ }
412
+
400
413
foreach ( var message in filteredMessages )
401
414
{
402
415
if ( message . Role == AgentRole . Function )
@@ -416,41 +429,56 @@ public async Task<RoleDialogModel> GetChatCompletionsStreamingAsync(Agent agent,
416
429
417
430
if ( allowMultiModal && ! message . Files . IsNullOrEmpty ( ) )
418
431
{
419
- foreach ( var file in message . Files )
420
- {
421
- if ( ! string . IsNullOrEmpty ( file . FileData ) )
422
- {
423
- var ( contentType , binary ) = FileUtility . GetFileInfoFromData ( file . FileData ) ;
424
- var contentPart = ChatMessageContentPart . CreateImagePart ( binary , contentType . IfNullOrEmptyAs ( file . ContentType ) , ChatImageDetailLevel . Auto ) ;
425
- contentParts . Add ( contentPart ) ;
426
- }
427
- else if ( ! string . IsNullOrEmpty ( file . FileStorageUrl ) )
428
- {
429
- var contentType = FileUtility . GetFileContentType ( file . FileStorageUrl ) ;
430
- var binary = fileStorage . GetFileBytes ( file . FileStorageUrl ) ;
431
- var contentPart = ChatMessageContentPart . CreateImagePart ( binary , contentType . IfNullOrEmptyAs ( file . ContentType ) , ChatImageDetailLevel . Auto ) ;
432
- contentParts . Add ( contentPart ) ;
433
- }
434
- else if ( ! string . IsNullOrEmpty ( file . FileUrl ) )
435
- {
436
- var uri = new Uri ( file . FileUrl ) ;
437
- var contentPart = ChatMessageContentPart . CreateImagePart ( uri , ChatImageDetailLevel . Auto ) ;
438
- contentParts . Add ( contentPart ) ;
439
- }
440
- }
432
+ CollectMessageContentParts ( contentParts , message . Files , imageDetailLevel ) ;
441
433
}
442
434
messages . Add ( new UserChatMessage ( contentParts ) { ParticipantName = message . FunctionName } ) ;
443
435
}
444
436
else if ( message . Role == AgentRole . Assistant )
445
437
{
446
- messages . Add ( new AssistantChatMessage ( message . Content ) ) ;
438
+ var text = message . Content ;
439
+ var textPart = ChatMessageContentPart . CreateTextPart ( text ) ;
440
+ var contentParts = new List < ChatMessageContentPart > { textPart } ;
441
+
442
+ if ( allowMultiModal && ! message . Files . IsNullOrEmpty ( ) )
443
+ {
444
+ CollectMessageContentParts ( contentParts , message . Files , imageDetailLevel ) ;
445
+ }
446
+ messages . Add ( new AssistantChatMessage ( contentParts ) ) ;
447
447
}
448
448
}
449
449
450
450
var prompt = GetPrompt ( messages , options ) ;
451
451
return ( prompt , messages , options ) ;
452
452
}
453
453
454
+
455
/// <summary>
/// Converts a message's file attachments into chat image content parts and appends
/// them to <paramref name="contentParts"/>.
/// </summary>
/// <param name="contentParts">Target collection that receives the generated image parts.</param>
/// <param name="files">
/// Attachments to render. Each file is sourced, in priority order, from inline data
/// (<c>FileData</c>), the configured file storage (<c>FileStorageUrl</c>), or an
/// external url (<c>FileUrl</c>); files with none of the three set are skipped.
/// </param>
/// <param name="imageDetailLevel">Image detail level applied to every generated part.</param>
private void CollectMessageContentParts(List<ChatMessageContentPart> contentParts, List<BotSharpFile> files, ChatImageDetailLevel imageDetailLevel)
{
    // Resolve the storage service lazily and only once, instead of a DI lookup per
    // storage-backed file inside the loop; messages without storage files skip it entirely.
    IFileStorageService? fileStorage = null;

    foreach (var file in files)
    {
        if (!string.IsNullOrEmpty(file.FileData))
        {
            // Inline payload — FileUtility extracts the content type and raw bytes.
            var (contentType, binary) = FileUtility.GetFileInfoFromData(file.FileData);
            var contentPart = ChatMessageContentPart.CreateImagePart(binary, contentType.IfNullOrEmptyAs(file.ContentType), imageDetailLevel);
            contentParts.Add(contentPart);
        }
        else if (!string.IsNullOrEmpty(file.FileStorageUrl))
        {
            // File persisted in the configured storage provider.
            fileStorage ??= _services.GetRequiredService<IFileStorageService>();
            var binary = fileStorage.GetFileBytes(file.FileStorageUrl);
            var contentType = FileUtility.GetFileContentType(file.FileStorageUrl);
            var contentPart = ChatMessageContentPart.CreateImagePart(binary, contentType.IfNullOrEmptyAs(file.ContentType), imageDetailLevel);
            contentParts.Add(contentPart);
        }
        else if (!string.IsNullOrEmpty(file.FileUrl))
        {
            // Externally hosted image referenced by url; the model fetches it itself.
            var uri = new Uri(file.FileUrl);
            var contentPart = ChatMessageContentPart.CreateImagePart(uri, imageDetailLevel);
            contentParts.Add(contentPart);
        }
    }
}
481
+
454
482
private string GetPrompt ( IEnumerable < ChatMessage > messages , ChatCompletionOptions options )
455
483
{
456
484
var prompt = string . Empty ;
@@ -518,6 +546,95 @@ private string GetPrompt(IEnumerable<ChatMessage> messages, ChatCompletionOption
518
546
return prompt ;
519
547
}
520
548
549
/// <summary>
/// Builds the <see cref="ChatCompletionOptions"/> for a completion call from conversation
/// state, per-model provider settings, and the agent's LLM config.
/// </summary>
/// <param name="agent">Agent whose LLM config supplies fallback values; accessed null-tolerantly.</param>
/// <returns>Options with temperature, max output tokens, reasoning effort, and web search configured.</returns>
private ChatCompletionOptions InitChatCompletionOption(Agent agent)
{
    var state = _services.GetRequiredService<IConversationStateService>();
    var settingsService = _services.GetRequiredService<ILlmProviderService>();
    var settings = settingsService.GetSetting(Provider, _model);

    // Parse invariantly so "0.7" works regardless of the server's culture, and fall
    // back to 0.0 on malformed state values instead of throwing FormatException.
    float? temperature = float.TryParse(
        state.GetState("temperature", "0.0"),
        System.Globalization.NumberStyles.Float,
        System.Globalization.CultureInfo.InvariantCulture,
        out var parsedTemperature) ? parsedTemperature : 0.0f;

    // Reasoning-capable models: use the settings' temperature and resolve the effort
    // level from state -> agent config -> provider settings, in that priority order.
    ChatReasoningEffortLevel? reasoningEffortLevel = null;
    if (settings?.Reasoning != null)
    {
        temperature = settings.Reasoning.Temperature;
        var level = state.GetState("reasoning_effort_level")
            .IfNullOrEmptyAs(agent?.LlmConfig?.ReasoningEffortLevel)
            .IfNullOrEmptyAs(settings?.Reasoning?.EffortLevel);
        reasoningEffortLevel = ParseReasoningEffortLevel(level);
    }

    // Web search: the web-search tool accepts neither temperature nor reasoning effort,
    // so both are cleared when it is enabled.
    ChatWebSearchOptions? webSearchOptions = null;
    if (settings?.WebSearch != null)
    {
        temperature = null;
        reasoningEffortLevel = null;
        webSearchOptions = new();
    }

    // Max output tokens: state override -> agent config -> global default.
    // agent?. keeps this consistent with the null-tolerant access above (the original
    // mixed agent?. and agent. — the latter could NRE for a null agent).
    var maxTokens = int.TryParse(state.GetState("max_tokens"), out var tokens)
        ? tokens
        : agent?.LlmConfig?.MaxOutputTokens ?? LlmConstant.DEFAULT_MAX_OUTPUT_TOKEN;

    return new ChatCompletionOptions()
    {
        Temperature = temperature,
        MaxOutputTokenCount = maxTokens,
        ReasoningEffortLevel = reasoningEffortLevel,
        WebSearchOptions = webSearchOptions
    };
}
588
+
589
/// <summary>
/// Maps a textual effort level ("low" / "medium" / "high", case-insensitive) to the
/// SDK's <see cref="ChatReasoningEffortLevel"/>.
/// </summary>
/// <param name="level">Raw level text; null/empty/whitespace yields null (no preference).</param>
/// <returns>The matching level; any other non-empty value falls back to a custom "minimal" level.</returns>
private ChatReasoningEffortLevel? ParseReasoningEffortLevel(string? level)
{
    if (string.IsNullOrWhiteSpace(level))
    {
        return null;
    }

    // ToLowerInvariant: ToLower() is culture-sensitive (e.g. Turkish "I" -> "ı"),
    // which would silently break matching "HIGH"/"MEDIUM" under tr-TR.
    return level.ToLowerInvariant() switch
    {
        "low" => ChatReasoningEffortLevel.Low,
        "medium" => ChatReasoningEffortLevel.Medium,
        "high" => ChatReasoningEffortLevel.High,
        // Unknown non-empty values keep the original fallback of a custom "minimal" level.
        _ => new ChatReasoningEffortLevel("minimal")
    };
}
614
+
615
/// <summary>
/// Maps a textual image detail level ("low" / "high", case-insensitive) to the SDK's
/// <see cref="ChatImageDetailLevel"/>.
/// </summary>
/// <param name="level">Raw level text from conversation state.</param>
/// <returns>The matching level; null, empty, or unrecognized input yields <see cref="ChatImageDetailLevel.Auto"/>.</returns>
private ChatImageDetailLevel ParseChatImageDetailLevel(string? level)
{
    if (string.IsNullOrWhiteSpace(level))
    {
        return ChatImageDetailLevel.Auto;
    }

    // ToLowerInvariant avoids culture-sensitive casing surprises (e.g. tr-TR dotless i).
    return level.ToLowerInvariant() switch
    {
        "low" => ChatImageDetailLevel.Low,
        "high" => ChatImageDetailLevel.High,
        _ => ChatImageDetailLevel.Auto
    };
}
637
+
521
638
public void SetModelName ( string model )
522
639
{
523
640
_model = model ;
0 commit comments