-
-
Notifications
You must be signed in to change notification settings - Fork 627
Features/refine template config #1321
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: master
Are you sure you want to change the base?
Changes from all commits
b4ab8d0
1400dd0
35c44ac
10c0714
985ba9d
9476f03
43d91d0
5375da1
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -105,6 +105,36 @@ private void AddDefaultInstruction(Agent agent, string instruction) | |
| agent.ChannelInstructions = instructions; | ||
| } | ||
|
|
||
| #if !DEBUG | ||
| [SharpCache(10, perInstanceCache: true)] | ||
| #endif | ||
| public async Task<AgentTemplate?> GetAgentTemplateDetail(string agentId, string templateName) | ||
| { | ||
| var template = await _db.GetAgentTemplateDetail(agentId, templateName); | ||
| if (template == null) | ||
| { | ||
| return template; | ||
| } | ||
|
|
||
| if (template.LlmConfig == null) | ||
| { | ||
| var agent = await _db.GetAgent(agentId); | ||
| if (!string.IsNullOrEmpty(agent?.LlmConfig?.Provider) | ||
| && !string.IsNullOrEmpty(agent?.LlmConfig?.Model)) | ||
| { | ||
| template.LlmConfig = new AgentTemplateLlmConfig | ||
| { | ||
| Provider = agent.LlmConfig.Provider, | ||
| Model = agent.LlmConfig.Model, | ||
| MaxOutputTokens = agent.LlmConfig.MaxOutputTokens, | ||
| ReasoningEffortLevel = agent.LlmConfig.ReasoningEffortLevel | ||
| }; | ||
| } | ||
|
Member
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 这里是否需要返回 ResponseFormat
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Response format is included in the template detail. It is outside the llm config |
||
| } | ||
|
|
||
| return template.DeepClone(); | ||
| } | ||
|
|
||
| public async Task InheritAgent(Agent agent) | ||
| { | ||
| if (string.IsNullOrWhiteSpace(agent?.InheritAgentId)) | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -242,12 +242,31 @@ private async Task<InstructResult> RunLlm( | |
| var result = string.Empty; | ||
|
|
||
| // Render prompt | ||
| var prompt = string.IsNullOrEmpty(templateName) ? | ||
| agentService.RenderInstruction(agent) : | ||
| agentService.RenderTemplate(agent, templateName); | ||
| var prompt = string.Empty; | ||
| var llmConfig = agent.LlmConfig; | ||
|
|
||
| if (!string.IsNullOrEmpty(templateName)) | ||
| { | ||
| prompt = agentService.RenderTemplate(agent, templateName); | ||
| var templateLlmConfig = agent.Templates?.FirstOrDefault(x => x.Name.IsEqualTo(templateName))?.LlmConfig; | ||
| if (templateLlmConfig?.IsValid == true) | ||
| { | ||
| llmConfig = new AgentLlmConfig | ||
| { | ||
| Provider = templateLlmConfig.Provider, | ||
| Model = templateLlmConfig.Model, | ||
| MaxOutputTokens = templateLlmConfig.MaxOutputTokens, | ||
| ReasoningEffortLevel = templateLlmConfig.ReasoningEffortLevel | ||
| }; | ||
| } | ||
|
Comment on lines
+245
to
+261
Contributor
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 1. Template override drops settings. When a template has a valid LlmConfig, InstructService replaces agent.LlmConfig with a new AgentLlmConfig that only copies Provider/Model/MaxOutputTokens/ReasoningEffortLevel, resetting other AgentLlmConfig fields to defaults for that run. This can change behavior (e.g., recursion depth or modality configs) whenever template overrides are used. Agent Prompt
|
||
| } | ||
| else | ||
| { | ||
| prompt = agentService.RenderInstruction(agent); | ||
| } | ||
|
|
||
| var completer = CompletionProvider.GetCompletion(_services, | ||
| agentConfig: agent.LlmConfig); | ||
| agentConfig: llmConfig); | ||
|
|
||
| if (completer is ITextCompletion textCompleter) | ||
| { | ||
|
|
@@ -292,7 +311,7 @@ private async Task<InstructResult> RunLlm( | |
| } | ||
| else | ||
| { | ||
| result = await GetChatCompletion(chatCompleter, agent, instruction, prompt, message.MessageId, files); | ||
| result = await GetChatCompletion(chatCompleter, agent, instruction, prompt, message.MessageId, llmConfig, files); | ||
| } | ||
|
|
||
| // Repair JSON format if needed | ||
|
|
@@ -343,14 +362,15 @@ private async Task<string> GetChatCompletion( | |
| string instruction, | ||
| string text, | ||
| string messageId, | ||
| AgentLlmConfig? llmConfig = null, | ||
| IEnumerable<InstructFileModel>? files = null) | ||
| { | ||
| var result = await chatCompleter.GetChatCompletions(new Agent | ||
| { | ||
| Id = agent.Id, | ||
| Name = agent.Name, | ||
| Instruction = instruction, | ||
| LlmConfig = agent.LlmConfig | ||
| LlmConfig = llmConfig ?? agent.LlmConfig | ||
| }, new List<RoleDialogModel> | ||
| { | ||
| new RoleDialogModel(AgentRole.User, text) | ||
|
|
||
Uh oh!
There was an error while loading. Please reload this page.