diff --git a/aio/gpu-8g/text-to-text.yaml b/aio/gpu-8g/text-to-text.yaml
index c6f26c07..1a67169b 100644
--- a/aio/gpu-8g/text-to-text.yaml
+++ b/aio/gpu-8g/text-to-text.yaml
@@ -3,30 +3,27 @@ mmap: true
parameters:
model: huggingface://NousResearch/Hermes-2-Pro-Mistral-7B-GGUF/Hermes-2-Pro-Mistral-7B.Q6_K.gguf
-roles:
- assistant_function_call: assistant
- function: tool
template:
chat_message: |
- <|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "function"}}{{.Role}}{{else if eq .RoleName "user"}}user{{end}}
- {{ if eq .RoleName "assistant_function_call" }}<tool_call>{{end}}
- {{ if eq .RoleName "function" }}<tool_response>{{end}}
+ <|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "tool"}}tool{{else if eq .RoleName "user"}}user{{end}}
+ {{ if .FunctionCall }}<tool_call>{{end}}
+ {{ if eq .RoleName "tool" }}<tool_response>{{end}}
{{if .Content}}{{.Content}}{{end}}
{{if .FunctionCall}}{{toJson .FunctionCall}}{{end}}
- {{ if eq .RoleName "assistant_function_call" }}</tool_call>{{end}}
- {{ if eq .RoleName "function" }}</tool_response>{{end}}
+ {{ if .FunctionCall }}</tool_call>{{end}}
+ {{ if eq .RoleName "tool" }}</tool_response>{{end}}
<|im_end|>
# https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B-GGUF#prompt-format-for-function-calling
function: |
<|im_start|>system
- You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
+ You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
<tools>
{{range .Functions}}
{'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
{{end}}
</tools>
-
- Use the following pydantic model json schema for each tool call you will make:
- {'title': 'FunctionCall', 'type': 'object', 'properties': {'arguments': {'title': 'Arguments', 'type': 'object'}, 'name': {'title': 'Name', 'type': 'string'}}, 'required': ['arguments', 'name']}
+
+ Use the following pydantic model json schema for each tool call you will make:
+ {'title': 'FunctionCall', 'type': 'object', 'properties': {'arguments': {'title': 'Arguments', 'type': 'object'}, 'name': {'title': 'Name', 'type': 'string'}}, 'required': ['arguments', 'name']}
For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:
<tool_call>
{'arguments': <args-dict>, 'name': <function-name>}
diff --git a/aio/intel/text-to-text.yaml b/aio/intel/text-to-text.yaml
index ef36b562..0577d19b 100644
--- a/aio/intel/text-to-text.yaml
+++ b/aio/intel/text-to-text.yaml
@@ -4,30 +4,27 @@ f16: false
parameters:
model: huggingface://NousResearch/Hermes-2-Pro-Mistral-7B-GGUF/Hermes-2-Pro-Mistral-7B.Q6_K.gguf
-roles:
- assistant_function_call: assistant
- function: tool
template:
chat_message: |
- <|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "function"}}{{.Role}}{{else if eq .RoleName "user"}}user{{end}}
- {{ if eq .RoleName "assistant_function_call" }}<tool_call>{{end}}
- {{ if eq .RoleName "function" }}<tool_response>{{end}}
+ <|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "tool"}}tool{{else if eq .RoleName "user"}}user{{end}}
+ {{ if .FunctionCall }}<tool_call>{{end}}
+ {{ if eq .RoleName "tool" }}<tool_response>{{end}}
{{if .Content}}{{.Content}}{{end}}
{{if .FunctionCall}}{{toJson .FunctionCall}}{{end}}
- {{ if eq .RoleName "assistant_function_call" }}</tool_call>{{end}}
- {{ if eq .RoleName "function" }}</tool_response>{{end}}
+ {{ if .FunctionCall }}</tool_call>{{end}}
+ {{ if eq .RoleName "tool" }}</tool_response>{{end}}
<|im_end|>
# https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B-GGUF#prompt-format-for-function-calling
function: |
<|im_start|>system
- You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
+ You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
<tools>
{{range .Functions}}
{'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
{{end}}
</tools>
-
- Use the following pydantic model json schema for each tool call you will make:
- {'title': 'FunctionCall', 'type': 'object', 'properties': {'arguments': {'title': 'Arguments', 'type': 'object'}, 'name': {'title': 'Name', 'type': 'string'}}, 'required': ['arguments', 'name']}
+
+ Use the following pydantic model json schema for each tool call you will make:
+ {'title': 'FunctionCall', 'type': 'object', 'properties': {'arguments': {'title': 'Arguments', 'type': 'object'}, 'name': {'title': 'Name', 'type': 'string'}}, 'required': ['arguments', 'name']}
For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:
<tool_call>
{'arguments': <args-dict>, 'name': <function-name>}
diff --git a/core/http/endpoints/openai/chat.go b/core/http/endpoints/openai/chat.go
index c2e22962..f5f03eb4 100644
--- a/core/http/endpoints/openai/chat.go
+++ b/core/http/endpoints/openai/chat.go
@@ -236,7 +236,7 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
// if function call, we might want to customize the role so we can display better that the "assistant called a json action"
// if an "assistant_function_call" role is defined, we use it, otherwise we use the role that is passed by in the request
- if i.FunctionCall != nil && i.Role == "assistant" {
+ if (i.FunctionCall != nil || i.ToolCalls != nil) && i.Role == "assistant" {
roleFn := "assistant_function_call"
r := config.Roles[roleFn]
if r != "" {
@@ -246,6 +246,11 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
r := config.Roles[role]
contentExists := i.Content != nil && i.StringContent != ""
+ fcall := i.FunctionCall
+ if len(i.ToolCalls) > 0 {
+ fcall = i.ToolCalls
+ }
+
// First attempt to populate content via a chat message specific template
if config.TemplateConfig.ChatMessage != "" {
chatMessageData := model.ChatMessageTemplateData{
@@ -253,7 +258,7 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
Role: r,
RoleName: role,
Content: i.StringContent,
- FunctionCall: i.FunctionCall,
+ FunctionCall: fcall,
FunctionName: i.Name,
LastMessage: messageIndex == (len(input.Messages) - 1),
Function: config.Grammar != "" && (messageIndex == (len(input.Messages) - 1)),
@@ -271,35 +276,49 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
content = templatedChatMessage
}
}
+
+ marshalAnyRole := func(f any) {
+ j, err := json.Marshal(f)
+ if err == nil {
+ if contentExists {
+ content += "\n" + fmt.Sprint(r, " ", string(j))
+ } else {
+ content = fmt.Sprint(r, " ", string(j))
+ }
+ }
+ }
+ marshalAny := func(f any) {
+ j, err := json.Marshal(f)
+ if err == nil {
+ if contentExists {
+ content += "\n" + string(j)
+ } else {
+ content = string(j)
+ }
+ }
+ }
// If this model doesn't have such a template, or if that template fails to return a value, template at the message level.
if content == "" {
if r != "" {
if contentExists {
content = fmt.Sprint(r, i.StringContent)
}
+
if i.FunctionCall != nil {
- j, err := json.Marshal(i.FunctionCall)
- if err == nil {
- if contentExists {
- content += "\n" + fmt.Sprint(r, " ", string(j))
- } else {
- content = fmt.Sprint(r, " ", string(j))
- }
- }
+ marshalAnyRole(i.FunctionCall)
+ }
+ if i.ToolCalls != nil {
+ marshalAnyRole(i.ToolCalls)
}
} else {
if contentExists {
content = fmt.Sprint(i.StringContent)
}
if i.FunctionCall != nil {
- j, err := json.Marshal(i.FunctionCall)
- if err == nil {
- if contentExists {
- content += "\n" + string(j)
- } else {
- content = string(j)
- }
- }
+ marshalAny(i.FunctionCall)
+ }
+ if i.ToolCalls != nil {
+ marshalAny(i.ToolCalls)
}
}
// Special Handling: System. We care if it was printed at all, not the r branch, so check seperately
diff --git a/embedded/models/hermes-2-pro-mistral.yaml b/embedded/models/hermes-2-pro-mistral.yaml
index 84510d2a..108216f5 100644
--- a/embedded/models/hermes-2-pro-mistral.yaml
+++ b/embedded/models/hermes-2-pro-mistral.yaml
@@ -3,30 +3,27 @@ mmap: true
parameters:
model: huggingface://NousResearch/Hermes-2-Pro-Mistral-7B-GGUF/Hermes-2-Pro-Mistral-7B.Q6_K.gguf
-roles:
- assistant_function_call: assistant
- function: tool
template:
chat_message: |
- <|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "function"}}{{.Role}}{{else if eq .RoleName "user"}}user{{end}}
- {{ if eq .RoleName "assistant_function_call" }}<tool_call>{{end}}
- {{ if eq .RoleName "function" }}<tool_response>{{end}}
+ <|im_start|>{{if eq .RoleName "assistant"}}assistant{{else if eq .RoleName "system"}}system{{else if eq .RoleName "tool"}}tool{{else if eq .RoleName "user"}}user{{end}}
+ {{ if .FunctionCall }}<tool_call>{{end}}
+ {{ if eq .RoleName "tool" }}<tool_response>{{end}}
{{if .Content}}{{.Content}}{{end}}
{{if .FunctionCall}}{{toJson .FunctionCall}}{{end}}
- {{ if eq .RoleName "assistant_function_call" }}</tool_call>{{end}}
- {{ if eq .RoleName "function" }}</tool_response>{{end}}
+ {{ if .FunctionCall }}</tool_call>{{end}}
+ {{ if eq .RoleName "tool" }}</tool_response>{{end}}
<|im_end|>
# https://huggingface.co/NousResearch/Hermes-2-Pro-Mistral-7B-GGUF#prompt-format-for-function-calling
function: |
<|im_start|>system
- You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
+ You are a function calling AI model. You are provided with function signatures within <tools></tools> XML tags. You may call one or more functions to assist with the user query. Don't make assumptions about what values to plug into functions. Here are the available tools:
<tools>
{{range .Functions}}
{'type': 'function', 'function': {'name': '{{.Name}}', 'description': '{{.Description}}', 'parameters': {{toJson .Parameters}} }}
{{end}}
</tools>
-
- Use the following pydantic model json schema for each tool call you will make:
- {'title': 'FunctionCall', 'type': 'object', 'properties': {'arguments': {'title': 'Arguments', 'type': 'object'}, 'name': {'title': 'Name', 'type': 'string'}}, 'required': ['arguments', 'name']}
+
+ Use the following pydantic model json schema for each tool call you will make:
+ {'title': 'FunctionCall', 'type': 'object', 'properties': {'arguments': {'title': 'Arguments', 'type': 'object'}, 'name': {'title': 'Name', 'type': 'string'}}, 'required': ['arguments', 'name']}
For each function call return a json object with function name and arguments within <tool_call></tool_call> XML tags as follows:
<tool_call>
{'arguments': <args-dict>, 'name': <function-name>}