Skip to content

Commit b790a7f

Browse files
committed
Allow simpler function calling syntax, like that used with the Phi-3 function calling model
1 parent 8550b76 commit b790a7f

File tree

3 files changed

+33
-11
lines changed

3 files changed

+33
-11
lines changed

examples/function-calling/README.md

+5
Original file line numberDiff line numberDiff line change
@@ -44,3 +44,8 @@ What is 37234 times 39?
4444
To calculate 37234 times 39, I'll perform the multiplication. Let's do that.
4545
The result of multiplying 37234 by 39 is 1,452,126. If you have any more calculations or questions, feel free to ask!
4646
```
47+
48+
## Function calling example, using the Phi-3 function calling model
49+
```
50+
./examples/function-calling/llama-cli-function-runner.py -m `huggingface-cli download nold/Phi-3-mini-4k-instruct-function-calling-GGUF Phi-3-mini-4k-instruct-function-calling_Q4_K_M.gguf` --special --display-prompt -i
51+
```

examples/function-calling/function_tool.py

+26-9
Original file line numberDiff line numberDiff line change
@@ -64,6 +64,9 @@ def generate_functionary_schema_from_functions(functions, namespace="functions")
6464
schema += "}} // namespace {}".format(namespace)
6565
return schema
6666

67+
def generate_simple_schema_from_functions(functions) -> str:
    """Serialize each function spec as one JSON line with space-padded braces.

    Every entry in *functions* is dumped to JSON, then each '{' and '}' gets
    an inner space, producing the brace style the simple (Phi-3 style)
    function-calling prompt expects. Entries are joined with newlines.
    """
    lines = []
    for function in functions:
        padded = json.dumps(function).replace('{', '{ ').replace('}', ' }')
        lines.append(padded)
    return '\n'.join(lines)
69+
6770
functionary_prompt_start = """<|start_header_id|>system<|end_header_id|>
6871
6972
You are capable of executing available function(s) if required.
@@ -81,13 +84,27 @@ def generate_functionary_schema_from_functions(functions, namespace="functions")
8184
When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 60.0 seconds. The drive at '/mnt/data' can be used to save and persist user files.<|eot_id|><|start_header_id|>user<|end_header_id|>
8285
"""
8386

87+
simple_prompt_start = """<s><|user|> You are a helpful assistant with access to the following functions. Use them if required - """
88+
simple_prompt_end = """<|end|>"""
89+
8490
def get_chat_tool_format(args, tools):
    """Return the prompt and parsing markers for the model named in *args*.

    Selects the Functionary chat format when the model path/name contains
    'functionary' (case-insensitive); otherwise falls back to the simpler
    Phi-3 style function-calling format. The returned dict carries the full
    system prompt plus the marker strings and regex used to detect and
    extract function calls, and the delimiters wrapped around user and tool
    messages.
    """
    is_functionary = 'functionary' in args.model.lower()
    if is_functionary:
        schema = generate_functionary_schema_from_functions(tools)
        fmt = {
            'prompt': functionary_prompt_start + schema + functionary_prompt_end,
            'function_marker': '>>>',
            'function_re': r'>>>([^\n]*)\n(.*)<\|eot_id\|>',
            'user_start': '<|start_header_id|>user<|end_header_id|>\n',
            'user_end': '<|eot_id|><|start_header_id|>assistant<|end_header_id|>' + '\n',
            'tool_start': '',
            'tool_end': '<|eot_id|><|start_header_id|>assistant<|end_header_id|>',
        }
    else:
        schema = generate_simple_schema_from_functions(tools)
        fmt = {
            'prompt': simple_prompt_start + schema + simple_prompt_end,
            'function_marker': '<functioncall>',
            'function_re': r'<functioncall> \n?(.*)<\|end\|>',
            'user_start': '<|user|> ',
            'user_end': '<|end|>' + '\n',
            'tool_start': '<|user|>',
            'tool_end': '<|end|> <|assistant|>',
        }
    return fmt

examples/function-calling/llama-cli-function-runner.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ def main():
2323
parser.add_argument('--display-prompt', action=argparse.BooleanOptionalAction, default=False)
2424
parser.add_argument('--special', action=argparse.BooleanOptionalAction, default=False)
2525
parser.add_argument('--reverse-prompt', type=str)
26+
parser.add_argument('-m', '--model', type=str, default='model.gguf')
2627
parser.add_argument('--ctx-size', type=int, default=1024)
2728
args, other_args = parser.parse_known_args()
2829

@@ -31,8 +32,7 @@ def main():
3132

3233
if args.display_prompt: print(tool_format['prompt'])
3334

34-
command = [ './llama-cli', '-i', '-p', tool_format['prompt'], '--reverse-prompt', args.reverse_prompt, '--escape', '--special', '--no-display-prompt', '--log-disable', '--simple-io', '--ctx-size', str(args.ctx_size), *other_args]
35-
print("'" + "' '".join(command) + "'")
35+
command = [ './llama-cli', '-i', '-p', tool_format['prompt'], '--model', args.model, '--reverse-prompt', args.reverse_prompt, '--escape', '--special', '--no-display-prompt', '--log-disable', '--simple-io', '--ctx-size', str(args.ctx_size), *other_args]
3636

3737
process = subprocess.Popen(
3838
command,

0 commit comments

Comments
 (0)