Merge pull request #165 from Marenz/custom_prompt
Allow custom prompt with blueprint
tjbck authored Oct 20, 2024
2 parents 52ffa64 + 9776bdd commit 1136b83
Showing 2 changed files with 56 additions and 42 deletions.
96 changes: 55 additions & 41 deletions blueprints/function_calling_blueprint.py
@@ -11,6 +11,16 @@
get_tools_specs,
)

# System prompt for function calling
DEFAULT_SYSTEM_PROMPT = (
"""Tools: {}
If a function tool doesn't match the query, return an empty string. Else, pick a
function tool, fill in the parameters from the function tool's schema, and
return it in the format {{ "name": \"functionName\", "parameters": {{ "key":
"value" }} }}. Only pick a function if the user asks. Only return the object. Do not return any other text."
"""
)

class Pipeline:
class Valves(BaseModel):
@@ -29,7 +39,7 @@ class Valves(BaseModel):
TASK_MODEL: str
TEMPLATE: str

def __init__(self):
def __init__(self, prompt: str | None = None) -> None:
# Pipeline filters are only compatible with Open WebUI
# You can think of filter pipeline as a middleware that can be used to edit the form data before it is sent to the OpenAI API.
self.type = "filter"
@@ -40,6 +50,8 @@ def __init__(self):
# The identifier must be an alphanumeric string that can include underscores or hyphens. It cannot contain spaces, special characters, slashes, or backslashes.
# self.id = "function_calling_blueprint"
self.name = "Function Calling Blueprint"
self.prompt = prompt or DEFAULT_SYSTEM_PROMPT
self.tools: object = None

# Initialize valves
self.valves = self.Valves(
@@ -87,14 +99,45 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict:
# Get the tools specs
tools_specs = get_tools_specs(self.tools)

# System prompt for function calling
fc_system_prompt = (
f"Tools: {json.dumps(tools_specs, indent=2)}"
+ """
If a function tool doesn't match the query, return an empty string. Else, pick a function tool, fill in the parameters from the function tool's schema, and return it in the format { "name": \"functionName\", "parameters": { "key": "value" } }. Only pick a function if the user asks. Only return the object. Do not return any other text."
"""
)
prompt = self.prompt.format(json.dumps(tools_specs, indent=2))
content = "History:\n" + "\n".join(
[
f"{message['role']}: {message['content']}"
for message in body["messages"][::-1][:4]
]
) + f"Query: {user_message}"

result = self.run_completion(prompt, content)
messages = self.call_function(result, body["messages"])

return {**body, "messages": messages}

# Call the function
def call_function(self, result, messages: list[dict]) -> list[dict]:
if "name" not in result:
return messages

function = getattr(self.tools, result["name"])
function_result = None
try:
function_result = function(**result["parameters"])
except Exception as e:
print(e)

# Add the function result to the system prompt
if function_result:
system_prompt = self.valves.TEMPLATE.replace(
"{{CONTEXT}}", function_result
)

messages = add_or_update_system_message(
system_prompt, messages
)

# Return the updated messages
return messages

def run_completion(self, system_prompt: str, content: str) -> dict:
r = None
try:
# Call the OpenAI API to get the function response
Expand All @@ -105,18 +148,11 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict:
"messages": [
{
"role": "system",
"content": fc_system_prompt,
"content": system_prompt,
},
{
"role": "user",
"content": "History:\n"
+ "\n".join(
[
f"{message['role']}: {message['content']}"
for message in body["messages"][::-1][:4]
]
)
+ f"Query: {user_message}",
"content": content,
},
],
# TODO: dynamically add response_format?
@@ -137,29 +173,7 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict:
if content != "":
result = json.loads(content)
print(result)

# Call the function
if "name" in result:
function = getattr(self.tools, result["name"])
function_result = None
try:
function_result = function(**result["parameters"])
except Exception as e:
print(e)

# Add the function result to the system prompt
if function_result:
system_prompt = self.valves.TEMPLATE.replace(
"{{CONTEXT}}", function_result
)

print(system_prompt)
messages = add_or_update_system_message(
system_prompt, body["messages"]
)

# Return the updated messages
return {**body, "messages": messages}
return result

except Exception as e:
print(f"Error: {e}")
@@ -170,4 +184,4 @@ async def inlet(self, body: dict, user: Optional[dict] = None) -> dict:
except:
pass

return body
return {}
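
Taken together, the changes above let a derived pipeline supply its own function-calling prompt instead of DEFAULT_SYSTEM_PROMPT. Below is a minimal sketch of how that could look, assuming the blueprint is imported the way the repo's example pipelines import it; the MyToolsPipeline class and the prompt wording are illustrative, not part of this commit. Because inlet passes the prompt through str.format() with the JSON tools spec, a custom prompt needs exactly one {} placeholder and doubled braces ({{ }}) wherever literal JSON braces should appear:

from blueprints.function_calling_blueprint import Pipeline as FunctionCallingBlueprint


class MyToolsPipeline(FunctionCallingBlueprint):
    # Illustrative custom prompt: the single "{}" is filled with the JSON
    # tools spec, and literal braces are doubled so str.format() keeps them.
    CUSTOM_PROMPT = """Tools: {}
Pick the single best matching tool and reply only with
{{ "name": "toolName", "parameters": {{ "key": "value" }} }}.
If no tool matches the query, reply with an empty string."""

    def __init__(self):
        # Hand the custom prompt to the blueprint; leaving it out falls back
        # to DEFAULT_SYSTEM_PROMPT via "prompt or DEFAULT_SYSTEM_PROMPT".
        super().__init__(prompt=self.CUSTOM_PROMPT)
        self.name = "My Tools Pipeline"
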
2 changes: 1 addition & 1 deletion utils/pipelines/main.py
@@ -62,7 +62,7 @@ def pop_system_message(messages: List[dict]) -> Tuple[dict, List[dict]]:
return get_system_message(messages), remove_system_message(messages)


def add_or_update_system_message(content: str, messages: List[dict]):
def add_or_update_system_message(content: str, messages: List[dict]) -> List[dict]:
"""
Adds a new system message at the beginning of the messages list
or updates the existing system message at the beginning.
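
For context, this helper is what call_function uses to inject the tool result into the conversation: per the docstring it either inserts a system message at the front of the list or updates the one already there, so the list never ends up with more than one. A rough usage sketch based only on the excerpt above; the import path mirrors the file shown here and the message contents are made up:

from utils.pipelines.main import add_or_update_system_message

messages = [{"role": "user", "content": "What's the weather like?"}]

# No system message yet -> one is inserted at the front.
messages = add_or_update_system_message("Context: sunny, 21 C", messages)
assert messages[0]["role"] == "system"

# Called again -> the system message at the front is updated rather than
# a second one being added (the exact merge behaviour is not shown in
# this excerpt).
messages = add_or_update_system_message("Context: rainy, 12 C", messages)
assert sum(m["role"] == "system" for m in messages) == 1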
