Skip to content

Commit

Permalink
Merge pull request #187 from reddiedev/main
Browse files Browse the repository at this point in the history
Added system prompt and conversation history support for Perplexity
  • Loading branch information
justinh-rahb authored Aug 2, 2024
2 parents ba1af55 + a76c9fd commit f920f7e
Showing 1 changed file with 26 additions and 9 deletions.
35 changes: 26 additions & 9 deletions examples/pipelines/providers/perplexity_manifold_pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@
import os
import requests

from utils.pipelines.main import pop_system_message


class Pipeline:
class Valves(BaseModel):
PERPLEXITY_API_BASE_URL: str = "https://api.perplexity.ai"
Expand All @@ -26,10 +29,14 @@ def __init__(self):

# List of models
self.pipelines = [
{"id": "llama-3-sonar-large-32k-online", "name": "Llama 3 Sonar Large 32K Online"},
{"id": "llama-3-sonar-small-32k-online", "name": "Llama 3 Sonar Small 32K Online"},
{"id": "llama-3-sonar-large-32k-chat", "name": "Llama 3 Sonar Large 32K Chat"},
{"id": "llama-3-sonar-small-32k-chat", "name": "Llama 3 Sonar Small 32K Chat"},
{"id": "llama-3-sonar-large-32k-online",
"name": "Llama 3 Sonar Large 32K Online"},
{"id": "llama-3-sonar-small-32k-online",
"name": "Llama 3 Sonar Small 32K Online"},
{"id": "llama-3-sonar-large-32k-chat",
"name": "Llama 3 Sonar Large 32K Chat"},
{"id": "llama-3-sonar-small-32k-chat",
"name": "Llama 3 Sonar Small 32K Chat"},
{"id": "llama-3-8b-instruct", "name": "Llama 3 8B Instruct"},
{"id": "llama-3-70b-instruct", "name": "Llama 3 70B Instruct"},
{"id": "mixtral-8x7b-instruct", "name": "Mixtral 8x7B Instruct"},
Expand Down Expand Up @@ -59,6 +66,12 @@ def pipe(
# This is where you can add your custom pipelines like RAG.
print(f"pipe:{__name__}")

system_message, messages = pop_system_message(messages)
system_prompt = "You are a helpful assistant."
if system_message is not None:
system_prompt = system_message["content"]

print(system_prompt)
print(messages)
print(user_message)

Expand All @@ -71,8 +84,8 @@ def pipe(
payload = {
"model": model_id,
"messages": [
{"role": "system", "content": "Be precise and concise."},
{"role": "user", "content": user_message}
{"role": "system", "content": system_prompt},
*messages
],
"stream": body.get("stream", True),
"return_citations": True,
Expand Down Expand Up @@ -124,17 +137,21 @@ def pipe(
except Exception as e:
return f"Error: {e}"


if __name__ == "__main__":
    # Minimal CLI harness for exercising the pipeline outside the server.
    # (Diff artifact fixed: the rendered diff showed both the old and new
    # argparse/pipe lines; this is the coherent post-commit version.)
    import argparse

    parser = argparse.ArgumentParser(description="Perplexity API Client")
    parser.add_argument("--api-key", type=str, required=True,
                        help="API key for Perplexity")
    parser.add_argument("--prompt", type=str, required=True,
                        help="Prompt to send to the Perplexity API")

    args = parser.parse_args()

    pipeline = Pipeline()
    pipeline.valves.PERPLEXITY_API_KEY = args.api_key
    # stream=False so pipe() returns the complete response string
    # rather than a streaming generator.
    response = pipeline.pipe(
        user_message=args.prompt,
        model_id="llama-3-sonar-large-32k-online",
        messages=[],
        body={"stream": False},
    )

    print("Response:", response)

0 comments on commit f920f7e

Please sign in to comment.