-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Simplify the example * Rename the example folder to simple-openai-assistant * Update README.md file * style: fix lint errors Signed-off-by: Isac Byeonghoon Yoo <[email protected]> --------- Signed-off-by: Isac Byeonghoon Yoo <[email protected]> Co-authored-by: Isac Byeonghoon Yoo <[email protected]>
- Loading branch information
Showing
7 changed files
with
130 additions
and
119 deletions.
There are no files selected for viewing
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,41 @@ | ||
# PlugBear Python SDK Example (FastAPI + OpenAI Assistant) | ||
|
||
This project introduces an example of integrating a Python LLM application with | ||
communication channels, such as Slack, using PlugBear. | ||
|
||
## Prerequisites | ||
|
||
- [Poetry](https://python-poetry.org) | ||
|
||
## Development | ||
|
||
### Installing Dependencies | ||
|
||
Use [Poetry](https://python-poetry.org/) to install dependencies. | ||
|
||
```bash | ||
poetry install | ||
``` | ||
|
||
### Running Server | ||
|
||
Run the command below to start the server: | ||
|
||
```bash | ||
OPENAI_API_KEY="YOUR_OPENAI_API_KEY" \ | ||
PLUGBEAR_API_KEY="YOUR_PLUGBEAR_API_KEY" \ | ||
poetry run python main.py | ||
``` | ||
|
||
You can obtain your `OPENAI_API_KEY` from the | ||
[OpenAI API Keys](https://platform.openai.com/api-keys) page, and your | ||
`PLUGBEAR_API_KEY` from the | ||
[PlugBear API Keys](https://auth.plugbear.io/org/api_keys) page. | ||
|
||
### Testing Integration | ||
|
||
Follow the [PlugBear Documentation](https://docs.plugbear.io) to connect your app | ||
to communication channels and test it. | ||
|
||
Ask any math question to `@PlugBear` after connecting it. e.g., | ||
`` @PlugBear I need to solve the equation `3x + 11 = 14`. Can you help me? ``. |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,89 @@ | ||
from __future__ import annotations | ||
|
||
import asyncio | ||
import contextlib | ||
import os | ||
from collections.abc import AsyncGenerator | ||
|
||
from fastapi import FastAPI | ||
from openai import AsyncOpenAI | ||
|
||
import plugbear.fastapi | ||
|
||
|
||
# Required credentials: a missing variable raises KeyError at import time,
# failing fast instead of erroring on the first incoming request.
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
PLUGBEAR_API_KEY = os.environ["PLUGBEAR_API_KEY"]
# Module-level async OpenAI client shared by all request handlers.
openai_client = AsyncOpenAI(api_key=OPENAI_API_KEY)
|
||
|
||
async def handle_request(request: plugbear.fastapi.Request) -> str:
    """Handle a request relayed from PlugBear.

    Creates a fresh OpenAI thread, copies the request messages into it,
    runs the example assistant, and returns the assistant's reply text.

    Args:
        request: The PlugBear request carrying the conversation messages.

    Returns:
        The generated reply, with multiple content parts joined by newlines.

    Raises:
        RuntimeError: If the assistant run ends in a non-success terminal
            state (failed / cancelled / expired).
    """
    # Find or create the example OpenAI Assistant.
    assistant_id = await find_or_create_example_assistant()

    # Create a new OpenAI thread for the request.
    thread = await openai_client.beta.threads.create()

    # Add the request messages to the thread. Adjust for your use case.
    for message in request.messages:
        await openai_client.beta.threads.messages.create(
            thread_id=thread.id,
            role="user",
            content=message.content,
        )

    # Start a run to generate a response from the assistant.
    run = await openai_client.beta.threads.runs.create(
        thread_id=thread.id,
        assistant_id=assistant_id,
    )

    # Poll until the run reaches a terminal state. Checking only for
    # "completed" (as before) would loop forever when the run fails,
    # is cancelled, or expires.
    terminal_statuses = {"completed", "failed", "cancelled", "expired"}
    while run.status not in terminal_statuses:
        await asyncio.sleep(1)
        run = await openai_client.beta.threads.runs.retrieve(
            thread_id=thread.id, run_id=run.id
        )
    if run.status != "completed":
        raise RuntimeError(f"Assistant run ended with status {run.status!r}")

    # Return the most recent (generated) message.
    resp = await openai_client.beta.threads.messages.list(
        thread_id=thread.id, order="desc"
    )
    generated_messages = [content.text.value for content in resp.data[0].content]
    return "\n".join(generated_messages)
|
||
|
||
async def find_or_create_example_assistant() -> str:
    """Return the id of the 'Math Tutor' assistant, creating it if absent."""
    listing = await openai_client.beta.assistants.list()
    existing = next((a for a in listing.data if a.name == "Math Tutor"), None)
    if existing is not None:
        return existing.id

    created = await openai_client.beta.assistants.create(
        name="Math Tutor",
        instructions="You are a personal math tutor. Write and run code to answer math questions.",
        tools=[{"type": "code_interpreter"}],
        model="gpt-4-1106-preview",
    )
    return created.id
|
||
|
||
@contextlib.asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """FastAPI lifespan hook: mount the PlugBear webhook endpoint at startup.

    Registers `handle_request` as the LLM callback on `/plugbear`, then
    yields for the application's lifetime.
    """
    await plugbear.fastapi.register(
        app,
        endpoint="/plugbear",
        api_key=PLUGBEAR_API_KEY,
        llm_func=handle_request,
    )
    yield
|
||
|
||
app = FastAPI(lifespan=lifespan)

if __name__ == "__main__":
    import uvicorn

    # PORT may be overridden via the environment; defaults to 8000.
    port = int(os.environ.get("PORT", "8000"))
    uvicorn.run(app, host="0.0.0.0", port=port)
File renamed without changes.
File renamed without changes.