From 250454af384b4c68fede0cef10825e4b3c45e62a Mon Sep 17 00:00:00 2001
From: wangxinxin
Date: Thu, 15 Aug 2024 10:10:33 +0800
Subject: [PATCH 1/2] Tested successfully with "python -m mindsearch.app --lang en --model_format gpt4"
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 frontend/React/vite.config.ts    | 9 +++++----
 frontend/mindsearch_streamlit.py | 3 +++
 mindsearch/agent/models.py       | 5 +++--
 requirements.txt                 | 2 +-
 4 files changed, 12 insertions(+), 7 deletions(-)

diff --git a/frontend/React/vite.config.ts b/frontend/React/vite.config.ts
index 5457d7c..f828930 100644
--- a/frontend/React/vite.config.ts
+++ b/frontend/React/vite.config.ts
@@ -53,10 +53,11 @@ export default defineConfig({
   server: {
     port: 8080,
     proxy: {
-      // "/solve": {
-      //   target: "https://mindsearch.openxlab.org.cn",
-      //   changeOrigin: true,
-      // },
+      "/solve": {
+        target: "http://127.0.0.1:8002",
+        changeOrigin: true,
+        rewrite: (path) => path.replace(/^\/solve/, '/solve'),
+      },
     },
   },
 });
diff --git a/frontend/mindsearch_streamlit.py b/frontend/mindsearch_streamlit.py
index 2cc133f..d7798b1 100644
--- a/frontend/mindsearch_streamlit.py
+++ b/frontend/mindsearch_streamlit.py
@@ -317,3 +317,6 @@ def main():
 
 if __name__ == '__main__':
     main()
+
+# How to run on port 7860:
+# streamlit run frontend/mindsearch_streamlit.py --server.port=7860
\ No newline at end of file
diff --git a/mindsearch/agent/models.py b/mindsearch/agent/models.py
index f695b8a..2af9d3a 100644
--- a/mindsearch/agent/models.py
+++ b/mindsearch/agent/models.py
@@ -36,9 +36,10 @@
                        stop_words=['<|im_end|>'])
 
 gpt4 = dict(type=GPTAPI,
-            model_type='gpt-4-turbo',
+            model_type='gpt-4o-mini',
             key=os.environ.get('OPENAI_API_KEY', 'YOUR OPENAI API KEY'),
-            openai_api_base=os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1'),
+            openai_api_base=os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1/chat/completions'),
+            max_new_tokens=16384,
             )
 
 url = 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'
diff --git a/requirements.txt b/requirements.txt
index 3e9d4ac..0fb11c5 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@ fastapi
 git+https://github.com/InternLM/lagent.git
 gradio
 janus
-lmdeploy
+git+https://github.com/InternLM/lmdeploy.git
 pyvis
 sse-starlette
 termcolor

From 31f2e33a3284ceae92f2b6bd4980a85060bc3bed Mon Sep 17 00:00:00 2001
From: wangxinxin
Date: Thu, 15 Aug 2024 19:05:22 +0800
Subject: [PATCH 2/2] Tested successfully with SenseChat-5
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md                  |  2 +-
 mindsearch/agent/models.py | 18 ++++++++++++++++--
 mindsearch/app.py          |  2 +-
 requirements.txt           |  2 +-
 4 files changed, 19 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index 9e7f68f..e0012bd 100644
--- a/README.md
+++ b/README.md
@@ -53,7 +53,7 @@ pip install -r requirements.txt
 Setup FastAPI Server.
 
 ```bash
-python -m mindsearch.app --lang en --model_format internlm_server --search_engine DuckDuckGoSearch
+python -m mindsearch.app --lang en --model_format sensenova --search_engine DuckDuckGoSearch
 ```
 
 - `--lang`: language of the model, `en` for English and `cn` for Chinese.
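
Note on the `--model_format` value used in the README change above: each accepted value is expected to name one of the config dicts in `mindsearch/agent/models.py` (`gpt4`, `sensenova`, `internlm_server`, ...). A minimal sketch of that lookup follows; the helper name `build_llm_from_config` and the exact wiring are illustrative assumptions, not the repository's actual code.

```python
# Sketch (assumed wiring, not the repository's actual code): resolve the value
# passed to --model_format into one of the config dicts defined in
# mindsearch/agent/models.py and instantiate the matching lagent client.
import importlib


def build_llm_from_config(model_format: str):
    models = importlib.import_module('mindsearch.agent.models')
    if not hasattr(models, model_format):
        raise ValueError(f'Unknown --model_format: {model_format!r}')
    cfg = dict(getattr(models, model_format))  # copy, e.g. the `sensenova` dict
    llm_cls = cfg.pop('type')                  # e.g. SENSENOVA_API or GPTAPI
    return llm_cls(**cfg)                      # remaining keys become constructor kwargs


# Example: llm = build_llm_from_config('sensenova')
```
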
diff --git a/mindsearch/agent/models.py b/mindsearch/agent/models.py
index 66d0fb1..4b0f1c3 100644
--- a/mindsearch/agent/models.py
+++ b/mindsearch/agent/models.py
@@ -1,6 +1,6 @@
 import os
 
-from lagent.llms import (GPTAPI, INTERNLM2_META, HFTransformerCasualLM,
+from lagent.llms import (SENSENOVA_API, GPTAPI, INTERNLM2_META, HFTransformerCasualLM,
                          LMDeployClient, LMDeployServer)
 
 internlm_server = dict(type=LMDeployServer,
@@ -39,7 +39,21 @@
             model_type='gpt-4o-mini',
             key=os.environ.get('OPENAI_API_KEY', 'YOUR OPENAI API KEY'),
             openai_api_base=os.environ.get('OPENAI_API_BASE', 'https://api.openai.com/v1/chat/completions'),
-            max_new_tokens=16384,
+            max_new_tokens=24576,
             )
+
+# First, apply for SenseNova's ak and sk from SenseTime staff.
+# Then, generate SENSENOVA_API_KEY with lagent.utils.gen_key.auto_gen_jwt_token(ak, sk), see
+# https://github.com/InternLM/lagent/blob/ffc4ca71b4bcdbfb3a69bc0dccfa2dcc584a474d/lagent/utils/gen_key.py#L23
+
+# If you want to switch to a locally deployed SenseNova model, add the model name and context window length here:
+# https://github.com/winer632/lagent/blob/a5284a9af4c373a3ac666c51d6cef6de1e1de509/lagent/llms/sensenova.py#L21
+# You also need to point the SENSENOVA_API_BASE environment variable at the API address of the local inference framework.
+sensenova = dict(type=SENSENOVA_API,
+                 model_type='SenseChat-5',
+                 key=os.environ.get('SENSENOVA_API_KEY', 'YOUR SENSENOVA API KEY'),
+                 sensenova_api_base=os.environ.get('SENSENOVA_API_BASE', 'https://api.sensenova.cn/v1/llm/chat-completions'),
+                 max_new_tokens=24576,
+                 )
 
 url = 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'
diff --git a/mindsearch/app.py b/mindsearch/app.py
index 00ed1d4..c279af1 100644
--- a/mindsearch/app.py
+++ b/mindsearch/app.py
@@ -23,7 +23,7 @@ def parse_arguments():
                        default='internlm_server',
                        type=str,
                        help='Model format')
-    parse.add_argument('--search_engine',
+    parser.add_argument('--search_engine',
                        default='DuckDuckGoSearch',
                        type=str,
                        help='Search engine')
diff --git a/requirements.txt b/requirements.txt
index 0fb11c5..d7da184 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
 duckduckgo_search==5.3.1b1
 einops
 fastapi
-git+https://github.com/InternLM/lagent.git
+xlagent==0.2.1
 gradio
 janus
 git+https://github.com/InternLM/lmdeploy.git
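
To make the SenseNova setup comments in `models.py` concrete, here is a minimal, hedged sketch of preparing the environment before starting the server. It assumes `lagent.utils.gen_key.auto_gen_jwt_token(ak, sk)` behaves as the linked lagent source describes; the `SENSENOVA_AK`/`SENSENOVA_SK` variable names and the local endpoint URL are illustrative placeholders, not part of the repository.

```python
# Hedged sketch: derive SENSENOVA_API_KEY from an ak/sk pair as described in the
# models.py comments, then launch the FastAPI server with the sensenova config.
# SENSENOVA_AK / SENSENOVA_SK are placeholder variable names, not part of the repo.
import os
import subprocess

from lagent.utils.gen_key import auto_gen_jwt_token  # helper linked in the comments above

ak = os.environ['SENSENOVA_AK']  # access key issued by SenseTime (placeholder name)
sk = os.environ['SENSENOVA_SK']  # secret key issued by SenseTime (placeholder name)
os.environ['SENSENOVA_API_KEY'] = auto_gen_jwt_token(ak, sk)

# Optional: point at a locally deployed endpoint instead of the public API
# (illustrative URL only).
# os.environ['SENSENOVA_API_BASE'] = 'http://127.0.0.1:23333/v1/llm/chat-completions'

# Start the server with the sensenova config, matching the README change above.
subprocess.run(
    ['python', '-m', 'mindsearch.app',
     '--lang', 'en',
     '--model_format', 'sensenova',
     '--search_engine', 'DuckDuckGoSearch'],
    check=True,
)
```
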