forked from continuedev/continue
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Together.ts
52 lines (45 loc) · 1.67 KB
/
Together.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import {
ChatMessage,
CompletionOptions,
LLMOptions,
ModelProvider,
} from "../../index.js";
import OpenAI from "./OpenAI.js";
/**
 * LLM provider for Together AI's OpenAI-compatible endpoint.
 *
 * Inherits all request/streaming behavior from the OpenAI provider and
 * only overrides what Together needs: the API base URL and a translation
 * from short model aliases (e.g. "llama3-8b") to Together's
 * fully-qualified model identifiers.
 */
class Together extends OpenAI {
  static providerName: ModelProvider = "together";
  static defaultOptions: Partial<LLMOptions> = {
    apiBase: "https://api.together.xyz/v1/",
  };

  // Short alias → fully-qualified Together model ID. Aliases missing
  // from this table fall through to `this.model` in _getModelName.
  private static MODEL_IDS: Record<string, string> = {
    "codellama-7b": "togethercomputer/CodeLlama-7b-Instruct",
    "codellama-13b": "togethercomputer/CodeLlama-13b-Instruct",
    "codellama-34b": "togethercomputer/CodeLlama-34b-Instruct",
    "codellama-70b": "codellama/CodeLlama-70b-Instruct-hf",
    "llama3-8b": "meta-llama/Llama-3-8b-chat-hf",
    "llama3-70b": "meta-llama/Llama-3-70b-chat-hf",
    "llama2-7b": "togethercomputer/llama-2-7b-chat",
    "llama2-13b": "togethercomputer/llama-2-13b-chat",
    "llama2-70b": "togethercomputer/llama-2-70b-chat",
    "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.1",
    "mistral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
    "phind-codellama-34b": "Phind/Phind-CodeLlama-34B-v2",
    "wizardcoder-34b": "WizardLM/WizardCoder-Python-34B-V1.0",
  };

  /**
   * Resolve a model alias to Together's canonical model ID, falling
   * back to this instance's configured model when the alias is unknown.
   */
  private _getModelName(model: string) {
    const resolved = Together.MODEL_IDS[model];
    return resolved || this.model;
  }

  /**
   * Build the request arguments via the OpenAI base implementation,
   * then rewrite the `model` field so the payload carries Together's
   * fully-qualified model identifier.
   */
  protected _convertArgs(options: any, messages: ChatMessage[]) {
    const args = super._convertArgs(options, messages);
    args.model = this._getModelName(options.model);
    return args;
  }

  /**
   * Stream a raw (non-chat) completion by delegating to the inherited
   * legacy completion helper, forwarding each chunk unchanged.
   */
  protected async *_streamComplete(
    prompt: string,
    options: CompletionOptions,
  ): AsyncGenerator<string> {
    yield* this._legacystreamComplete(prompt, options);
  }
}
export default Together;