Skip to content

Commit

Permalink
Deprecate max tokens and system command (#121)
Browse files Browse the repository at this point in the history
* replace role in getMessagesFromEditor from "system" to "developer"

* replace max_tokens with max_completion_tokens in OpenAIStreamPayload and methods using it

---------

Co-authored-by: Mehran Shahbaz <[email protected]>
  • Loading branch information
DenizOkcu and mehranShG authored Jan 14, 2025
1 parent 6700773 commit 42c3605
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion src/Services/EditorService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -201,7 +201,7 @@ export class EditorService {
messagesWithRole.unshift(
...systemCommands.map((command: string) => {
return {
role: "system",
role: "developer",
content: command,
};
})
Expand Down
6 changes: 3 additions & 3 deletions src/Services/OpenAIService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ export interface OpenAIStreamPayload {
frequency_penalty: number;
stop: string[] | null;
n: number;
max_tokens: number;
max_completion_tokens: number;
stream: boolean;
}
export interface OpenAIConfig {
Expand Down Expand Up @@ -88,7 +88,7 @@ export class OpenAIService {
{
model: config.model,
messages: messages,
max_tokens: config.max_tokens,
max_completion_tokens: config.max_tokens,
temperature: config.temperature,
top_p: config.top_p,
presence_penalty: config.presence_penalty,
Expand Down Expand Up @@ -135,7 +135,7 @@ export class OpenAIService {
body: JSON.stringify({
model: config.model,
messages: messages,
max_tokens: config.max_tokens,
max_completion_tokens: config.max_tokens,
temperature: config.temperature,
top_p: config.top_p,
presence_penalty: config.presence_penalty,
Expand Down

0 comments on commit 42c3605

Please sign in to comment.