Showing 16 changed files with 1,322 additions and 2 deletions.
@@ -0,0 +1,144 @@
/*
Copyright (C) 2022-2024 Traefik Labs
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
*/

package v1alpha1

import metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"

// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object

// AIService is a Kubernetes-like Service to interact with a text-based LLM provider.
// It defines the parameters and credentials required to interact with various LLM providers.
type AIService struct {
	metav1.TypeMeta `json:",inline"`
	// +optional
	metav1.ObjectMeta `json:"metadata,omitempty"`

	// The desired behavior of this AIService.
	Spec AIServiceSpec `json:"spec,omitempty"`
}

// +k8s:deepcopy-gen=true

// AIServiceSpec describes the LLM service provider.
type AIServiceSpec struct {
	Anthropic   *Anthropic   `json:"anthropic,omitempty"`
	AzureOpenAI *AzureOpenAI `json:"azureOpenai,omitempty"`
	Bedrock     *Bedrock     `json:"bedrock,omitempty"`
	Cohere      *Cohere      `json:"cohere,omitempty"`
	Gemini      *Gemini      `json:"gemini,omitempty"`
	Mistral     *Mistral     `json:"mistral,omitempty"`
	Ollama      *Ollama      `json:"ollama,omitempty"`
	OpenAI      *OpenAI      `json:"openai,omitempty"`
}

// +k8s:deepcopy-gen=true

// Anthropic configures Anthropic backend.
type Anthropic struct {
	Token  string  `json:"token"`
	Model  string  `json:"model,omitempty"`
	Params *Params `json:"params,omitempty"`
}

// +k8s:deepcopy-gen=true

// AzureOpenAI configures AzureOpenAI.
type AzureOpenAI struct {
	APIKey         string  `json:"apiKey"`
	Model          string  `json:"model,omitempty"`
	DeploymentName string  `json:"deploymentName"`
	BaseURL        string  `json:"baseUrl"`
	Params         *Params `json:"params,omitempty"`
}

// +k8s:deepcopy-gen=true

// Bedrock configures Bedrock backend.
type Bedrock struct {
	Model         string  `json:"model,omitempty"`
	Region        string  `json:"region,omitempty"`
	SystemMessage bool    `json:"systemMessage,string,omitempty"`
	Params        *Params `json:"params,omitempty"`
}

// +k8s:deepcopy-gen=true

// Cohere configures Cohere backend.
type Cohere struct {
	Token  string  `json:"token"`
	Model  string  `json:"model,omitempty"`
	Params *Params `json:"params,omitempty"`
}

// +k8s:deepcopy-gen=true

// Gemini configures Gemini backend.
type Gemini struct {
	APIKey string  `json:"apiKey"`
	Model  string  `json:"model,omitempty"`
	Params *Params `json:"params,omitempty"`
}

// +k8s:deepcopy-gen=true

// Mistral configures Mistral AI backend.
type Mistral struct {
	APIKey string  `json:"apiKey"`
	Model  string  `json:"model,omitempty"`
	Params *Params `json:"params,omitempty"`
}

// +k8s:deepcopy-gen=true

// Ollama configures Ollama backend.
type Ollama struct {
	Model   string  `json:"model,omitempty"`
	BaseURL string  `json:"baseUrl"`
	Params  *Params `json:"params,omitempty"`
}

// +k8s:deepcopy-gen=true

// OpenAI configures OpenAI.
type OpenAI struct {
	Token  string  `json:"token"`
	Model  string  `json:"model,omitempty"`
	Params *Params `json:"params,omitempty"`
}

// +k8s:deepcopy-gen=true

// Params holds the LLM hyperparameters.
type Params struct {
	Temperature      float32 `json:"temperature,omitempty"`
	TopP             float32 `json:"topP,omitempty"`
	MaxTokens        int     `json:"maxTokens,omitempty"`
	FrequencyPenalty float32 `json:"frequencyPenalty,omitempty"`
	PresencePenalty  float32 `json:"presencePenalty,omitempty"`
}

// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object

// AIServiceList defines a list of AIService.
type AIServiceList struct {
	metav1.TypeMeta `json:",inline"`
	// +optional
	metav1.ListMeta `json:"metadata,omitempty"`

	Items []AIService `json:"items"`
}
pkg/apis/hub/v1alpha1/crd/hub.traefik.io_aiservices.yaml
245 changes: 245 additions & 0 deletions
@@ -0,0 +1,245 @@
---
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
  annotations:
    controller-gen.kubebuilder.io/version: v0.14.0
  name: aiservices.hub.traefik.io
spec:
  group: hub.traefik.io
  names:
    kind: AIService
    listKind: AIServiceList
    plural: aiservices
    singular: aiservice
  scope: Namespaced
  versions:
  - name: v1alpha1
    schema:
      openAPIV3Schema:
        description: AIService is a Kubernetes-like Service to interact with a text-based
          LLM provider. It defines the parameters and credentials required to interact
          with various LLM providers.
        properties:
          apiVersion:
            description: |-
              APIVersion defines the versioned schema of this representation of an object.
              Servers should convert recognized schemas to the latest internal value, and
              may reject unrecognized values.
              More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
            type: string
          kind:
            description: |-
              Kind is a string value representing the REST resource this object represents.
              Servers may infer this from the endpoint the client submits requests to.
              Cannot be updated.
              In CamelCase.
              More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
            type: string
          metadata:
            type: object
          spec:
            description: The desired behavior of this AIService.
            properties:
              anthropic:
                description: Anthropic configures Anthropic backend.
                properties:
                  model:
                    type: string
                  params:
                    description: Params holds the LLM hyperparameters.
                    properties:
                      frequencyPenalty:
                        type: number
                      maxTokens:
                        type: integer
                      presencePenalty:
                        type: number
                      temperature:
                        type: number
                      topP:
                        type: number
                    type: object
                  token:
                    type: string
                required:
                - token
                type: object
              azureOpenai:
                description: AzureOpenAI configures AzureOpenAI.
                properties:
                  apiKey:
                    type: string
                  baseUrl:
                    type: string
                  deploymentName:
                    type: string
                  model:
                    type: string
                  params:
                    description: Params holds the LLM hyperparameters.
                    properties:
                      frequencyPenalty:
                        type: number
                      maxTokens:
                        type: integer
                      presencePenalty:
                        type: number
                      temperature:
                        type: number
                      topP:
                        type: number
                    type: object
                required:
                - apiKey
                - baseUrl
                - deploymentName
                type: object
              bedrock:
                description: Bedrock configures Bedrock backend.
                properties:
                  model:
                    type: string
                  params:
                    description: Params holds the LLM hyperparameters.
                    properties:
                      frequencyPenalty:
                        type: number
                      maxTokens:
                        type: integer
                      presencePenalty:
                        type: number
                      temperature:
                        type: number
                      topP:
                        type: number
                    type: object
                  region:
                    type: string
                  systemMessage:
                    type: boolean
                type: object
              cohere:
                description: Cohere configures Cohere backend.
                properties:
                  model:
                    type: string
                  params:
                    description: Params holds the LLM hyperparameters.
                    properties:
                      frequencyPenalty:
                        type: number
                      maxTokens:
                        type: integer
                      presencePenalty:
                        type: number
                      temperature:
                        type: number
                      topP:
                        type: number
                    type: object
                  token:
                    type: string
                required:
                - token
                type: object
              gemini:
                description: Gemini configures Gemini backend.
                properties:
                  apiKey:
                    type: string
                  model:
                    type: string
                  params:
                    description: Params holds the LLM hyperparameters.
                    properties:
                      frequencyPenalty:
                        type: number
                      maxTokens:
                        type: integer
                      presencePenalty:
                        type: number
                      temperature:
                        type: number
                      topP:
                        type: number
                    type: object
                required:
                - apiKey
                type: object
              mistral:
                description: Mistral configures Mistral AI backend.
                properties:
                  apiKey:
                    type: string
                  model:
                    type: string
                  params:
                    description: Params holds the LLM hyperparameters.
                    properties:
                      frequencyPenalty:
                        type: number
                      maxTokens:
                        type: integer
                      presencePenalty:
                        type: number
                      temperature:
                        type: number
                      topP:
                        type: number
                    type: object
                required:
                - apiKey
                type: object
              ollama:
                description: Ollama configures Ollama backend.
                properties:
                  baseUrl:
                    type: string
                  model:
                    type: string
                  params:
                    description: Params holds the LLM hyperparameters.
                    properties:
                      frequencyPenalty:
                        type: number
                      maxTokens:
                        type: integer
                      presencePenalty:
                        type: number
                      temperature:
                        type: number
                      topP:
                        type: number
                    type: object
                required:
                - baseUrl
                type: object
              openai:
                description: OpenAI configures OpenAI.
                properties:
                  model:
                    type: string
                  params:
                    description: Params holds the LLM hyperparameters.
                    properties:
                      frequencyPenalty:
                        type: number
                      maxTokens:
                        type: integer
                      presencePenalty:
                        type: number
                      temperature:
                        type: number
                      topP:
                        type: number
                    type: object
                  token:
                    type: string
                required:
                - token
                type: object
            type: object
        type: object
    served: true
    storage: true
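
As a second hedged sketch, the schema above marks apiKey, baseUrl, and deploymentName as required for the azureOpenai backend. A manifest exercising that provider could look like the following; every concrete value here is a placeholder assumption, only the field names and required constraints come from the CRD.

apiVersion: hub.traefik.io/v1alpha1
kind: AIService
metadata:
  name: ai-azure                                # placeholder name
spec:
  azureOpenai:
    apiKey: placeholder-azure-key               # required by the schema; placeholder value
    baseUrl: https://example.openai.azure.com   # required; placeholder endpoint
    deploymentName: example-deployment          # required; placeholder deployment
    model: gpt-4o                               # optional; placeholder model
    params:
      topP: 0.9                                 # optional Params field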