inference: Update telemetry
Signed-off-by: Dorin Geman <dorin.geman@docker.com>
parent 7a93a6e3db
commit 3201fb5049
@@ -22,9 +22,9 @@ func trimRequestPathToOpenAIRoot(path string) string {
 	return path[index:]
 }
 
-// openAIInferenceRequest is used to extract the model specification from either
+// OpenAIInferenceRequest is used to extract the model specification from either
 // a chat completion or embedding request in the OpenAI API.
-type openAIInferenceRequest struct {
+type OpenAIInferenceRequest struct {
 	// Model is the requested model name.
 	Model string `json:"model"`
 }
@@ -130,7 +130,7 @@ func (s *Scheduler) handleOpenAIInference(w http.ResponseWriter, r *http.Request
 	}
 
 	// Decode the model specification portion of the request body.
-	var request openAIInferenceRequest
+	var request OpenAIInferenceRequest
 	if err := json.Unmarshal(body, &request); err != nil {
 		http.Error(w, "invalid request", http.StatusBadRequest)
 		return
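The change exports the request type (openAIInferenceRequest becomes OpenAIInferenceRequest) so it can be referenced outside the package that owns the scheduler. Below is a minimal, self-contained sketch of how the exported type extracts the model specification from an OpenAI-style request body; the standalone main package and the sample payload are illustrative assumptions, not code from this commit.

// Sketch only: mirrors the exported OpenAIInferenceRequest from the diff above.
package main

import (
	"encoding/json"
	"fmt"
)

// OpenAIInferenceRequest is used to extract the model specification from either
// a chat completion or embedding request in the OpenAI API.
type OpenAIInferenceRequest struct {
	// Model is the requested model name.
	Model string `json:"model"`
}

func main() {
	// Example chat completion body; the payload contents are an assumption.
	body := []byte(`{"model":"llama3","messages":[{"role":"user","content":"hi"}]}`)

	// Only the "model" field is decoded; unknown fields are ignored by encoding/json,
	// which is why the same type works for chat completion and embedding requests.
	var request OpenAIInferenceRequest
	if err := json.Unmarshal(body, &request); err != nil {
		fmt.Println("invalid request:", err)
		return
	}
	fmt.Println("requested model:", request.Model) // requested model: llama3
}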