Removes repeated types
This commit is contained in:
parent 2b22896c95
commit e64f4a9343
@@ -1,5 +1,7 @@
 package models
 
+import "github.com/docker/model-distribution/pkg/types"
+
 // ModelCreateRequest represents a model create request. It is designed to
 // follow Docker Engine API conventions, most closely following the request
 // associated with POST /images/create. At the moment is only designed to
@@ -10,22 +12,8 @@ type ModelCreateRequest struct {
 	From string `json:"from"`
 }
 
-// Model represents a locally stored model. It is designed to follow Docker
-// Engine API conventions, most closely following the image model, though the
-// casing and typing of its fields have been made more idiomatic.
-type Model struct {
-	// ID is the globally unique model identifier.
-	ID string `json:"id"`
-	// Tags are the list of tags associated with the model.
-	Tags []string `json:"tags"`
-	// Files are the GGUF files associated with the model.
-	Files []string `json:"files"`
-	// Created is the Unix epoch timestamp corresponding to the model creation.
-	Created int64 `json:"created"`
-}
-
-// Model converts the model to its OpenAI API representation.
-func (m *Model) toOpenAI() *OpenAIModel {
+// ToOpenAI converts a types.Model to its OpenAI API representation.
+func ToOpenAI(m *types.Model) *OpenAIModel {
 	return &OpenAIModel{
 		ID:     m.Tags[0],
 		Object: "model",
@@ -35,15 +23,15 @@ func (m *Model) toOpenAI() *OpenAIModel {
 }
 
 // ModelList represents a list of models.
-type ModelList []*Model
+type ModelList []*types.Model
 
-// Model converts the model to its OpenAI API representation. This method never
+// ToOpenAI converts the model list to its OpenAI API representation. This function never
 // returns a nil slice (though it may return an empty slice).
 func (l ModelList) toOpenAI() *OpenAIModelList {
 	// Convert the constituent models.
 	models := make([]*OpenAIModel, len(l))
 	for m, model := range l {
-		models[m] = model.toOpenAI()
+		models[m] = ToOpenAI(model)
 	}
 
 	// Create the OpenAI model list.

@@ -1,10 +0,0 @@
-package models
-
-import (
-	"errors"
-)
-
-// ErrModelNotFound is a sentinel error returned by Manager.GetModel if the
-// model could not be located. If returned in conjunction with an HTTP
-// request, it should be paired with a 404 response status.
-var ErrModelNotFound = errors.New("model not found")

@@ -8,6 +8,8 @@ import (
 	"net/http"
 	"strings"
 
+	"github.com/docker/model-distribution/pkg/types"
+
 	"github.com/docker/model-runner/pkg/paths"
 
 	"github.com/docker/model-distribution/pkg/distribution"
@@ -130,7 +132,7 @@ func (m *Manager) handleGetModel(w http.ResponseWriter, r *http.Request) {
 	// Query the model.
 	model, err := m.GetModel(r.PathValue("namespace") + "/" + r.PathValue("name"))
 	if err != nil {
-		if errors.Is(err, ErrModelNotFound) || errors.Is(err, distribution.ErrModelNotFound) { // TODO we should fix different types
+		if errors.Is(err, distribution.ErrModelNotFound) {
 			http.Error(w, err.Error(), http.StatusNotFound)
 		} else {
 			http.Error(w, err.Error(), http.StatusInternalServerError)
@@ -202,7 +204,7 @@ func (m *Manager) handleOpenAIGetModel(w http.ResponseWriter, r *http.Request) {
 	// Query the model.
 	model, err := m.GetModel(r.PathValue("namespace") + "/" + r.PathValue("name"))
 	if err != nil {
-		if errors.Is(err, ErrModelNotFound) {
+		if errors.Is(err, distribution.ErrModelNotFound) {
 			http.Error(w, err.Error(), http.StatusNotFound)
 		} else {
 			http.Error(w, err.Error(), http.StatusInternalServerError)
@@ -212,7 +214,7 @@ func (m *Manager) handleOpenAIGetModel(w http.ResponseWriter, r *http.Request) {
 
 	// Write the response.
 	w.Header().Set("Content-Type", "application/json")
-	if err := json.NewEncoder(w).Encode(model.toOpenAI()); err != nil {
+	if err := json.NewEncoder(w).Encode(ToOpenAI(model)); err != nil {
 		m.log.Warnln("Error while encoding OpenAI model response:", err)
 	}
 }
@@ -236,29 +238,15 @@ func (m *Manager) getModels() (ModelList, error) {
 
 	// Convert distribution models to our model format
 	for _, current := range available {
-		models = append(models, &Model{
-			ID:      current.ID,
-			Tags:    current.Tags,
-			Files:   current.Files,
-			Created: current.Created,
-		})
+		models = append(models, current)
 	}
 
 	return models, nil
 }
 
 // GetModel returns a single model.
-func (m *Manager) GetModel(ref string) (*Model, error) {
-	model, err := m.distributionClient.GetModel(ref)
-	if err != nil {
-		return nil, err
-	}
-	return &Model{
-		ID:      model.ID,
-		Tags:    model.Tags,
-		Files:   model.Files,
-		Created: model.Created,
-	}, nil
+func (m *Manager) GetModel(ref string) (*types.Model, error) {
+	return m.distributionClient.GetModel(ref)
 }
 
 // GetModelPath returns the path to a model's files.
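
For reference, a minimal, self-contained sketch of the pattern this change converges on: one shared model type converted to its OpenAI-facing shape by a free function rather than a method on a package-local duplicate. The struct definitions below are illustrative stand-ins; their fields are assumed from the duplicate struct deleted above and from the fields visible in the hunks, not taken from the real types package.

package main

import "fmt"

// Stand-in for types.Model from model-distribution; fields assumed from the
// duplicate struct removed in this commit (ID, Tags, Files, Created).
type Model struct {
	ID      string
	Tags    []string
	Files   []string
	Created int64
}

// Stand-in for OpenAIModel; only the fields visible in the diff above.
type OpenAIModel struct {
	ID     string `json:"id"`
	Object string `json:"object"`
}

// ToOpenAI mirrors the converted helper: it accepts the shared model type
// directly instead of a locally re-declared copy.
func ToOpenAI(m *Model) *OpenAIModel {
	return &OpenAIModel{ID: m.Tags[0], Object: "model"}
}

func main() {
	m := &Model{ID: "sha256:1234", Tags: []string{"ai/example:latest"}}
	om := ToOpenAI(m)
	fmt.Println(om.Object, om.ID) // prints: model ai/example:latest
}

Keeping a single struct means the HTTP handlers and the distribution client share one representation, which is why the per-field copying removed from getModels and GetModel above is no longer needed.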