Skip to content

Commit c8a0cea

Browse files
committed
fix: correctly propagate error during model load
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
1 parent b348a99 commit c8a0cea

File tree

1 file changed

+2
-3
lines changed

1 file changed

+2
-3
lines changed

pkg/model/initializers.go

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -173,9 +173,8 @@ func (ml *ModelLoader) backendLoader(opts ...Option) (client grpc.Backend, err e
173173

174174
model, err := ml.LoadModel(o.modelID, o.model, ml.grpcModel(backend, o))
175175
if err != nil {
176-
err := ml.StopGRPC(only(o.modelID))
177-
if err != nil {
178-
log.Error().Err(err).Str("model", o.modelID).Msg("error stopping model")
176+
if stopErr := ml.StopGRPC(only(o.modelID)); stopErr != nil {
177+
log.Error().Err(stopErr).Str("model", o.modelID).Msg("error stopping model")
179178
}
180179
log.Error().Str("modelID", o.modelID).Err(err).Msgf("Failed to load model %s with backend %s", o.modelID, o.backendString)
181180
return nil, err

0 commit comments

Comments
 (0)