2 changes: 1 addition & 1 deletion commands/completion/functions.go
@@ -20,7 +20,7 @@ func ModelNames(desktopClient func() *desktop.Client, limit int) cobra.Completio
if limit > 0 && len(args) >= limit {
return nil, cobra.ShellCompDirectiveNoFileComp
}
models, err := desktopClient().List()
models, err := desktopClient().List(cmd.Context())
if err != nil {
return nil, cobra.ShellCompDirectiveError
}
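The change above is the pattern repeated throughout this PR: callbacks stop calling the desktop client without a context and instead forward cmd.Context(), which cobra populates from whatever context the root command was executed with (context.Background() if none was supplied). A minimal sketch of that flow, where fetchModels is a hypothetical stand-in for a context-aware method such as desktop.Client.List, not code from this repository:

```go
// Sketch only: fetchModels is an illustrative stand-in for a context-aware
// client method such as desktop.Client.List.
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/spf13/cobra"
)

func fetchModels(ctx context.Context) ([]string, error) {
	select {
	case <-time.After(100 * time.Millisecond): // simulate a network call
		return []string{"example/model-a", "example/model-b"}, nil
	case <-ctx.Done():
		return nil, ctx.Err() // aborted because the command context was cancelled
	}
}

func main() {
	cmd := &cobra.Command{
		Use: "list",
		RunE: func(cmd *cobra.Command, args []string) error {
			// cmd.Context() is the context passed to ExecuteContext below;
			// cancelling it cancels every call it is threaded into.
			models, err := fetchModels(cmd.Context())
			if err != nil {
				return err
			}
			for _, m := range models {
				fmt.Println(m)
			}
			return nil
		},
	}
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	if err := cmd.ExecuteContext(ctx); err != nil {
		fmt.Println("error:", err)
	}
}
```

The same mechanism applies to the completion callback above, since ValidArgsFunction handlers also receive the *cobra.Command and can read cmd.Context().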
11 changes: 6 additions & 5 deletions commands/compose.go
@@ -1,6 +1,7 @@
package commands

import (
"context"
"encoding/json"
"errors"
"fmt"
@@ -57,7 +58,7 @@ func newUpCommand() *cobra.Command {
return errors.New("unable to determine standalone runner endpoint")
}

if err := downloadModelsOnlyIfNotFound(desktopClient, models); err != nil {
if err := downloadModelsOnlyIfNotFound(cmd.Context(), desktopClient, models); err != nil {
return err
}

@@ -69,7 +70,7 @@ func newUpCommand() *cobra.Command {
}

for _, model := range models {
if err := desktopClient.ConfigureBackend(scheduling.ConfigureRequest{
if err := desktopClient.ConfigureBackend(cmd.Context(), scheduling.ConfigureRequest{
Model: model,
ContextSize: ctxSize,
RawRuntimeFlags: rawRuntimeFlags,
@@ -137,8 +138,8 @@ func newMetadataCommand(upCmd, downCmd *cobra.Command) *cobra.Command {
return c
}

func downloadModelsOnlyIfNotFound(desktopClient *desktop.Client, models []string) error {
modelsDownloaded, err := desktopClient.List()
func downloadModelsOnlyIfNotFound(ctx context.Context, desktopClient *desktop.Client, models []string) error {
modelsDownloaded, err := desktopClient.List(ctx)
if err != nil {
_ = sendErrorf("Failed to get models list: %v", err)
return err
@@ -156,7 +157,7 @@ func downloadModelsOnlyIfNotFound(desktopClient *desktop.Client, models []string
}
return false
}) {
_, _, err = desktopClient.Pull(model, false, func(s string) {
_, _, err = desktopClient.Pull(ctx, model, false, func(s string) {
_ = sendInfo(s)
})
if err != nil {
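The desktop client methods gaining a ctx parameter in this PR (List, Pull, ConfigureBackend, and the rest) are not shown in this diff. For an HTTP-backed client, the usual counterpart on the other side is to build requests with http.NewRequestWithContext so that cancelling the command's context aborts the in-flight call. A hedged sketch of what such a method can look like; the types, field names, and endpoint below are illustrative, not the actual desktop.Client implementation:

```go
// Sketch only: Client and Model here are illustrative, not the real
// desktop.Client types; the point is how ctx reaches the HTTP layer.
package client

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
)

type Model struct {
	ID   string   `json:"id"`
	Tags []string `json:"tags"`
}

type Client struct {
	baseURL string
	http    *http.Client
}

// List fetches installed models; the request is tied to ctx, so a cancelled
// command context aborts it mid-flight.
func (c *Client) List(ctx context.Context) ([]Model, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, c.baseURL+"/models", nil)
	if err != nil {
		return nil, err
	}
	resp, err := c.http.Do(req)
	if err != nil {
		return nil, err // includes context.Canceled / context.DeadlineExceeded
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("list models: unexpected status %s", resp.Status)
	}
	var models []Model
	if err := json.NewDecoder(resp.Body).Decode(&models); err != nil {
		return nil, err
	}
	return models, nil
}
```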
2 changes: 1 addition & 1 deletion commands/configure.go
@@ -39,7 +39,7 @@ func newConfigureCmd() *cobra.Command {
return nil
},
RunE: func(cmd *cobra.Command, args []string) error {
return desktopClient.ConfigureBackend(opts)
return desktopClient.ConfigureBackend(cmd.Context(), opts)
},
ValidArgsFunction: completion.ModelNames(getDesktopClient, -1),
}
2 changes: 1 addition & 1 deletion commands/df.go
@@ -15,7 +15,7 @@ func newDFCmd() *cobra.Command {
Use: "df",
Short: "Show Docker Model Runner disk usage",
RunE: func(cmd *cobra.Command, args []string) error {
df, err := desktopClient.DF()
df, err := desktopClient.DF(cmd.Context())
if err != nil {
err = handleClientError(err, "Failed to list running models")
return handleNotRunningError(err)
9 changes: 5 additions & 4 deletions commands/inspect.go
@@ -1,6 +1,7 @@
package commands

import (
"context"
"fmt"

"github.com/docker/model-cli/commands/completion"
@@ -32,7 +33,7 @@ func newInspectCmd() *cobra.Command {
if openai && remote {
return fmt.Errorf("--remote flag cannot be used with --openai flag")
}
inspectedModel, err := inspectModel(args, openai, remote, desktopClient)
inspectedModel, err := inspectModel(cmd.Context(), args, openai, remote, desktopClient)
if err != nil {
return err
}
@@ -46,17 +47,17 @@ func newInspectCmd() *cobra.Command {
return c
}

func inspectModel(args []string, openai bool, remote bool, desktopClient *desktop.Client) (string, error) {
func inspectModel(ctx context.Context, args []string, openai bool, remote bool, desktopClient *desktop.Client) (string, error) {
modelName := args[0]
if openai {
model, err := desktopClient.InspectOpenAI(modelName)
model, err := desktopClient.InspectOpenAI(ctx, modelName)
if err != nil {
err = handleClientError(err, "Failed to get model "+modelName)
return "", handleNotRunningError(err)
}
return formatter.ToStandardJSON(model)
}
model, err := desktopClient.Inspect(modelName, remote)
model, err := desktopClient.Inspect(ctx, modelName, remote)
if err != nil {
err = handleClientError(err, "Failed to get model "+modelName)
return "", handleNotRunningError(err)
5 changes: 3 additions & 2 deletions commands/install-runner.go
@@ -4,10 +4,11 @@ import (
"context"
"errors"
"fmt"
"github.com/docker/model-cli/pkg/types"
"os"
"time"

"github.com/docker/model-cli/pkg/types"

"github.com/docker/docker/api/types/container"
"github.com/docker/model-cli/commands/completion"
"github.com/docker/model-cli/desktop"
@@ -32,7 +33,7 @@ const (
// version can take several seconds.
func waitForStandaloneRunnerAfterInstall(ctx context.Context) error {
for tries := installWaitTries; tries > 0; tries-- {
if status := desktopClient.Status(); status.Error == nil && status.Running {
if status := desktopClient.Status(ctx); status.Error == nil && status.Running {
return nil
}
select {
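The wait loop above polls Status(ctx) a fixed number of times; the select at the end of the hunk (truncated in this view) is the usual place to sleep between attempts while still honouring cancellation. A generic sketch of that shape, with illustrative names and values in place of the repository's installWaitTries and retry interval:

```go
// Generic poll-until-ready sketch; checkStatus, the try count and the delay
// are illustrative stand-ins, not the repository's actual values.
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

func checkStatus(ctx context.Context) (running bool, err error) {
	// Stand-in for a call like desktopClient.Status(ctx).
	return false, nil
}

func waitUntilRunning(ctx context.Context, tries int, delay time.Duration) error {
	for ; tries > 0; tries-- {
		if running, err := checkStatus(ctx); err == nil && running {
			return nil
		}
		select {
		case <-time.After(delay):
			// Retry on the next loop iteration.
		case <-ctx.Done():
			// The command context was cancelled (e.g. Ctrl-C); stop waiting.
			return ctx.Err()
		}
	}
	return errors.New("runner did not become ready in time")
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()
	fmt.Println(waitUntilRunning(ctx, 5, 300*time.Millisecond))
}
```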
9 changes: 5 additions & 4 deletions commands/list.go
@@ -2,6 +2,7 @@ package commands

import (
"bytes"
"context"
"fmt"
"os"
"time"
@@ -50,7 +51,7 @@ func newListCmd() *cobra.Command {
if _, err := ensureStandaloneRunnerAvailable(cmd.Context(), standaloneInstallPrinter); err != nil {
return fmt.Errorf("unable to initialize standalone model runner: %w", err)
}
models, err := listModels(openai, backend, desktopClient, quiet, jsonFormat, apiKey)
models, err := listModels(cmd.Context(), openai, backend, desktopClient, quiet, jsonFormat, apiKey)
if err != nil {
return err
}
@@ -67,16 +68,16 @@ func newListCmd() *cobra.Command {
return c
}

func listModels(openai bool, backend string, desktopClient *desktop.Client, quiet bool, jsonFormat bool, apiKey string) (string, error) {
func listModels(ctx context.Context, openai bool, backend string, desktopClient *desktop.Client, quiet bool, jsonFormat bool, apiKey string) (string, error) {
if openai || backend == "openai" {
models, err := desktopClient.ListOpenAI(backend, apiKey)
models, err := desktopClient.ListOpenAI(ctx, backend, apiKey)
if err != nil {
err = handleClientError(err, "Failed to list models")
return "", handleNotRunningError(err)
}
return formatter.ToStandardJSON(models)
}
models, err := desktopClient.List()
models, err := desktopClient.List(ctx)
if err != nil {
err = handleClientError(err, "Failed to list models")
return "", handleNotRunningError(err)
2 changes: 1 addition & 1 deletion commands/package.go
@@ -221,7 +221,7 @@ func (t *modelRunnerTarget) Write(ctx context.Context, mdl types.ModelArtifact,
return fmt.Errorf("get model ID: %w", err)
}
if t.tag.String() != "" {
if err := desktopClient.Tag(id, parseRepo(t.tag), t.tag.TagStr()); err != nil {
if err := desktopClient.Tag(ctx, id, parseRepo(t.tag), t.tag.TagStr()); err != nil {
return fmt.Errorf("tag model: %w", err)
}
}
2 changes: 1 addition & 1 deletion commands/ps.go
@@ -17,7 +17,7 @@ func newPSCmd() *cobra.Command {
Use: "ps",
Short: "List running models",
RunE: func(cmd *cobra.Command, args []string) error {
ps, err := desktopClient.PS()
ps, err := desktopClient.PS(cmd.Context())
if err != nil {
err = handleClientError(err, "Failed to list running models")
return handleNotRunningError(err)
2 changes: 1 addition & 1 deletion commands/pull.go
@@ -47,7 +47,7 @@ func pullModel(cmd *cobra.Command, desktopClient *desktop.Client, model string,
} else {
progress = RawProgress
}
response, progressShown, err := desktopClient.Pull(model, ignoreRuntimeMemoryCheck, progress)
response, progressShown, err := desktopClient.Pull(cmd.Context(), model, ignoreRuntimeMemoryCheck, progress)

// Add a newline before any output (success or error) if progress was shown.
if progressShown {
2 changes: 1 addition & 1 deletion commands/push.go
@@ -34,7 +34,7 @@ func newPushCmd() *cobra.Command {
}

func pushModel(cmd *cobra.Command, desktopClient *desktop.Client, model string) error {
response, progressShown, err := desktopClient.Push(model, TUIProgress)
response, progressShown, err := desktopClient.Push(cmd.Context(), model, TUIProgress)

// Add a newline before any output (success or error) if progress was shown.
if progressShown {
2 changes: 1 addition & 1 deletion commands/rm.go
@@ -27,7 +27,7 @@ func newRemoveCmd() *cobra.Command {
if _, err := ensureStandaloneRunnerAvailable(cmd.Context(), cmd); err != nil {
return fmt.Errorf("unable to initialize standalone model runner: %w", err)
}
response, err := desktopClient.Remove(args, force)
response, err := desktopClient.Remove(cmd.Context(), args, force)
if response != "" {
cmd.Print(response)
}
25 changes: 21 additions & 4 deletions commands/run.go
@@ -2,10 +2,12 @@ package commands

import (
"bufio"
"context"
"errors"
"fmt"
"io"
"os"
"os/signal"
"strings"

"github.com/docker/model-cli/commands/completion"
@@ -136,7 +138,7 @@ func newRunCmd() *cobra.Command {

// Do not validate the model in case of using OpenAI's backend, let OpenAI handle it
if backend != "openai" {
_, err := desktopClient.Inspect(model, false)
_, err := desktopClient.Inspect(cmd.Context(), model, false)
if err != nil {
if !errors.Is(err, desktop.ErrNotFound) {
return handleNotRunningError(handleClientError(err, "Failed to inspect model"))
@@ -149,7 +151,7 @@ }
}

if prompt != "" {
if err := desktopClient.Chat(backend, model, prompt, apiKey); err != nil {
if err := desktopClient.Chat(cmd.Context(), backend, model, prompt, apiKey); err != nil {
return handleClientError(err, "Failed to generate a response")
}
cmd.Println()
@@ -178,8 +180,12 @@ func newRunCmd() *cobra.Command {
continue
}

if err := desktopClient.Chat(backend, model, userInput, apiKey); err != nil {
cmd.PrintErr(handleClientError(err, "Failed to generate a response"))
if err := cancellableChat(cmd.Context(), desktopClient, backend, model, userInput, apiKey); err != nil {
if errors.Is(err, context.Canceled) {
fmt.Println("\nChat cancelled - Press Ctrl-C again to exit.")
} else {
cmd.PrintErr(handleClientError(err, "Failed to generate a response"))
}
continue
}

@@ -208,3 +214,14 @@ func newRunCmd() *cobra.Command {

return c
}

// cancellableChat sends a chat request that can be cancelled with Ctrl-C, both on Unix and Windows.
func cancellableChat(ctx context.Context, desktopClient *desktop.Client, backend, model, userInput, apiKey string) error {
// Create a NotifyContext that will handle os.Interrupt by cancelling the chat request.
// Calling stop at the end restores the previous signal handling, allowing Ctrl-C to exit the program.
// On Windows, the mapping from CTRL_C_EVENT to os.Interrupt can be seen at
// https://github.com/golang/go/blob/13bb48e6fbc35419a28747688426eb3684242fbc/src/runtime/os_windows.go#L1029
chatContext, stop := signal.NotifyContext(ctx, os.Interrupt)
defer stop()
return desktopClient.Chat(chatContext, backend, model, userInput, apiKey)
}
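cancellableChat scopes interrupt handling to a single request: while a chat is in flight, Ctrl-C cancels only chatContext (the loop above then prints the cancellation notice and keeps reading input), and once stop() restores the previous signal disposition, the next Ctrl-C exits the program as before. A self-contained sketch of the same signal.NotifyContext pattern, with slowOperation as an illustrative stand-in for desktopClient.Chat:

```go
// Demonstration of the pattern used by cancellableChat; slowOperation is an
// illustrative stand-in for desktopClient.Chat.
package main

import (
	"context"
	"errors"
	"fmt"
	"os"
	"os/signal"
	"time"
)

func slowOperation(ctx context.Context) error {
	select {
	case <-time.After(10 * time.Second):
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}

func main() {
	// While slowOperation runs, Ctrl-C cancels opCtx and the call returns
	// context.Canceled instead of killing the process.
	opCtx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
	err := slowOperation(opCtx)
	stop() // restore previous signal handling; Ctrl-C now exits normally

	if errors.Is(err, context.Canceled) {
		fmt.Println("operation cancelled; process still running")
	}
	time.Sleep(5 * time.Second) // a Ctrl-C here terminates the program
}
```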
5 changes: 3 additions & 2 deletions commands/status.go
@@ -3,9 +3,10 @@ package commands
import (
"encoding/json"
"fmt"
"github.com/docker/model-cli/pkg/types"
"os"

"github.com/docker/model-cli/pkg/types"

"github.com/docker/cli/cli-plugins/hooks"
"github.com/docker/model-cli/commands/completion"
"github.com/docker/model-cli/desktop"
@@ -22,7 +23,7 @@ func newStatusCmd() *cobra.Command {
if err != nil {
return fmt.Errorf("unable to initialize standalone model runner: %w", err)
}
status := desktopClient.Status()
status := desktopClient.Status(cmd.Context())
if status.Error != nil {
return handleClientError(status.Error, "Failed to get Docker Model Runner status")
}
2 changes: 1 addition & 1 deletion commands/tag.go
@@ -42,7 +42,7 @@ func tagModel(cmd *cobra.Command, desktopClient *desktop.Client, source, target
return fmt.Errorf("invalid tag: %w", err)
}
// Make tag request with model runner client
if err := desktopClient.Tag(source, parseRepo(tag), tag.TagStr()); err != nil {
if err := desktopClient.Tag(cmd.Context(), source, parseRepo(tag), tag.TagStr()); err != nil {
return fmt.Errorf("failed to tag model: %w", err)
}
cmd.Printf("Model %q tagged successfully with %q\n", source, target)
2 changes: 1 addition & 1 deletion commands/unload.go
@@ -17,7 +17,7 @@ func newUnloadCmd() *cobra.Command {
Use: "unload " + cmdArgs,
Short: "Unload running models",
RunE: func(cmd *cobra.Command, models []string) error {
unloadResp, err := desktopClient.Unload(desktop.UnloadRequest{All: all, Backend: backend, Models: models})
unloadResp, err := desktopClient.Unload(cmd.Context(), desktop.UnloadRequest{All: all, Backend: backend, Models: models})
if err != nil {
err = handleClientError(err, "Failed to unload models")
return handleNotRunningError(err)