From 79a8b3fbef18b0eddf58a966c68122e466928a77 Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Mon, 27 Jan 2025 15:39:20 -0500
Subject: [PATCH 1/9] first pass

---
 cli/app.go          | 58 +++++++++++++++++++++++++++++++++++
 cli/auth.go         |  2 ++
 cli/client.go       | 22 +++++++------
 cli/ml_inference.go | 75 +++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 147 insertions(+), 10 deletions(-)
 create mode 100644 cli/ml_inference.go

diff --git a/cli/app.go b/cli/app.go
index aa4590f1dbc..c87ae4ede8a 100644
--- a/cli/app.go
+++ b/cli/app.go
@@ -102,6 +102,13 @@ const (
 	mlTrainingFlagURL  = "url"
 	mlTrainingFlagArgs = "args"
 
+	inferenceFlagFileOrgID      = "file-org-id"
+	inferenceFlagFileID         = "file-id"
+	inferenceFlagFileLocationID = "file-location-id"
+	inferenceFlagModelID        = "model-id"
+	inferenceFlagModelOrgID     = "model-org-id"
+	inferenceFlagModelVersionID = "model-version"
+
 	dataFlagDataType    = "data-type"
 	dataFlagOrgIDs      = "org-ids"
 	dataFlagLocationIDs = "location-ids"
@@ -2748,6 +2755,57 @@ This won't work unless you have an existing installation of our GitHub app on yo
 				},
 			},
 		},
+		{
+			Name:            "inference",
+			Usage:           "work with cloud hosted inference service",
+			UsageText:       createUsageText("inference", nil, false, true),
+			HideHelpCommand: true,
+			Subcommands: []*cli.Command{
+				{
+					Name:      "infer",
+					Usage:     "run inference on an image",
+					UsageText: createUsageText("inference infer", []string{generalFlagOrgID, inferenceFlagFileOrgID, inferenceFlagFileID, inferenceFlagFileLocationID, inferenceFlagModelID, inferenceFlagModelVersionID}, true, false),
+					Flags: []cli.Flag{
+						&cli.StringFlag{
+							Name:     generalFlagOrgID,
+							Usage:    "organization ID that is executing the inference job",
+							Required: true,
+						},
+						&cli.StringFlag{
+							Name:     inferenceFlagFileOrgID,
+							Usage:    "organization ID that owns the file to run inference on",
+							Required: true,
+						},
+						&cli.StringFlag{
+							Name:     inferenceFlagFileID,
+							Usage:    "file ID of the file to run inference on",
+							Required: true,
+						},
+						&cli.StringFlag{
+							Name:     inferenceFlagFileLocationID,
+							Usage:    "location ID of the file to run inference on",
+							Required: true,
+						},
+						&cli.StringFlag{
+							Name:     inferenceFlagModelID,
+							Usage:    "ID of the model to use to run inference",
+							Required: true,
+						},
+						// &cli.StringFlag{
+						// 	Name:     inferenceFlagModelOrgID,
+						// 	Usage:    "organization ID that owns the model to use to run inference",
+						// 	Required: true,
+						// },
+						&cli.StringFlag{
+							Name:     inferenceFlagModelVersionID,
+							Usage:    "version ID of the model to use to run inference",
+							Required: true,
+						},
+					},
+					Action: createCommandWithT[InferenceInferArgs](InferenceInferAction),
+				},
+			},
+		},
 		{
 			Name:  "version",
 			Usage: "print version info for this program",
diff --git a/cli/auth.go b/cli/auth.go
index c35c04d7bfe..0119598ff44 100644
--- a/cli/auth.go
+++ b/cli/auth.go
@@ -21,6 +21,7 @@ import (
 	buildpb "go.viam.com/api/app/build/v1"
 	datapb "go.viam.com/api/app/data/v1"
 	datasetpb "go.viam.com/api/app/dataset/v1"
+	mlinferencepb "go.viam.com/api/app/mlinference/v1"
 	mltrainingpb "go.viam.com/api/app/mltraining/v1"
 	packagepb "go.viam.com/api/app/packages/v1"
 	apppb "go.viam.com/api/app/v1"
@@ -544,6 +545,7 @@ func (c *viamClient) ensureLoggedInInner() error {
 	c.packageClient = packagepb.NewPackageServiceClient(conn)
 	c.datasetClient = datasetpb.NewDatasetServiceClient(conn)
 	c.mlTrainingClient = mltrainingpb.NewMLTrainingServiceClient(conn)
+	c.mlInferenceClient = mlinferencepb.NewMLInferenceServiceClient(conn)
 	c.buildClient = buildpb.NewBuildServiceClient(conn)
 
 	return nil
diff --git a/cli/client.go b/cli/client.go
index aae57c46267..8624e2ce3a4 100644
--- a/cli/client.go
+++ b/cli/client.go
@@ -30,6 +30,7 @@ import (
 	buildpb "go.viam.com/api/app/build/v1"
 	datapb "go.viam.com/api/app/data/v1"
 	datasetpb "go.viam.com/api/app/dataset/v1"
+	mlinferencepb "go.viam.com/api/app/mlinference/v1"
 	mltrainingpb "go.viam.com/api/app/mltraining/v1"
 	packagepb "go.viam.com/api/app/packages/v1"
 	apppb "go.viam.com/api/app/v1"
@@ -69,16 +70,17 @@ var errNoShellService = errors.New("shell service is not enabled on this machine
 // viamClient wraps a cli.Context and provides all the CLI command functionality
 // needed to talk to the app and data services but not directly to robot parts.
 type viamClient struct {
-	c                *cli.Context
-	conf             *Config
-	client           apppb.AppServiceClient
-	dataClient       datapb.DataServiceClient
-	packageClient    packagepb.PackageServiceClient
-	datasetClient    datasetpb.DatasetServiceClient
-	mlTrainingClient mltrainingpb.MLTrainingServiceClient
-	buildClient      buildpb.BuildServiceClient
-	baseURL          *url.URL
-	authFlow         *authFlow
+	c                 *cli.Context
+	conf              *Config
+	client            apppb.AppServiceClient
+	dataClient        datapb.DataServiceClient
+	packageClient     packagepb.PackageServiceClient
+	datasetClient     datasetpb.DatasetServiceClient
+	mlTrainingClient  mltrainingpb.MLTrainingServiceClient
+	mlInferenceClient mlinferencepb.MLInferenceServiceClient
+	buildClient       buildpb.BuildServiceClient
+	baseURL           *url.URL
+	authFlow          *authFlow
 
 	selectedOrg *apppb.Organization
 	selectedLoc *apppb.Location
diff --git a/cli/ml_inference.go b/cli/ml_inference.go
new file mode 100644
index 00000000000..b90fc00f575
--- /dev/null
+++ b/cli/ml_inference.go
@@ -0,0 +1,75 @@
+package cli
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/pkg/errors"
+	"github.com/urfave/cli/v2"
+	v1 "go.viam.com/api/app/data/v1"
+	mlinferencepb "go.viam.com/api/app/mlinference/v1"
+)
+
+// InferenceInferArgs holds the arguments for the inference action.
+type InferenceInferArgs struct {
+	OrgID          string
+	FileOrgID      string
+	FileID         string
+	FileLocationID string
+	ModelID        string
+	// TODO: remove ModelOrgID
+	ModelOrgID   string
+	ModelVersion string
+}
+
+// InferenceInferAction is the corresponding action for 'inference infer'.
+func InferenceInferAction(c *cli.Context, args InferenceInferArgs) error {
+	client, err := newViamClient(c)
+	if err != nil {
+		return err
+	}
+
+	// Print the arguments
+	fmt.Println("OrgID: ", args.OrgID)
+	fmt.Println("FileOrgID: ", args.FileOrgID)
+	fmt.Println("FileID: ", args.FileID)
+	fmt.Println("FileLocationID: ", args.FileLocationID)
+	fmt.Println("ModelID: ", args.ModelID)
+	fmt.Println("ModelOrgID: ", args.ModelOrgID)
+	fmt.Println("ModelVersion: ", args.ModelVersion)
+
+	inferenceJobID, err := client.runInference(
+		args.OrgID, args.FileOrgID, args.FileID, args.FileLocationID,
+		args.ModelID, args.ModelOrgID, args.ModelVersion)
+	if err != nil {
+		return err
+	}
+	printf(c.App.Writer, "Submitted inference job with ID %s", inferenceJobID)
+	return nil
+}
+
+// runInference runs inference on an image with the specified parameters.
+func (c *viamClient) runInference(orgID, fileOrgID, fileID, fileLocation, modelID, modelOrgID, modelVersion string) (*mlinferencepb.GetInferenceResponse, error) {
+	if err := c.ensureLoggedIn(); err != nil {
+		return nil, err
+	}
+
+	req := &mlinferencepb.GetInferenceRequest{
+		OrganizationId: orgID,
+		BinaryId: &v1.BinaryID{
+			FileId:         fileID,
+			OrganizationId: fileOrgID,
+			LocationId:     fileLocation,
+		},
+		RegistryItemId:      modelID,
+		RegistryItemVersion: modelVersion,
+	}
+
+	resp, err := c.mlInferenceClient.GetInference(context.Background(), req)
+	if err != nil {
+		return nil, errors.Wrapf(err, "received error from server")
+	}
+	fmt.Println("OutputTensors: ", resp.OutputTensors)
+	fmt.Println("Annotations: ", resp.Annotations)
+	return resp, nil
+}

From a3d684c350be84d8f9f40af5a247372d3806a0eb Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Tue, 28 Jan 2025 16:44:42 -0500
Subject: [PATCH 2/9] move some stuff

---
 cli/app.go          |  9 ++-------
 cli/ml_inference.go | 22 +++++++++++++++-------
 2 files changed, 17 insertions(+), 14 deletions(-)

diff --git a/cli/app.go b/cli/app.go
index c87ae4ede8a..96e73ac7455 100644
--- a/cli/app.go
+++ b/cli/app.go
@@ -102,12 +102,7 @@ const (
 	mlTrainingFlagURL  = "url"
 	mlTrainingFlagArgs = "args"
 
-	inferenceFlagFileOrgID      = "file-org-id"
-	inferenceFlagFileID         = "file-id"
-	inferenceFlagFileLocationID = "file-location-id"
-	inferenceFlagModelID        = "model-id"
-	inferenceFlagModelOrgID     = "model-org-id"
-	inferenceFlagModelVersionID = "model-version"
+
 
 	dataFlagDataType    = "data-type"
 	dataFlagOrgIDs      = "org-ids"
@@ -2802,7 +2797,7 @@ This won't work unless you have an existing installation of our GitHub app on yo
 							Required: true,
 						},
 					},
-					Action: createCommandWithT[InferenceInferArgs](InferenceInferAction),
+					Action: createCommandWithT[mlInferenceInferArgs](MLInferenceInferAction),
 				},
 			},
 		},
diff --git a/cli/ml_inference.go b/cli/ml_inference.go
index b90fc00f575..135b5ee49a8 100644
--- a/cli/ml_inference.go
+++ b/cli/ml_inference.go
@@ -10,8 +10,16 @@ import (
 	mlinferencepb "go.viam.com/api/app/mlinference/v1"
 )
 
-// InferenceInferArgs holds the arguments for the inference action.
-type InferenceInferArgs struct {
+const (
+	inferenceFlagFileOrgID      = "file-org-id"
+	inferenceFlagFileID         = "file-id"
+	inferenceFlagFileLocationID = "file-location-id"
+	inferenceFlagModelID        = "model-id"
+	inferenceFlagModelOrgID     = "model-org-id"
+	inferenceFlagModelVersionID = "model-version"
+)
+
+type mlInferenceInferArgs struct {
 	OrgID          string
 	FileOrgID      string
 	FileID         string
@@ -22,8 +30,8 @@ type InferenceInferArgs struct {
 	ModelVersion string
 }
 
-// InferenceInferAction is the corresponding action for 'inference infer'.
-func InferenceInferAction(c *cli.Context, args InferenceInferArgs) error {
+// MLInferenceInferAction is the corresponding action for 'inference infer'.
+func MLInferenceInferAction(c *cli.Context, args mlInferenceInferArgs) error {
 	client, err := newViamClient(c)
 	if err != nil {
 		return err
@@ -38,7 +46,7 @@ func InferenceInferAction(c *cli.Context, args InferenceInferArgs) error {
 	fmt.Println("ModelOrgID: ", args.ModelOrgID)
 	fmt.Println("ModelVersion: ", args.ModelVersion)
 
-	inferenceJobID, err := client.runInference(
+	inferenceJobID, err := client.mlRunInference(
 		args.OrgID, args.FileOrgID, args.FileID, args.FileLocationID,
 		args.ModelID, args.ModelOrgID, args.ModelVersion)
 	if err != nil {
@@ -48,8 +56,8 @@ func InferenceInferAction(c *cli.Context, args InferenceInferArgs) error {
 	return nil
 }
 
-// runInference runs inference on an image with the specified parameters.
-func (c *viamClient) runInference(orgID, fileOrgID, fileID, fileLocation, modelID, modelOrgID, modelVersion string) (*mlinferencepb.GetInferenceResponse, error) {
+// mlRunInference runs inference on an image with the specified parameters.
+func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, modelID, modelOrgID, modelVersion string) (*mlinferencepb.GetInferenceResponse, error) {
 	if err := c.ensureLoggedIn(); err != nil {
 		return nil, err
 	}

From fbb6b65c01fd84f5e408222fab4b618474a80f71 Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Tue, 28 Jan 2025 17:27:27 -0500
Subject: [PATCH 3/9] works, need to clean up output format

---
 cli/app.go          |  7 -------
 cli/ml_inference.go | 13 ++++---------
 2 files changed, 4 insertions(+), 16 deletions(-)

diff --git a/cli/app.go b/cli/app.go
index 96e73ac7455..55833c2a0e1 100644
--- a/cli/app.go
+++ b/cli/app.go
@@ -102,8 +102,6 @@ const (
 	mlTrainingFlagURL  = "url"
 	mlTrainingFlagArgs = "args"
-
-
 
 	dataFlagDataType    = "data-type"
 	dataFlagOrgIDs      = "org-ids"
 	dataFlagLocationIDs = "location-ids"
@@ -2786,11 +2784,6 @@ This won't work unless you have an existing installation of our GitHub app on yo
 							Usage:    "ID of the model to use to run inference",
 							Required: true,
 						},
-						// &cli.StringFlag{
-						// 	Name:     inferenceFlagModelOrgID,
-						// 	Usage:    "organization ID that owns the model to use to run inference",
-						// 	Required: true,
-						// },
 						&cli.StringFlag{
 							Name:     inferenceFlagModelVersionID,
 							Usage:    "version ID of the model to use to run inference",
 							Required: true,
 						},
diff --git a/cli/ml_inference.go b/cli/ml_inference.go
index 135b5ee49a8..06a0a391254 100644
--- a/cli/ml_inference.go
+++ b/cli/ml_inference.go
@@ -15,7 +15,6 @@ const (
 	inferenceFlagFileID         = "file-id"
 	inferenceFlagFileLocationID = "file-location-id"
 	inferenceFlagModelID        = "model-id"
-	inferenceFlagModelOrgID     = "model-org-id"
 	inferenceFlagModelVersionID = "model-version"
 )
 
@@ -25,9 +24,7 @@ type mlInferenceInferArgs struct {
 	FileOrgID      string
 	FileID         string
 	FileLocationID string
 	ModelID        string
-	// TODO: remove ModelOrgID
-	ModelOrgID   string
-	ModelVersion string
+	ModelVersion   string
 }
 
@@ -43,21 +40,19 @@ func MLInferenceInferAction(c *cli.Context, args mlInferenceInferArgs) error {
 	fmt.Println("FileID: ", args.FileID)
 	fmt.Println("FileLocationID: ", args.FileLocationID)
 	fmt.Println("ModelID: ", args.ModelID)
-	fmt.Println("ModelOrgID: ", args.ModelOrgID)
 	fmt.Println("ModelVersion: ", args.ModelVersion)
 
-	inferenceJobID, err := client.mlRunInference(
+	_, err = client.mlRunInference(
 		args.OrgID, args.FileOrgID, args.FileID, args.FileLocationID,
-		args.ModelID, args.ModelOrgID, args.ModelVersion)
+		args.ModelID, args.ModelVersion)
 	if err != nil {
 		return err
 	}
-	printf(c.App.Writer, "Submitted inference job with ID %s", inferenceJobID)
 	return nil
 }
 
 // mlRunInference runs inference on an image with the specified parameters.
-func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, modelID, modelOrgID, modelVersion string) (*mlinferencepb.GetInferenceResponse, error) {
+func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, modelID, modelVersion string) (*mlinferencepb.GetInferenceResponse, error) {
 	if err := c.ensureLoggedIn(); err != nil {
 		return nil, err
 	}

From d757dc95ae9256e905de2d87d266196ba72b78e0 Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Thu, 30 Jan 2025 15:25:42 -0500
Subject: [PATCH 4/9] working

---
 cli/ml_inference.go | 56 +++++++++++++++++++++++++++++++++++--------
 1 file changed, 46 insertions(+), 10 deletions(-)

diff --git a/cli/ml_inference.go b/cli/ml_inference.go
index 06a0a391254..5136bfefdc9 100644
--- a/cli/ml_inference.go
+++ b/cli/ml_inference.go
@@ -34,14 +34,6 @@ func MLInferenceInferAction(c *cli.Context, args mlInferenceInferArgs) error {
 		return err
 	}
 
-	// Print the arguments
-	fmt.Println("OrgID: ", args.OrgID)
-	fmt.Println("FileOrgID: ", args.FileOrgID)
-	fmt.Println("FileID: ", args.FileID)
-	fmt.Println("FileLocationID: ", args.FileLocationID)
-	fmt.Println("ModelID: ", args.ModelID)
-	fmt.Println("ModelVersion: ", args.ModelVersion)
-
 	_, err = client.mlRunInference(
 		args.OrgID, args.FileOrgID, args.FileID, args.FileLocationID,
 		args.ModelID, args.ModelVersion)
@@ -72,7 +64,51 @@ func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, mode
 	if err != nil {
 		return nil, errors.Wrapf(err, "received error from server")
 	}
-	fmt.Println("OutputTensors: ", resp.OutputTensors)
-	fmt.Println("Annotations: ", resp.Annotations)
+	printInferenceResponse(resp)
 	return resp, nil
 }
+
+// printInferenceResponse prints a neat representation of the GetInferenceResponse.
+func printInferenceResponse(resp *mlinferencepb.GetInferenceResponse) {
+	fmt.Println("Inference Response:")
+	fmt.Println("Output Tensors:")
+	if resp.OutputTensors != nil {
+		for name, tensor := range resp.OutputTensors.Tensors {
+			fmt.Printf(" Tensor Name: %s\n", name)
+			fmt.Printf(" Shape: %v\n", tensor.Shape)
+			if tensor.Tensor != nil {
+				fmt.Print(" Values: [")
+				for i, value := range tensor.GetDoubleTensor().GetData() {
+					if i > 0 {
+						fmt.Print(", ")
+					}
+					fmt.Printf("%.4f", value)
+				}
+				fmt.Println("]")
+			} else {
+				fmt.Println(" No values available.")
+			}
+		}
+	} else {
+		fmt.Println(" No output tensors.")
+	}
+
+	fmt.Println("Annotations:")
+	if resp.Annotations != nil {
+		for _, bbox := range resp.Annotations.Bboxes {
+			fmt.Printf(" Bounding Box ID: %s, Label: %s\n", bbox.Id, bbox.Label)
+			fmt.Printf(" Coordinates: [%f, %f, %f, %f]\n", bbox.XMinNormalized, bbox.YMinNormalized, bbox.XMaxNormalized, bbox.YMaxNormalized)
+			if bbox.Confidence != nil {
+				fmt.Printf(" Confidence: %.4f\n", *bbox.Confidence)
+			}
+		}
+		for _, classification := range resp.Annotations.Classifications {
+			fmt.Printf(" Classification Label: %s\n", classification.Label)
+			if classification.Confidence != nil {
+				fmt.Printf(" Confidence: %.4f\n", *classification.Confidence)
+			}
+		}
+	} else {
+		fmt.Println(" No annotations.")
+	}
+}

From 91e33ad601daf8f18a9ed5c25289e4ea45d531c0 Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Thu, 30 Jan 2025 17:43:28 -0500
Subject: [PATCH 5/9] lint

---
 cli/app.go          |  9 ++++++---
 cli/ml_inference.go | 45 +++++++++++++++++++++++---------------------
 2 files changed, 31 insertions(+), 23 deletions(-)

diff --git a/cli/app.go b/cli/app.go
index 55833c2a0e1..2abb5d6c255 100644
--- a/cli/app.go
+++ b/cli/app.go
@@ -2755,9 +2755,12 @@ This won't work unless you have an existing installation of our GitHub app on yo
 			HideHelpCommand: true,
 			Subcommands: []*cli.Command{
 				{
-					Name:      "infer",
-					Usage:     "run inference on an image",
-					UsageText: createUsageText("inference infer", []string{generalFlagOrgID, inferenceFlagFileOrgID, inferenceFlagFileID, inferenceFlagFileLocationID, inferenceFlagModelID, inferenceFlagModelVersionID}, true, false),
+					Name:  "infer",
+					Usage: "run inference on an image",
+					UsageText: createUsageText("inference infer", []string{
+						generalFlagOrgID, inferenceFlagFileOrgID, inferenceFlagFileID,
+						inferenceFlagFileLocationID, inferenceFlagModelID, inferenceFlagModelVersionID,
+					}, true, false),
 					Flags: []cli.Flag{
 						&cli.StringFlag{
 							Name:     generalFlagOrgID,
diff --git a/cli/ml_inference.go b/cli/ml_inference.go
index 5136bfefdc9..02a24b2fba4 100644
--- a/cli/ml_inference.go
+++ b/cli/ml_inference.go
@@ -3,6 +3,7 @@ package cli
 import (
 	"context"
 	"fmt"
+	"strings"
 
 	"github.com/pkg/errors"
 	"github.com/urfave/cli/v2"
@@ -44,7 +45,9 @@ func MLInferenceInferAction(c *cli.Context, args mlInferenceInferArgs) error {
 }
 
 // mlRunInference runs inference on an image with the specified parameters.
-func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, modelID, modelVersion string) (*mlinferencepb.GetInferenceResponse, error) {
+func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, modelID,
+	modelVersion string,
+) (*mlinferencepb.GetInferenceResponse, error) {
 	if err := c.ensureLoggedIn(); err != nil {
 		return nil, err
 	}
@@ -64,51 +67,53 @@ func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, mode
 	if err != nil {
 		return nil, errors.Wrapf(err, "received error from server")
 	}
-	printInferenceResponse(resp)
+	c.printInferenceResponse(resp)
 	return resp, nil
 }
 
 // printInferenceResponse prints a neat representation of the GetInferenceResponse.
-func printInferenceResponse(resp *mlinferencepb.GetInferenceResponse) {
-	fmt.Println("Inference Response:")
-	fmt.Println("Output Tensors:")
+func (c *viamClient) printInferenceResponse(resp *mlinferencepb.GetInferenceResponse) {
+	printf(c.c.App.Writer, "Inference Response:")
+	printf(c.c.App.Writer, "Output Tensors:")
 	if resp.OutputTensors != nil {
 		for name, tensor := range resp.OutputTensors.Tensors {
-			fmt.Printf(" Tensor Name: %s\n", name)
-			fmt.Printf(" Shape: %v\n", tensor.Shape)
+			printf(c.c.App.Writer, " Tensor Name: %s", name)
+			printf(c.c.App.Writer, " Shape: %v", tensor.Shape)
 			if tensor.Tensor != nil {
-				fmt.Print(" Values: [")
+				var sb strings.Builder
 				for i, value := range tensor.GetDoubleTensor().GetData() {
 					if i > 0 {
-						fmt.Print(", ")
+						sb.WriteString(", ")
 					}
-					fmt.Printf("%.4f", value)
+					sb.WriteString(fmt.Sprintf("%.4f", value))
 				}
-				fmt.Println("]")
+				printf(c.c.App.Writer, " Values: [%s]", sb.String())
 			} else {
-				fmt.Println(" No values available.")
+				printf(c.c.App.Writer, " No values available.")
 			}
 		}
 	} else {
-		fmt.Println(" No output tensors.")
+		printf(c.c.App.Writer, " No output tensors.")
 	}
 
-	fmt.Println("Annotations:")
+	printf(c.c.App.Writer, "Annotations:")
 	if resp.Annotations != nil {
 		for _, bbox := range resp.Annotations.Bboxes {
-			fmt.Printf(" Bounding Box ID: %s, Label: %s\n", bbox.Id, bbox.Label)
-			fmt.Printf(" Coordinates: [%f, %f, %f, %f]\n", bbox.XMinNormalized, bbox.YMinNormalized, bbox.XMaxNormalized, bbox.YMaxNormalized)
+			printf(c.c.App.Writer, " Bounding Box ID: %s, Label: %s",
+				bbox.Id, bbox.Label)
+			printf(c.c.App.Writer, " Coordinates: [%f, %f, %f, %f]",
+				bbox.XMinNormalized, bbox.YMinNormalized, bbox.XMaxNormalized, bbox.YMaxNormalized)
 			if bbox.Confidence != nil {
-				fmt.Printf(" Confidence: %.4f\n", *bbox.Confidence)
+				printf(c.c.App.Writer, " Confidence: %.4f", *bbox.Confidence)
 			}
 		}
 		for _, classification := range resp.Annotations.Classifications {
-			fmt.Printf(" Classification Label: %s\n", classification.Label)
+			printf(c.c.App.Writer, " Classification Label: %s", classification.Label)
 			if classification.Confidence != nil {
-				fmt.Printf(" Confidence: %.4f\n", *classification.Confidence)
+				printf(c.c.App.Writer, " Confidence: %.4f", *classification.Confidence)
 			}
 		}
 	} else {
-		fmt.Println(" No annotations.")
+		printf(c.c.App.Writer, " No annotations.")
 	}
 }

From e9bbe3e98949421abd967021e5b222f2d91efe31 Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Fri, 31 Jan 2025 11:22:35 -0500
Subject: [PATCH 6/9] pr broken?
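A recap between revisions: patches 1 through 5 wire a new MLInferenceService client into the CLI and pretty-print its response. The sketch below drives the same RPC directly against the generated client, outside the CLI plumbing, and is illustrative only: `conn` stands in for an already-authenticated gRPC connection to the Viam app (dialing and credentials are elided), and the angle-bracketed IDs are hypothetical placeholders. Only constructors and fields that appear in the patches above are used.

package main

import (
	"context"
	"fmt"

	v1 "go.viam.com/api/app/data/v1"
	mlinferencepb "go.viam.com/api/app/mlinference/v1"
	"google.golang.org/grpc"
)

// runInferenceOnce mirrors the request that (*viamClient).mlRunInference builds
// in this series; conn is assumed to be an authenticated connection.
func runInferenceOnce(conn *grpc.ClientConn) error {
	// Same constructor the CLI calls in ensureLoggedInInner (cli/auth.go).
	client := mlinferencepb.NewMLInferenceServiceClient(conn)

	resp, err := client.GetInference(context.Background(), &mlinferencepb.GetInferenceRequest{
		OrganizationId: "<org-id>", // org executing the inference job
		BinaryId: &v1.BinaryID{ // the image to run inference on
			FileId:         "<file-id>",
			OrganizationId: "<file-org-id>",
			LocationId:     "<file-location-id>",
		},
		RegistryItemId:      "<model-id>", // registry model, as addressed before patch 7
		RegistryItemVersion: "<model-version>",
	})
	if err != nil {
		return err
	}
	// OutputTensors and Annotations are the two payloads the CLI pretty-prints.
	fmt.Println(resp.OutputTensors, resp.Annotations)
	return nil
}

func main() {
	var conn *grpc.ClientConn // placeholder: obtain via an authenticated dial in real use
	if conn == nil {
		fmt.Println("no connection configured; sketch only")
		return
	}
	if err := runInferenceOnce(conn); err != nil {
		fmt.Println("inference failed:", err)
	}
}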
From 648a3cd08afa42f7dbfe7ea50b03a312a048f50e Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Tue, 4 Feb 2025 15:41:14 -0500
Subject: [PATCH 7/9] address comments

---
 cli/app.go          | 15 ++++++++++-----
 cli/ml_inference.go | 17 ++++++++++-------
 2 files changed, 20 insertions(+), 12 deletions(-)

diff --git a/cli/app.go b/cli/app.go
index 2abb5d6c255..b9b808f6fa9 100644
--- a/cli/app.go
+++ b/cli/app.go
@@ -2759,7 +2759,7 @@ This won't work unless you have an existing installation of our GitHub app on yo
 					Usage: "run inference on an image",
 					UsageText: createUsageText("inference infer", []string{
 						generalFlagOrgID, inferenceFlagFileOrgID, inferenceFlagFileID,
-						inferenceFlagFileLocationID, inferenceFlagModelID, inferenceFlagModelVersionID,
+						inferenceFlagFileLocationID, inferenceFlagModelOrgID, inferenceFlagModelName, inferenceFlagModelVersion,
 					}, true, false),
 					Flags: []cli.Flag{
 						&cli.StringFlag{
@@ -2783,13 +2783,18 @@ This won't work unless you have an existing installation of our GitHub app on yo
 							Required: true,
 						},
 						&cli.StringFlag{
-							Name:     inferenceFlagModelID,
-							Usage:    "ID of the model to use to run inference",
+							Name:     inferenceFlagModelOrgID,
+							Usage:    "organization ID that hosts the model to use to run inference",
 							Required: true,
 						},
 						&cli.StringFlag{
-							Name:     inferenceFlagModelVersionID,
-							Usage:    "version ID of the model to use to run inference",
+							Name:     inferenceFlagModelName,
+							Usage:    "name of the model to use to run inference",
+							Required: true,
+						},
+						&cli.StringFlag{
+							Name:     inferenceFlagModelVersion,
+							Usage:    "version of the model to use to run inference",
 							Required: true,
 						},
 					},
diff --git a/cli/ml_inference.go b/cli/ml_inference.go
index 02a24b2fba4..fbf3b2d7d28 100644
--- a/cli/ml_inference.go
+++ b/cli/ml_inference.go
@@ -15,8 +15,9 @@ const (
 	inferenceFlagFileOrgID      = "file-org-id"
 	inferenceFlagFileID         = "file-id"
 	inferenceFlagFileLocationID = "file-location-id"
-	inferenceFlagModelID        = "model-id"
-	inferenceFlagModelVersionID = "model-version"
+	inferenceFlagModelOrgID     = "model-org-id"
+	inferenceFlagModelName      = "model-name"
+	inferenceFlagModelVersion   = "model-version"
 )
 
 type mlInferenceInferArgs struct {
@@ -24,7 +25,8 @@ type mlInferenceInferArgs struct {
 	FileOrgID      string
 	FileID         string
 	FileLocationID string
-	ModelID        string
+	ModelOrgID     string
+	ModelName      string
 	ModelVersion   string
 }
 
@@ -37,7 +39,7 @@ func MLInferenceInferAction(c *cli.Context, args mlInferenceInferArgs) error {
 
 	_, err = client.mlRunInference(
 		args.OrgID, args.FileOrgID, args.FileID, args.FileLocationID,
-		args.ModelID, args.ModelVersion)
+		args.ModelOrgID, args.ModelName, args.ModelVersion)
 	if err != nil {
 		return err
 	}
@@ -45,8 +47,8 @@ func MLInferenceInferAction(c *cli.Context, args mlInferenceInferArgs) error {
 }
 
 // mlRunInference runs inference on an image with the specified parameters.
-func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, modelID,
-	modelVersion string,
+func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, modelOrgID,
+	modelName, modelVersion string,
 ) (*mlinferencepb.GetInferenceResponse, error) {
 	if err := c.ensureLoggedIn(); err != nil {
 		return nil, err
@@ -59,7 +61,7 @@ func (c *viamClient) mlRunInference(orgID, fileOrgID, fileID, fileLocation, mode
 			OrganizationId: fileOrgID,
 			LocationId:     fileLocation,
 		},
-		RegistryItemId:      modelID,
+		RegistryItemId:      fmt.Sprintf("%s:%s", modelOrgID, modelName),
 		RegistryItemVersion: modelVersion,
 	}
 
@@ -98,6 +100,7 @@ func (c *viamClient) printInferenceResponse(resp *mlinferencepb.GetInferenceResp
 	}
 
 	printf(c.c.App.Writer, "Annotations:")
+	printf(c.c.App.Writer, "Bounding Box Format: [x_min, y_min, x_max, y_max]")
 	if resp.Annotations != nil {
 		for _, bbox := range resp.Annotations.Bboxes {
 			printf(c.c.App.Writer, " Bounding Box ID: %s, Label: %s",

From 1b111f91457158ec566d7185cb78b9b3fa2752fe Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Wed, 5 Feb 2025 10:24:16 -0500
Subject: [PATCH 8/9] change command to viam infer

---
 cli/app.go | 92 +++++++++++++++++++++++++-----------------------------
 1 file changed, 42 insertions(+), 50 deletions(-)

diff --git a/cli/app.go b/cli/app.go
index b9b808f6fa9..3fd935602a3 100644
--- a/cli/app.go
+++ b/cli/app.go
@@ -2749,58 +2749,50 @@ This won't work unless you have an existing installation of our GitHub app on yo
 			},
 		},
 		{
-			Name:            "inference",
-			Usage:           "work with cloud hosted inference service",
-			UsageText:       createUsageText("inference", nil, false, true),
-			HideHelpCommand: true,
-			Subcommands: []*cli.Command{
-				{
-					Name:  "infer",
-					Usage: "run inference on an image",
-					UsageText: createUsageText("inference infer", []string{
-						generalFlagOrgID, inferenceFlagFileOrgID, inferenceFlagFileID,
-						inferenceFlagFileLocationID, inferenceFlagModelOrgID, inferenceFlagModelName, inferenceFlagModelVersion,
-					}, true, false),
-					Flags: []cli.Flag{
-						&cli.StringFlag{
-							Name:     generalFlagOrgID,
-							Usage:    "organization ID that is executing the inference job",
-							Required: true,
-						},
-						&cli.StringFlag{
-							Name:     inferenceFlagFileOrgID,
-							Usage:    "organization ID that owns the file to run inference on",
-							Required: true,
-						},
-						&cli.StringFlag{
-							Name:     inferenceFlagFileID,
-							Usage:    "file ID of the file to run inference on",
-							Required: true,
-						},
-						&cli.StringFlag{
-							Name:     inferenceFlagFileLocationID,
-							Usage:    "location ID of the file to run inference on",
-							Required: true,
-						},
-						&cli.StringFlag{
-							Name:     inferenceFlagModelOrgID,
-							Usage:    "organization ID that hosts the model to use to run inference",
-							Required: true,
-						},
-						&cli.StringFlag{
-							Name:     inferenceFlagModelName,
-							Usage:    "name of the model to use to run inference",
-							Required: true,
-						},
-						&cli.StringFlag{
-							Name:     inferenceFlagModelVersion,
-							Usage:    "version of the model to use to run inference",
-							Required: true,
-						},
-					},
-					Action: createCommandWithT[mlInferenceInferArgs](MLInferenceInferAction),
-				},
-			},
-		},
+			Name:  "infer",
+			Usage: "run inference on an image",
+			UsageText: createUsageText("inference infer", []string{
+				generalFlagOrgID, inferenceFlagFileOrgID, inferenceFlagFileID,
+				inferenceFlagFileLocationID, inferenceFlagModelOrgID, inferenceFlagModelName, inferenceFlagModelVersion,
+			}, true, false),
+			Flags: []cli.Flag{
+				&cli.StringFlag{
+					Name:     generalFlagOrgID,
+					Usage:    "organization ID that is executing the inference job",
+					Required: true,
+				},
+				&cli.StringFlag{
+					Name:     inferenceFlagFileOrgID,
+					Usage:    "organization ID that owns the file to run inference on",
+					Required: true,
+				},
+				&cli.StringFlag{
+					Name:     inferenceFlagFileID,
+					Usage:    "file ID of the file to run inference on",
+					Required: true,
+				},
+				&cli.StringFlag{
+					Name:     inferenceFlagFileLocationID,
+					Usage:    "location ID of the file to run inference on",
+					Required: true,
+				},
+				&cli.StringFlag{
+					Name:     inferenceFlagModelOrgID,
+					Usage:    "organization ID that hosts the model to use to run inference",
+					Required: true,
+				},
+				&cli.StringFlag{
+					Name:     inferenceFlagModelName,
+					Usage:    "name of the model to use to run inference",
+					Required: true,
+				},
+				&cli.StringFlag{
+					Name:     inferenceFlagModelVersion,
+					Usage:    "version of the model to use to run inference",
+					Required: true,
+				},
+			},
+			Action: createCommandWithT[mlInferenceInferArgs](MLInferenceInferAction),
 		},
 		{
 			Name:  "version",

From 92e169c9f0b718adca924f94a7c70f96c0ff14d4 Mon Sep 17 00:00:00 2001
From: Vignesh
Date: Wed, 5 Feb 2025 10:25:33 -0500
Subject: [PATCH 9/9] adjust comment

---
 cli/app.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cli/app.go b/cli/app.go
index 3fd935602a3..1ab596dc25b 100644
--- a/cli/app.go
+++ b/cli/app.go
@@ -2750,7 +2750,7 @@ This won't work unless you have an existing installation of our GitHub app on yo
 		},
 		{
 			Name:  "infer",
-			Usage: "run inference on an image",
+			Usage: "run cloud hosted inference on an image",
 			UsageText: createUsageText("inference infer", []string{
 				generalFlagOrgID, inferenceFlagFileOrgID, inferenceFlagFileID,
 				inferenceFlagFileLocationID, inferenceFlagModelOrgID, inferenceFlagModelName, inferenceFlagModelVersion,