From 9890d0a68af7f14c09613ac65934f2db65687cf7 Mon Sep 17 00:00:00 2001
From: hnczlsr
Date: Fri, 30 Sep 2022 03:33:40 +0800
Subject: [PATCH] Fix comment in inference.proto (#1876)

Co-authored-by: Mark Saroufim
---
 frontend/server/src/main/resources/proto/inference.proto | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/frontend/server/src/main/resources/proto/inference.proto b/frontend/server/src/main/resources/proto/inference.proto
index 1d3dad5589..cd9aa89d21 100644
--- a/frontend/server/src/main/resources/proto/inference.proto
+++ b/frontend/server/src/main/resources/proto/inference.proto
@@ -13,12 +13,12 @@ message PredictionsRequest {
   // Version of model to run prediction on.
   string model_version = 2; //optional
 
-  // input data for model prediction
+  // Input data for model prediction
   map<string, bytes> input = 3; //required
 }
 
 message PredictionResponse {
-  // TorchServe health
+  // Response content for prediction
   bytes prediction = 1;
 }
 
@@ -28,8 +28,9 @@ message TorchServeHealthResponse {
 }
 
 service InferenceAPIsService {
+  // Check health status of the TorchServe server.
   rpc Ping(google.protobuf.Empty) returns (TorchServeHealthResponse) {}
 
   // Predictions entry point to get inference using default model version.
   rpc Predictions(PredictionsRequest) returns (PredictionResponse) {}
-}
\ No newline at end of file
+}
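
For reference, a minimal Python client sketch exercising the two RPCs whose comments this patch touches (Ping and Predictions). It assumes stubs generated from inference.proto with grpc_tools (the module names inference_pb2 / inference_pb2_grpc follow protoc's default naming), TorchServe's default gRPC inference port 7070, and a hypothetical registered model name and input file:

import grpc
from google.protobuf import empty_pb2

# Assumed module names, generated with:
#   python -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. inference.proto
import inference_pb2
import inference_pb2_grpc

def main():
    # 7070 is TorchServe's default gRPC inference port.
    with grpc.insecure_channel("localhost:7070") as channel:
        stub = inference_pb2_grpc.InferenceAPIsServiceStub(channel)

        # Ping: check health status of the TorchServe server.
        health = stub.Ping(empty_pb2.Empty())
        print("health:", health)

        # Predictions: run inference using the default model version
        # (model_version is optional and omitted here). "resnet-18" and
        # "kitten.jpg" are placeholders for a registered model and its input.
        with open("kitten.jpg", "rb") as f:
            request = inference_pb2.PredictionsRequest(
                model_name="resnet-18",
                input={"data": f.read()},  # map<string, bytes>
            )
        response = stub.Predictions(request)
        print("prediction:", response.prediction)

if __name__ == "__main__":
    main()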