
Commit

Fix comment in inference.proto (#1876)
Co-authored-by: Mark Saroufim <marksaroufim@fb.com>
underspirit and msaroufim authored Sep 29, 2022
1 parent 723e8fd commit 9890d0a
Showing 1 changed file with 4 additions and 3 deletions.
7 changes: 4 additions & 3 deletions frontend/server/src/main/resources/proto/inference.proto
@@ -13,12 +13,12 @@ message PredictionsRequest {
     // Version of model to run prediction on.
     string model_version = 2; //optional

-    // input data for model prediction
+    // Input data for model prediction
     map<string, bytes> input = 3; //required
 }

 message PredictionResponse {
-    // TorchServe health
+    // Response content for prediction
     bytes prediction = 1;
 }

@@ -28,8 +28,9 @@ message TorchServeHealthResponse {
 }

 service InferenceAPIsService {
+    // Check health status of the TorchServe server.
     rpc Ping(google.protobuf.Empty) returns (TorchServeHealthResponse) {}

     // Predictions entry point to get inference using default model version.
     rpc Predictions(PredictionsRequest) returns (PredictionResponse) {}
-}
+}
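For context, the comments corrected here document the two RPCs exposed by InferenceAPIsService and the fields of PredictionsRequest/PredictionResponse. Below is a minimal client sketch, not part of this commit, assuming Python stubs (inference_pb2, inference_pb2_grpc) generated from this proto with grpcio-tools (e.g. python -m grpc_tools.protoc -I frontend/server/src/main/resources/proto --python_out=. --grpc_python_out=. inference.proto), a registered model named resnet18, an example input file kitten.jpg, and TorchServe's gRPC inference API listening on localhost:7070; adjust these for your deployment.

import grpc
from google.protobuf import empty_pb2

import inference_pb2        # assumed: generated from inference.proto
import inference_pb2_grpc   # assumed: generated gRPC stubs

with grpc.insecure_channel("localhost:7070") as channel:  # assumed inference gRPC address
    stub = inference_pb2_grpc.InferenceAPIsServiceStub(channel)

    # Ping: check health status of the TorchServe server.
    health = stub.Ping(empty_pb2.Empty())
    print(health)

    # Predictions: model_name is required, model_version is optional,
    # and input maps input names to raw bytes.
    with open("kitten.jpg", "rb") as f:   # assumed example payload
        request = inference_pb2.PredictionsRequest(
            model_name="resnet18",        # assumed model name
            input={"data": f.read()},     # "data" is an example input key
        )
    response = stub.Predictions(request)
    print(len(response.prediction), "bytes in prediction response")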
