diff --git a/frontend/server/src/main/java/org/pytorch/serve/http/HttpRequestHandlerChain.java b/frontend/server/src/main/java/org/pytorch/serve/http/HttpRequestHandlerChain.java
index 7219c5b81b..d360aa6b03 100644
--- a/frontend/server/src/main/java/org/pytorch/serve/http/HttpRequestHandlerChain.java
+++ b/frontend/server/src/main/java/org/pytorch/serve/http/HttpRequestHandlerChain.java
@@ -96,7 +96,7 @@ protected void handleCustomEndpoint(
         Long start = System.currentTimeMillis();
         FullHttpResponse rsp =
                 new DefaultFullHttpResponse(
-                        HttpVersion.HTTP_1_1, HttpResponseStatus.OK, false);
+                        HttpVersion.HTTP_1_1, HttpResponseStatus.OK, true);
         try {
             run(endpoint, req, rsp, decoder, req.method().toString());
             NettyUtils.sendHttpResponse(ctx, rsp, true);
diff --git a/frontend/server/src/main/java/org/pytorch/serve/job/RestJob.java b/frontend/server/src/main/java/org/pytorch/serve/job/RestJob.java
index 66a5407163..18e3f11caa 100644
--- a/frontend/server/src/main/java/org/pytorch/serve/job/RestJob.java
+++ b/frontend/server/src/main/java/org/pytorch/serve/job/RestJob.java
@@ -109,8 +109,7 @@ private void responseDescribe(
                 (statusPhrase == null)
                         ? HttpResponseStatus.valueOf(statusCode)
                         : new HttpResponseStatus(statusCode, statusPhrase);
-        FullHttpResponse resp =
-                new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, false);
+        FullHttpResponse resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, true);
 
         if (contentType != null && contentType.length() > 0) {
             resp.headers().set(HttpHeaderNames.CONTENT_TYPE, contentType);
@@ -148,10 +147,10 @@ private void responseInference(
 
         HttpResponse resp;
         if (responseHeaders != null && responseHeaders.containsKey(TS_STREAM_NEXT)) {
-            resp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status, false);
+            resp = new DefaultHttpResponse(HttpVersion.HTTP_1_1, status, true);
             numStreams = responseHeaders.get(TS_STREAM_NEXT).equals("true") ? numStreams + 1 : -1;
         } else {
-            resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, false);
+            resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, true);
         }
 
         if (contentType != null && contentType.length() > 0) {
diff --git a/frontend/server/src/main/java/org/pytorch/serve/util/NettyUtils.java b/frontend/server/src/main/java/org/pytorch/serve/util/NettyUtils.java
index 53b3cfcd62..fcf86e38e7 100644
--- a/frontend/server/src/main/java/org/pytorch/serve/util/NettyUtils.java
+++ b/frontend/server/src/main/java/org/pytorch/serve/util/NettyUtils.java
@@ -90,7 +90,7 @@ public static void sendJsonResponse(ChannelHandlerContext ctx, String json) {
 
     public static void sendJsonResponse(
            ChannelHandlerContext ctx, String json, HttpResponseStatus status) {
-        FullHttpResponse resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, false);
+        FullHttpResponse resp = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, status, true);
         resp.headers().set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_JSON);
         ByteBuf content = resp.content();
         content.writeCharSequence(json, CharsetUtil.UTF_8);
diff --git a/frontend/server/src/main/java/org/pytorch/serve/workflow/WorkflowManager.java b/frontend/server/src/main/java/org/pytorch/serve/workflow/WorkflowManager.java
index c707be86db..8f8bf87c08 100644
--- a/frontend/server/src/main/java/org/pytorch/serve/workflow/WorkflowManager.java
+++ b/frontend/server/src/main/java/org/pytorch/serve/workflow/WorkflowManager.java
@@ -386,7 +386,7 @@ public void predict(ChannelHandlerContext ctx, String wfName, RequestInput input
                                 new DefaultFullHttpResponse(
                                         HttpVersion.HTTP_1_1,
                                         HttpResponseStatus.OK,
-                                        false);
+                                        true);
                         resp.headers()
                                 .set(
                                         HttpHeaderNames.CONTENT_TYPE,
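Note (not part of the patch): the boolean flipped in every hunk above is the validateHeaders argument of Netty's DefaultFullHttpResponse / DefaultHttpResponse constructors, so the change turns on header validation for the responses these classes build. A minimal sketch of the effect, assuming Netty 4.1 on the classpath; the HeaderValidationDemo class and the X-Custom header name are hypothetical, for illustration only:

import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;

public class HeaderValidationDemo {
    public static void main(String[] args) {
        // validateHeaders = true: header names and values are checked as they are set.
        FullHttpResponse resp =
                new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, true);
        try {
            // A CR/LF embedded in a header value could split the response if it reached
            // the wire; with validation enabled, set() rejects it instead.
            resp.headers().set("X-Custom", "value\r\nSet-Cookie: injected=1");
        } catch (IllegalArgumentException e) {
            System.out.println("Rejected malformed header: " + e.getMessage());
        }
    }
}

With the flag set to false, the same set() call succeeds and the raw bytes are written out unchecked, which is the HTTP response-splitting pattern this kind of validation is meant to guard against.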