tsp, regen content safety lib #41564

Merged
@@ -152,79 +152,71 @@ public ContentSafetyClientImpl(HttpPipeline httpPipeline, SerializerAdapter seri
@Host("{endpoint}/contentsafety")
@ServiceInterface(name = "ContentSafetyClient")
public interface ContentSafetyClientService {
@Post("/text:analyze")
@Post("/image:analyze")
@ExpectedResponses({ 200 })
@UnexpectedResponseExceptionType(value = ClientAuthenticationException.class, code = { 401 })
@UnexpectedResponseExceptionType(value = ResourceNotFoundException.class, code = { 404 })
@UnexpectedResponseExceptionType(value = ResourceModifiedException.class, code = { 409 })
@UnexpectedResponseExceptionType(HttpResponseException.class)
Mono<Response<BinaryData>> analyzeText(@HostParam("endpoint") String endpoint,
Mono<Response<BinaryData>> analyzeImage(@HostParam("endpoint") String endpoint,
@QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept,
@BodyParam("application/json") BinaryData options, RequestOptions requestOptions, Context context);

@Post("/text:analyze")
@Post("/image:analyze")
@ExpectedResponses({ 200 })
@UnexpectedResponseExceptionType(value = ClientAuthenticationException.class, code = { 401 })
@UnexpectedResponseExceptionType(value = ResourceNotFoundException.class, code = { 404 })
@UnexpectedResponseExceptionType(value = ResourceModifiedException.class, code = { 409 })
@UnexpectedResponseExceptionType(HttpResponseException.class)
Response<BinaryData> analyzeTextSync(@HostParam("endpoint") String endpoint,
Response<BinaryData> analyzeImageSync(@HostParam("endpoint") String endpoint,
@QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept,
@BodyParam("application/json") BinaryData options, RequestOptions requestOptions, Context context);

@Post("/image:analyze")
@Post("/text:analyze")
@ExpectedResponses({ 200 })
@UnexpectedResponseExceptionType(value = ClientAuthenticationException.class, code = { 401 })
@UnexpectedResponseExceptionType(value = ResourceNotFoundException.class, code = { 404 })
@UnexpectedResponseExceptionType(value = ResourceModifiedException.class, code = { 409 })
@UnexpectedResponseExceptionType(HttpResponseException.class)
Mono<Response<BinaryData>> analyzeImage(@HostParam("endpoint") String endpoint,
Mono<Response<BinaryData>> analyzeText(@HostParam("endpoint") String endpoint,
@QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept,
@BodyParam("application/json") BinaryData options, RequestOptions requestOptions, Context context);

@Post("/image:analyze")
@Post("/text:analyze")
@ExpectedResponses({ 200 })
@UnexpectedResponseExceptionType(value = ClientAuthenticationException.class, code = { 401 })
@UnexpectedResponseExceptionType(value = ResourceNotFoundException.class, code = { 404 })
@UnexpectedResponseExceptionType(value = ResourceModifiedException.class, code = { 409 })
@UnexpectedResponseExceptionType(HttpResponseException.class)
Response<BinaryData> analyzeImageSync(@HostParam("endpoint") String endpoint,
Response<BinaryData> analyzeTextSync(@HostParam("endpoint") String endpoint,
@QueryParam("api-version") String apiVersion, @HeaderParam("accept") String accept,
@BodyParam("application/json") BinaryData options, RequestOptions requestOptions, Context context);
}

/**
* Analyze Text
* Analyze Image
*
* A synchronous API for the analysis of potentially harmful text content. Currently, it supports four categories:
* A synchronous API for the analysis of potentially harmful image content. Currently, it supports four categories:
* Hate, SelfHarm, Sexual, and Violence.
* <p><strong>Request Body Schema</strong></p>
*
* <pre>{@code
* {
* text: String (Required)
* image (Required): {
* content: byte[] (Optional)
* blobUrl: String (Optional)
* }
* categories (Optional): [
* String(Hate/SelfHarm/Sexual/Violence) (Optional)
* ]
* blocklistNames (Optional): [
* String (Optional)
* ]
* haltOnBlocklistHit: Boolean (Optional)
* outputType: String(FourSeverityLevels/EightSeverityLevels) (Optional)
* outputType: String(FourSeverityLevels) (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong></p>
*
* <pre>{@code
* {
* blocklistsMatch (Optional): [
* (Optional){
* blocklistName: String (Required)
* blocklistItemId: String (Required)
* blocklistItemText: String (Required)
* }
* ]
* categoriesAnalysis (Required): [
* (Required){
* category: String(Hate/SelfHarm/Sexual/Violence) (Required)
@@ -234,53 +226,45 @@ Response<BinaryData> analyzeImageSync(@HostParam("endpoint") String endpoint,
* }
* }</pre>
*
* @param options The text analysis request.
* @param options The image analysis request.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return the text analysis response along with {@link Response} on successful completion of {@link Mono}.
* @return the image analysis response along with {@link Response} on successful completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<BinaryData>> analyzeTextWithResponseAsync(BinaryData options, RequestOptions requestOptions) {
public Mono<Response<BinaryData>> analyzeImageWithResponseAsync(BinaryData options, RequestOptions requestOptions) {
final String accept = "application/json";
return FluxUtil.withContext(context -> service.analyzeText(this.getEndpoint(),
return FluxUtil.withContext(context -> service.analyzeImage(this.getEndpoint(),
this.getServiceVersion().getVersion(), accept, options, requestOptions, context));
}
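For orientation, a minimal usage sketch of the async image route declared above; `clientImpl` is an assumed, already-constructed ContentSafetyClientImpl, the base64 payload is a placeholder, and BinaryData/RequestOptions/Response come from azure-core with Mono from Reactor.

    // Hypothetical sketch: build the request body documented above and call the async protocol method.
    BinaryData imageRequest = BinaryData.fromString(
        "{\"image\":{\"content\":\"<base64-encoded image bytes>\"}}"); // placeholder payload
    Mono<Response<BinaryData>> pendingAnalysis =
        clientImpl.analyzeImageWithResponseAsync(imageRequest, new RequestOptions());
    pendingAnalysis.subscribe(response ->
        System.out.println("Image analysis: " + response.getValue()));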

/**
* Analyze Text
* Analyze Image
*
* A synchronous API for the analysis of potentially harmful text content. Currently, it supports four categories:
* A synchronous API for the analysis of potentially harmful image content. Currently, it supports four categories:
* Hate, SelfHarm, Sexual, and Violence.
* <p><strong>Request Body Schema</strong></p>
*
* <pre>{@code
* {
* text: String (Required)
* image (Required): {
* content: byte[] (Optional)
* blobUrl: String (Optional)
* }
* categories (Optional): [
* String(Hate/SelfHarm/Sexual/Violence) (Optional)
* ]
* blocklistNames (Optional): [
* String (Optional)
* ]
* haltOnBlocklistHit: Boolean (Optional)
* outputType: String(FourSeverityLevels/EightSeverityLevels) (Optional)
* outputType: String(FourSeverityLevels) (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong></p>
*
* <pre>{@code
* {
* blocklistsMatch (Optional): [
* (Optional){
* blocklistName: String (Required)
* blocklistItemId: String (Required)
* blocklistItemText: String (Required)
* }
* ]
* categoriesAnalysis (Required): [
* (Required){
* category: String(Hate/SelfHarm/Sexual/Violence) (Required)
@@ -290,45 +274,53 @@ public Mono<Response<BinaryData>> analyzeTextWithResponseAsync(BinaryData option
* }
* }</pre>
*
* @param options The text analysis request.
* @param options The image analysis request.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return the text analysis response along with {@link Response}.
* @return the image analysis response along with {@link Response}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> analyzeTextWithResponse(BinaryData options, RequestOptions requestOptions) {
public Response<BinaryData> analyzeImageWithResponse(BinaryData options, RequestOptions requestOptions) {
final String accept = "application/json";
return service.analyzeTextSync(this.getEndpoint(), this.getServiceVersion().getVersion(), accept, options,
return service.analyzeImageSync(this.getEndpoint(), this.getServiceVersion().getVersion(), accept, options,
requestOptions, Context.NONE);
}

/**
* Analyze Image
* Analyze Text
*
* A synchronous API for the analysis of potentially harmful image content. Currently, it supports four categories:
* A synchronous API for the analysis of potentially harmful text content. Currently, it supports four categories:
* Hate, SelfHarm, Sexual, and Violence.
* <p><strong>Request Body Schema</strong></p>
*
* <pre>{@code
* {
* image (Required): {
* content: byte[] (Optional)
* blobUrl: String (Optional)
* }
* text: String (Required)
* categories (Optional): [
* String(Hate/SelfHarm/Sexual/Violence) (Optional)
* ]
* outputType: String(FourSeverityLevels) (Optional)
* blocklistNames (Optional): [
* String (Optional)
* ]
* haltOnBlocklistHit: Boolean (Optional)
* outputType: String(FourSeverityLevels/EightSeverityLevels) (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong></p>
*
* <pre>{@code
* {
* blocklistsMatch (Optional): [
* (Optional){
* blocklistName: String (Required)
* blocklistItemId: String (Required)
* blocklistItemText: String (Required)
* }
* ]
* categoriesAnalysis (Required): [
* (Required){
* category: String(Hate/SelfHarm/Sexual/Violence) (Required)
@@ -338,45 +330,53 @@ public Response<BinaryData> analyzeTextWithResponse(BinaryData options, RequestO
* }
* }</pre>
*
* @param options The image analysis request.
* @param options The text analysis request.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return the image analysis response along with {@link Response} on successful completion of {@link Mono}.
* @return the text analysis response along with {@link Response} on successful completion of {@link Mono}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<BinaryData>> analyzeImageWithResponseAsync(BinaryData options, RequestOptions requestOptions) {
public Mono<Response<BinaryData>> analyzeTextWithResponseAsync(BinaryData options, RequestOptions requestOptions) {
final String accept = "application/json";
return FluxUtil.withContext(context -> service.analyzeImage(this.getEndpoint(),
return FluxUtil.withContext(context -> service.analyzeText(this.getEndpoint(),
this.getServiceVersion().getVersion(), accept, options, requestOptions, context));
}

/**
* Analyze Image
* Analyze Text
*
* A synchronous API for the analysis of potentially harmful image content. Currently, it supports four categories:
* A synchronous API for the analysis of potentially harmful text content. Currently, it supports four categories:
* Hate, SelfHarm, Sexual, and Violence.
* <p><strong>Request Body Schema</strong></p>
*
* <pre>{@code
* {
* image (Required): {
* content: byte[] (Optional)
* blobUrl: String (Optional)
* }
* text: String (Required)
* categories (Optional): [
* String(Hate/SelfHarm/Sexual/Violence) (Optional)
* ]
* outputType: String(FourSeverityLevels) (Optional)
* blocklistNames (Optional): [
* String (Optional)
* ]
* haltOnBlocklistHit: Boolean (Optional)
* outputType: String(FourSeverityLevels/EightSeverityLevels) (Optional)
* }
* }</pre>
*
* <p><strong>Response Body Schema</strong></p>
*
* <pre>{@code
* {
* blocklistsMatch (Optional): [
* (Optional){
* blocklistName: String (Required)
* blocklistItemId: String (Required)
* blocklistItemText: String (Required)
* }
* ]
* categoriesAnalysis (Required): [
* (Required){
* category: String(Hate/SelfHarm/Sexual/Violence) (Required)
@@ -386,18 +386,18 @@ public Mono<Response<BinaryData>> analyzeImageWithResponseAsync(BinaryData optio
* }
* }</pre>
*
* @param options The image analysis request.
* @param options The text analysis request.
* @param requestOptions The options to configure the HTTP request before HTTP client sends it.
* @throws HttpResponseException thrown if the request is rejected by server.
* @throws ClientAuthenticationException thrown if the request is rejected by server on status code 401.
* @throws ResourceNotFoundException thrown if the request is rejected by server on status code 404.
* @throws ResourceModifiedException thrown if the request is rejected by server on status code 409.
* @return the image analysis response along with {@link Response}.
* @return the text analysis response along with {@link Response}.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BinaryData> analyzeImageWithResponse(BinaryData options, RequestOptions requestOptions) {
public Response<BinaryData> analyzeTextWithResponse(BinaryData options, RequestOptions requestOptions) {
final String accept = "application/json";
return service.analyzeImageSync(this.getEndpoint(), this.getServiceVersion().getVersion(), accept, options,
return service.analyzeTextSync(this.getEndpoint(), this.getServiceVersion().getVersion(), accept, options,
requestOptions, Context.NONE);
}
}
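Similarly, a minimal sketch of the synchronous text route, with a request body shaped like the schema documented above; `clientImpl` is again an assumed, already-built ContentSafetyClientImpl instance.

    // Hypothetical sketch: the request body mirrors the documented text schema.
    BinaryData textRequest = BinaryData.fromString(
        "{\"text\":\"sample text to screen\",\"outputType\":\"FourSeverityLevels\"}");
    Response<BinaryData> textResponse =
        clientImpl.analyzeTextWithResponse(textRequest, new RequestOptions());
    // The body carries categoriesAnalysis, plus blocklistsMatch when blocklistNames are supplied.
    System.out.println(textResponse.getValue());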
@@ -19,7 +19,7 @@
public final class AnalyzeImageOptions implements JsonSerializable<AnalyzeImageOptions> {

/*
* The image needs to be analyzed.
* The image to be analyzed.
*/
@Generated
private final ContentSafetyImageData image;
@@ -49,7 +49,7 @@ public AnalyzeImageOptions(ContentSafetyImageData image) {
}

/**
* Get the image property: The image needs to be analyzed.
* Get the image property: The image to be analyzed.
*
* @return the image value.
*/
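A small construction sketch based on the constructor visible in this diff; `buildImageData()` is an assumed helper and the getter name is only inferred from the javadoc above.

    // Hypothetical sketch: buildImageData() is an assumed helper that supplies image bytes or a blob URL.
    ContentSafetyImageData imageData = buildImageData();
    AnalyzeImageOptions imageOptions = new AnalyzeImageOptions(imageData);
    ContentSafetyImageData sameImage = imageOptions.getImage(); // getter name inferred from the javadoc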
@@ -19,8 +19,8 @@
public final class AnalyzeTextOptions implements JsonSerializable<AnalyzeTextOptions> {

/*
* The text needs to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text
* of one request.
* The text to be analyzed. We support a maximum of 10k Unicode characters (Unicode code points) in the text of one
* request.
*/
@Generated
private final String text;
@@ -63,8 +63,8 @@ public AnalyzeTextOptions(String text) {
}

/**
* Get the text property: The text needs to be analyzed. We support a maximum of 10k Unicode characters (Unicode
* code points) in the text of one request.
* Get the text property: The text to be analyzed. We support a maximum of 10k Unicode characters (Unicode code
* points) in the text of one request.
*
* @return the text value.
*/
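A matching sketch for the text options model, using the constructor shown in this diff; per the javadoc above, the input must stay within 10k Unicode code points per request.

    // Hypothetical sketch: the required text is passed through the constructor shown in this diff.
    AnalyzeTextOptions textOptions = new AnalyzeTextOptions("text to be screened for harmful content");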
@@ -8,30 +8,30 @@
import java.util.Collection;

/**
* Image analyze category.
* The harm category supported in Image content analysis.
*/
public final class ImageCategory extends ExpandableStringEnum<ImageCategory> {

/**
* Static value Hate for ImageCategory.
* The harm category for Image - Hate.
*/
@Generated
public static final ImageCategory HATE = fromString("Hate");

/**
* Static value SelfHarm for ImageCategory.
* The harm category for Image - SelfHarm.
*/
@Generated
public static final ImageCategory SELF_HARM = fromString("SelfHarm");

/**
* Static value Sexual for ImageCategory.
* The harm category for Image - Sexual.
*/
@Generated
public static final ImageCategory SEXUAL = fromString("Sexual");

/**
* Static value Violence for ImageCategory.
* The harm category for Image - Violence.
*/
@Generated
public static final ImageCategory VIOLENCE = fromString("Violence");
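A brief usage sketch for the expandable enum above; `fromString` is the same factory used to define the constants in this diff, so parsing a known value resolves to the corresponding predefined instance.

    // Hypothetical sketch: predefined constants and raw strings interoperate.
    ImageCategory predefined = ImageCategory.VIOLENCE;
    ImageCategory parsed = ImageCategory.fromString("Hate"); // equivalent to ImageCategory.HATE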