comment: string (lengths 1 to 5.49k)
method_body: string (lengths 27 to 75.2k)
target_code: string (lengths 0 to 5.16k)
method_body_after: string (lengths 27 to 76k)
context_before: string (lengths 8 to 252k)
context_after: string (lengths 8 to 253k)
can this even happen?
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { return Mono.defer(() -> { OffsetDateTime now = OffsetDateTime.now(); try { context.getHttpRequest().getHeaders().set("Date", DateTimeRfc1123.toRfc1123String(now)); } catch (IllegalArgumentException ignored) { context.getHttpRequest().getHeaders().set("Date", FORMATTER.format(now)); } return next.process(); }); }
}
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { return Mono.defer(() -> { OffsetDateTime now = OffsetDateTime.now(); try { context.getHttpRequest().getHeaders().set("Date", DateTimeRfc1123.toRfc1123String(now)); } catch (IllegalArgumentException ignored) { context.getHttpRequest().getHeaders().set("Date", FORMATTER.format(now)); } return next.process(); }); }
class AddDatePolicy implements HttpPipelinePolicy { private static final DateTimeFormatter FORMATTER = DateTimeFormatter .ofPattern("EEE, dd MMM yyyy HH:mm:ss 'GMT'") .withZone(ZoneOffset.UTC) .withLocale(Locale.US); @Override }
class AddDatePolicy implements HttpPipelinePolicy { private static final DateTimeFormatter FORMATTER = DateTimeFormatter .ofPattern("EEE, dd MMM yyyy HH:mm:ss 'GMT'") .withZone(ZoneOffset.UTC) .withLocale(Locale.US); @Override }
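The fallback branch the comment questions follows a common try-a-strict-formatter-then-fall-back shape. A minimal, self-contained sketch of that pattern, using the JDK's RFC_1123_DATE_TIME in place of azure-core's DateTimeRfc1123 (that substitution is an assumption for illustration, not the library's code):

import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

class DateHeaderSketch {
    // Same manual fallback pattern as the policy above.
    private static final DateTimeFormatter FALLBACK = DateTimeFormatter
        .ofPattern("EEE, dd MMM yyyy HH:mm:ss 'GMT'")
        .withZone(ZoneOffset.UTC)
        .withLocale(Locale.US);

    static String formatDateHeader(OffsetDateTime now) {
        try {
            // Primary path: the JDK's strict RFC 1123 formatter (stand-in for DateTimeRfc1123).
            return DateTimeFormatter.RFC_1123_DATE_TIME.format(now.withOffsetSameInstant(ZoneOffset.UTC));
        } catch (RuntimeException e) {
            // Defensive fallback, mirroring the catch the reviewer is asking about.
            return FALLBACK.format(now);
        }
    }

    public static void main(String[] args) {
        System.out.println(formatDateHeader(OffsetDateTime.now()));
    }
}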
Would it be better to wrap it in an `UncheckedIOException`?
public Mono<Void> logRequest(ClientLogger logger, HttpRequestLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); if (!logger.canLogAtLevel(logLevel)) { return Mono.empty(); } final HttpRequest request = loggingOptions.getHttpRequest(); StringBuilder requestLogMessage = new StringBuilder(); if (httpLogDetailLevel.shouldLogUrl()) { requestLogMessage.append("--> ") .append(request.getHttpMethod()) .append(" ") .append(getRedactedUrl(request.getUrl(), allowedQueryParameterNames)) .append(System.lineSeparator()); Integer retryCount = loggingOptions.getTryCount(); if (retryCount != null) { requestLogMessage.append("Try count: ") .append(retryCount) .append(System.lineSeparator()); } } if (httpLogDetailLevel.shouldLogHeaders() && logger.canLogAtLevel(LogLevel.VERBOSE)) { addHeadersToLogMessage(allowedHeaderNames, request.getHeaders(), requestLogMessage); } if (!httpLogDetailLevel.shouldLogBody()) { return logAndReturn(logger, logLevel, requestLogMessage, null); } if (request.getBody() == null) { requestLogMessage.append("(empty body)") .append(System.lineSeparator()) .append("--> END ") .append(request.getHttpMethod()) .append(System.lineSeparator()); return logAndReturn(logger, logLevel, requestLogMessage, null); } String contentType = request.getHeaders().getValue("Content-Type"); long contentLength = getContentLength(logger, request.getHeaders()); if (shouldBodyBeLogged(contentType, contentLength)) { AccessibleByteArrayOutputStream stream = new AccessibleByteArrayOutputStream((int) contentLength); request.setBody( request.getBody() .doOnNext(byteBuffer -> { try { ImplUtils.writeByteBufferToStream(byteBuffer.duplicate(), stream); } catch (IOException ex) { throw LOGGER.logExceptionAsError(Exceptions.propagate(ex)); } }) .doFinally(ignored -> { requestLogMessage.append(contentLength) .append("-byte body:") .append(System.lineSeparator()) .append(prettyPrintIfNeeded(logger, prettyPrintBody, contentType, new String(stream.toByteArray(), 0, stream.count(), StandardCharsets.UTF_8))) .append(System.lineSeparator()) .append("--> END ") .append(request.getHttpMethod()) .append(System.lineSeparator()); logAndReturn(logger, logLevel, requestLogMessage, null); })); return Mono.empty(); } else { requestLogMessage.append(contentLength) .append("-byte body: (content not logged)") .append(System.lineSeparator()) .append("--> END ") .append(request.getHttpMethod()) .append(System.lineSeparator()); return logAndReturn(logger, logLevel, requestLogMessage, null); } }
throw LOGGER.logExceptionAsError(Exceptions.propagate(ex));
public Mono<Void> logRequest(ClientLogger logger, HttpRequestLoggingContext loggingOptions) { final LogLevel logLevel = getLogLevel(loggingOptions); if (!logger.canLogAtLevel(logLevel)) { return Mono.empty(); } final HttpRequest request = loggingOptions.getHttpRequest(); StringBuilder requestLogMessage = new StringBuilder(); if (httpLogDetailLevel.shouldLogUrl()) { requestLogMessage.append("--> ") .append(request.getHttpMethod()) .append(" ") .append(getRedactedUrl(request.getUrl(), allowedQueryParameterNames)) .append(System.lineSeparator()); Integer retryCount = loggingOptions.getTryCount(); if (retryCount != null) { requestLogMessage.append("Try count: ") .append(retryCount) .append(System.lineSeparator()); } } if (httpLogDetailLevel.shouldLogHeaders() && logger.canLogAtLevel(LogLevel.VERBOSE)) { addHeadersToLogMessage(allowedHeaderNames, request.getHeaders(), requestLogMessage); } if (!httpLogDetailLevel.shouldLogBody()) { logMessage(logger, logLevel, requestLogMessage); return Mono.empty(); } if (request.getBody() == null) { requestLogMessage.append("(empty body)") .append(System.lineSeparator()) .append("--> END ") .append(request.getHttpMethod()) .append(System.lineSeparator()); logMessage(logger, logLevel, requestLogMessage); return Mono.empty(); } String contentType = request.getHeaders().getValue("Content-Type"); long contentLength = getContentLength(logger, request.getHeaders()); if (shouldBodyBeLogged(contentType, contentLength)) { AccessibleByteArrayOutputStream stream = new AccessibleByteArrayOutputStream((int) contentLength); request.setBody( request.getBody() .doOnNext(byteBuffer -> { try { ImplUtils.writeByteBufferToStream(byteBuffer.duplicate(), stream); } catch (IOException ex) { throw LOGGER.logExceptionAsError(new UncheckedIOException(ex)); } }) .doFinally(ignored -> { requestLogMessage.append(contentLength) .append("-byte body:") .append(System.lineSeparator()) .append(prettyPrintIfNeeded(logger, prettyPrintBody, contentType, new String(stream.toByteArray(), 0, stream.count(), StandardCharsets.UTF_8))) .append(System.lineSeparator()) .append("--> END ") .append(request.getHttpMethod()) .append(System.lineSeparator()); logMessage(logger, logLevel, requestLogMessage); })); } else { requestLogMessage.append(contentLength) .append("-byte body: (content not logged)") .append(System.lineSeparator()) .append("--> END ") .append(request.getHttpMethod()) .append(System.lineSeparator()); logMessage(logger, logLevel, requestLogMessage); } return Mono.empty(); }
class DefaultHttpRequestLogger implements HttpRequestLogger { @Override }
class DefaultHttpRequestLogger implements HttpRequestLogger { @Override }
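A small standalone comparison of the two wrappings discussed above (assumes reactor-core on the classpath; the exact wrapper class Reactor uses internally is not part of its public API):

import java.io.IOException;
import java.io.UncheckedIOException;
import reactor.core.Exceptions;

class WrappingComparison {
    public static void main(String[] args) {
        IOException cause = new IOException("write failed");

        // Reactor's generic wrapper: a checked exception comes back inside an internal
        // RuntimeException, so callers usually need Exceptions.unwrap to recover the IOException.
        RuntimeException propagated = Exceptions.propagate(cause);
        System.out.println(propagated.getClass().getName());
        System.out.println(Exceptions.unwrap(propagated) == cause);   // true

        // UncheckedIOException: a standard JDK type that can be caught directly,
        // and its getCause() is declared to return IOException.
        UncheckedIOException unchecked = new UncheckedIOException(cause);
        IOException recovered = unchecked.getCause();
        System.out.println(recovered == cause);                       // true
    }
}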
these could be static singletons if we want to be even faster.
private static HttpPipeline createDefaultPipeline() { return new HttpPipelineBuilder() .policies(new UserAgentPolicy(), new RetryPolicy(), new CookiePolicy()) .build(); }
.policies(new UserAgentPolicy(), new RetryPolicy(), new CookiePolicy())
private static HttpPipeline createDefaultPipeline() { return new HttpPipelineBuilder() .policies(new UserAgentPolicy(), new RetryPolicy(), new CookiePolicy()) .build(); }
class " + cls)))) .flatMap(ctr -> RESPONSE_CONSTRUCTORS_CACHE.invoke(ctr, response, bodyAsObject)); } private Mono<?> handleBodyReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Mono<?> asyncResult; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; asyncResult = Mono.just(isSuccess); } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray(); if (returnValueWireType == Base64Url.class) { responseBodyBytesAsync = responseBodyBytesAsync .mapNotNull(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes()); } asyncResult = responseBodyBytesAsync; } else if (FluxUtil.isFluxByteBuffer(entityType)) { asyncResult = Mono.just(response.getSourceResponse().getBody()); } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { asyncResult = BinaryData.fromFlux(response.getSourceResponse().getBody()); } else { asyncResult = response.getDecodedBody((byte[]) null); } return asyncResult; }
class " + cls)))) .flatMap(ctr -> RESPONSE_CONSTRUCTORS_CACHE.invoke(ctr, response, bodyAsObject)); } private Mono<?> handleBodyReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Mono<?> asyncResult; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; asyncResult = Mono.just(isSuccess); } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { Mono<byte[]> responseBodyBytesAsync = response.getSourceResponse().getBodyAsByteArray(); if (returnValueWireType == Base64Url.class) { responseBodyBytesAsync = responseBodyBytesAsync .mapNotNull(base64UrlBytes -> new Base64Url(base64UrlBytes).decodedBytes()); } asyncResult = responseBodyBytesAsync; } else if (FluxUtil.isFluxByteBuffer(entityType)) { asyncResult = Mono.just(response.getSourceResponse().getBody()); } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { asyncResult = BinaryData.fromFlux(response.getSourceResponse().getBody()); } else { asyncResult = response.getDecodedBody((byte[]) null); } return asyncResult; }
Why do we design the interface to accept an argument of the abstract type while the implementation works with the concrete type? How about always using the concrete type for clarity? That way we avoid the `instanceof` check.
private EventHubsTemplate getEventHubTemplate() { if (this.eventHubsTemplate == null) { DefaultEventHubsNamespaceProducerFactory factory = new DefaultEventHubsNamespaceProducerFactory( this.namespaceProperties, getProducerPropertiesSupplier()); producerFactoryCustomizers.forEach(customizer -> customizer.customize(factory)); factory.addListener((name, producerAsyncClient) -> { DefaultInstrumentation instrumentation = new DefaultInstrumentation(name, PRODUCER); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); this.eventHubsTemplate = new EventHubsTemplate(factory); } return this.eventHubsTemplate; }
producerFactoryCustomizers.forEach(customizer -> customizer.customize(factory));
private EventHubsTemplate getEventHubTemplate() { if (this.eventHubsTemplate == null) { DefaultEventHubsNamespaceProducerFactory factory = new DefaultEventHubsNamespaceProducerFactory( this.namespaceProperties, getProducerPropertiesSupplier()); producerFactoryCustomizers.forEach(customizer -> customizer.customize(factory)); factory.addListener((name, producerAsyncClient) -> { DefaultInstrumentation instrumentation = new DefaultInstrumentation(name, PRODUCER); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); this.eventHubsTemplate = new EventHubsTemplate(factory); } return this.eventHubsTemplate; }
class EventHubsMessageChannelBinder extends AbstractMessageChannelBinder<ExtendedConsumerProperties<EventHubsConsumerProperties>, ExtendedProducerProperties<EventHubsProducerProperties>, EventHubsChannelProvisioner> implements ExtendedPropertiesBinder<MessageChannel, EventHubsConsumerProperties, EventHubsProducerProperties> { private static final Logger LOGGER = LoggerFactory.getLogger(EventHubsMessageChannelBinder.class); private static final ExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); private NamespaceProperties namespaceProperties; private EventHubsTemplate eventHubsTemplate; private CheckpointStore checkpointStore; private DefaultEventHubsNamespaceProcessorFactory processorFactory; private final List<EventHubsMessageListenerContainer> eventHubsMessageListenerContainers = new ArrayList<>(); private final InstrumentationManager instrumentationManager = new DefaultInstrumentationManager(); private EventHubsExtendedBindingProperties bindingProperties = new EventHubsExtendedBindingProperties(); private final Map<String, ExtendedProducerProperties<EventHubsProducerProperties>> extendedProducerPropertiesMap = new ConcurrentHashMap<>(); private List<EventHubsProducerFactoryCustomizer> producerFactoryCustomizers = new ArrayList<>(); private List<EventHubsProcessorFactoryCustomizer> processorFactoryCustomizers = new ArrayList<>(); /** * Construct a {@link EventHubsMessageChannelBinder} with the specified headers to embed and {@link EventHubsChannelProvisioner}. * * @param headersToEmbed the headers to embed * @param provisioningProvider the provisioning provider */ public EventHubsMessageChannelBinder(String[] headersToEmbed, EventHubsChannelProvisioner provisioningProvider) { super(headersToEmbed, provisioningProvider); } @Override protected MessageHandler createProducerMessageHandler( ProducerDestination destination, ExtendedProducerProperties<EventHubsProducerProperties> producerProperties, MessageChannel errorChannel) { extendedProducerPropertiesMap.put(destination.getName(), producerProperties); Assert.notNull(getEventHubTemplate(), "eventHubsTemplate can't be null when create a producer"); DefaultMessageHandler handler = new DefaultMessageHandler(destination.getName(), this.eventHubsTemplate); handler.setBeanFactory(getBeanFactory()); handler.setSync(producerProperties.getExtension().isSync()); handler.setSendTimeout(producerProperties.getExtension().getSendTimeout().toMillis()); handler.setSendFailureChannel(errorChannel); String instrumentationId = Instrumentation.buildId(PRODUCER, destination.getName()); handler.setSendCallback(new InstrumentationSendCallback(instrumentationId, instrumentationManager)); if (producerProperties.isPartitioned()) { handler.setPartitionIdExpression( EXPRESSION_PARSER.parseExpression("headers['" + BinderHeaders.PARTITION_HEADER + "']")); } else { handler.setPartitionKeyExpression(new FunctionExpression<Message<?>>(m -> m.getPayload().hashCode())); } return handler; } @Override protected MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { Assert.notNull(getProcessorFactory(), "processor factory can't be null when create a consumer"); boolean anonymous = !StringUtils.hasText(group); if (anonymous) { group = "anonymous." 
+ UUID.randomUUID(); } EventHubsContainerProperties containerProperties = createContainerProperties(destination, group, properties); EventHubsMessageListenerContainer listenerContainer = new EventHubsMessageListenerContainer( getProcessorFactory(), containerProperties); this.eventHubsMessageListenerContainers.add(listenerContainer); EventHubsInboundChannelAdapter inboundAdapter; if (properties.isBatchMode()) { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer, ListenerMode.BATCH); } else { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer); } inboundAdapter.setBeanFactory(getBeanFactory()); String instrumentationId = Instrumentation.buildId(CONSUMER, destination.getName() + "/" + group); inboundAdapter.setInstrumentationManager(instrumentationManager); inboundAdapter.setInstrumentationId(instrumentationId); ErrorInfrastructure errorInfrastructure = registerErrorInfrastructure(destination, group, properties); inboundAdapter.setErrorChannel(errorInfrastructure.getErrorChannel()); return inboundAdapter; } /** * Create {@link EventHubsContainerProperties} from the extended {@link EventHubsConsumerProperties}. * @param destination reference to the consumer destination. * @param group the consumer group. * @param properties the consumer properties. * @return the {@link EventHubsContainerProperties}. */ private EventHubsContainerProperties createContainerProperties( ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { EventHubsContainerProperties containerProperties = new EventHubsContainerProperties(); AzurePropertiesUtils.copyAzureCommonProperties(properties.getExtension(), containerProperties); ProcessorPropertiesMerger.copyProcessorPropertiesIfNotNull(properties.getExtension(), containerProperties); containerProperties.setEventHubName(destination.getName()); containerProperties.setConsumerGroup(group); containerProperties.setCheckpointConfig(properties.getExtension().getCheckpoint()); return containerProperties; } @Override public EventHubsConsumerProperties getExtendedConsumerProperties(String destination) { return this.bindingProperties.getExtendedConsumerProperties(destination); } @Override public EventHubsProducerProperties getExtendedProducerProperties(String destination) { return this.bindingProperties.getExtendedProducerProperties(destination); } @Override public String getDefaultsPrefix() { return this.bindingProperties.getDefaultsPrefix(); } @Override public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() { return this.bindingProperties.getExtendedPropertiesEntryClass(); } /** * Set binding properties. 
* * @param bindingProperties the binding properties */ public void setBindingProperties(EventHubsExtendedBindingProperties bindingProperties) { this.bindingProperties = bindingProperties; } private PropertiesSupplier<String, ProducerProperties> getProducerPropertiesSupplier() { return key -> { if (this.extendedProducerPropertiesMap.containsKey(key)) { EventHubsProducerProperties producerProperties = this.extendedProducerPropertiesMap.get(key) .getExtension(); producerProperties.setEventHubName(key); return producerProperties; } else { LOGGER.debug("Can't find extended properties for {}", key); return null; } }; } private EventHubsProcessorFactory getProcessorFactory() { if (this.processorFactory == null) { this.processorFactory = new DefaultEventHubsNamespaceProcessorFactory( this.checkpointStore, this.namespaceProperties); processorFactoryCustomizers.forEach(customizer -> customizer.customize(processorFactory)); processorFactory.addListener((name, consumerGroup, processorClient) -> { String instrumentationName = name + "/" + consumerGroup; Instrumentation instrumentation = new EventHubsProcessorInstrumentation(instrumentationName, CONSUMER, Duration.ofMinutes(2)); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); } return this.processorFactory; } /** * Set namespace properties. * * @param namespaceProperties the namespace properties */ public void setNamespaceProperties(NamespaceProperties namespaceProperties) { this.namespaceProperties = namespaceProperties; } /** * Set checkpoint store. * * @param checkpointStore the checkpoint store */ public void setCheckpointStore(CheckpointStore checkpointStore) { this.checkpointStore = checkpointStore; } /** * Get instrumentation manager. * * @return instrumentationManager the instrumentation manager * @see InstrumentationManager */ InstrumentationManager getInstrumentationManager() { return instrumentationManager; } /** * Set the producer factory customizers. * * @param producerFactoryCustomizers The producer factory customizers. */ public void setProducerFactoryCustomizers(List<EventHubsProducerFactoryCustomizer> producerFactoryCustomizers) { this.producerFactoryCustomizers = producerFactoryCustomizers; } /** * Set the processor factory customizers. * * @param processorFactoryCustomizers The processor factory customizers. */ public void setProcessorFactoryCustomizers(List<EventHubsProcessorFactoryCustomizer> processorFactoryCustomizers) { this.processorFactoryCustomizers = processorFactoryCustomizers; } }
class EventHubsMessageChannelBinder extends AbstractMessageChannelBinder<ExtendedConsumerProperties<EventHubsConsumerProperties>, ExtendedProducerProperties<EventHubsProducerProperties>, EventHubsChannelProvisioner> implements ExtendedPropertiesBinder<MessageChannel, EventHubsConsumerProperties, EventHubsProducerProperties> { private static final Logger LOGGER = LoggerFactory.getLogger(EventHubsMessageChannelBinder.class); private static final ExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); private NamespaceProperties namespaceProperties; private EventHubsTemplate eventHubsTemplate; private CheckpointStore checkpointStore; private DefaultEventHubsNamespaceProcessorFactory processorFactory; private final List<EventHubsMessageListenerContainer> eventHubsMessageListenerContainers = new ArrayList<>(); private final InstrumentationManager instrumentationManager = new DefaultInstrumentationManager(); private EventHubsExtendedBindingProperties bindingProperties = new EventHubsExtendedBindingProperties(); private final Map<String, ExtendedProducerProperties<EventHubsProducerProperties>> extendedProducerPropertiesMap = new ConcurrentHashMap<>(); private List<EventHubsProducerFactoryCustomizer> producerFactoryCustomizers = new ArrayList<>(); private List<EventHubsProcessorFactoryCustomizer> processorFactoryCustomizers = new ArrayList<>(); /** * Construct a {@link EventHubsMessageChannelBinder} with the specified headers to embed and {@link EventHubsChannelProvisioner}. * * @param headersToEmbed the headers to embed * @param provisioningProvider the provisioning provider */ public EventHubsMessageChannelBinder(String[] headersToEmbed, EventHubsChannelProvisioner provisioningProvider) { super(headersToEmbed, provisioningProvider); } @Override protected MessageHandler createProducerMessageHandler( ProducerDestination destination, ExtendedProducerProperties<EventHubsProducerProperties> producerProperties, MessageChannel errorChannel) { extendedProducerPropertiesMap.put(destination.getName(), producerProperties); Assert.notNull(getEventHubTemplate(), "eventHubsTemplate can't be null when create a producer"); DefaultMessageHandler handler = new DefaultMessageHandler(destination.getName(), this.eventHubsTemplate); handler.setBeanFactory(getBeanFactory()); handler.setSync(producerProperties.getExtension().isSync()); handler.setSendTimeout(producerProperties.getExtension().getSendTimeout().toMillis()); handler.setSendFailureChannel(errorChannel); String instrumentationId = Instrumentation.buildId(PRODUCER, destination.getName()); handler.setSendCallback(new InstrumentationSendCallback(instrumentationId, instrumentationManager)); if (producerProperties.isPartitioned()) { handler.setPartitionIdExpression( EXPRESSION_PARSER.parseExpression("headers['" + BinderHeaders.PARTITION_HEADER + "']")); } else { handler.setPartitionKeyExpression(new FunctionExpression<Message<?>>(m -> m.getPayload().hashCode())); } return handler; } @Override protected MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { Assert.notNull(getProcessorFactory(), "processor factory can't be null when create a consumer"); boolean anonymous = !StringUtils.hasText(group); if (anonymous) { group = "anonymous." 
+ UUID.randomUUID(); } EventHubsContainerProperties containerProperties = createContainerProperties(destination, group, properties); EventHubsMessageListenerContainer listenerContainer = new EventHubsMessageListenerContainer( getProcessorFactory(), containerProperties); this.eventHubsMessageListenerContainers.add(listenerContainer); EventHubsInboundChannelAdapter inboundAdapter; if (properties.isBatchMode()) { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer, ListenerMode.BATCH); } else { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer); } inboundAdapter.setBeanFactory(getBeanFactory()); String instrumentationId = Instrumentation.buildId(CONSUMER, destination.getName() + "/" + group); inboundAdapter.setInstrumentationManager(instrumentationManager); inboundAdapter.setInstrumentationId(instrumentationId); ErrorInfrastructure errorInfrastructure = registerErrorInfrastructure(destination, group, properties); inboundAdapter.setErrorChannel(errorInfrastructure.getErrorChannel()); return inboundAdapter; } /** * Create {@link EventHubsContainerProperties} from the extended {@link EventHubsConsumerProperties}. * @param destination reference to the consumer destination. * @param group the consumer group. * @param properties the consumer properties. * @return the {@link EventHubsContainerProperties}. */ private EventHubsContainerProperties createContainerProperties( ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { EventHubsContainerProperties containerProperties = new EventHubsContainerProperties(); AzurePropertiesUtils.copyAzureCommonProperties(properties.getExtension(), containerProperties); ProcessorPropertiesMerger.copyProcessorPropertiesIfNotNull(properties.getExtension(), containerProperties); containerProperties.setEventHubName(destination.getName()); containerProperties.setConsumerGroup(group); containerProperties.setCheckpointConfig(properties.getExtension().getCheckpoint()); return containerProperties; } @Override public EventHubsConsumerProperties getExtendedConsumerProperties(String destination) { return this.bindingProperties.getExtendedConsumerProperties(destination); } @Override public EventHubsProducerProperties getExtendedProducerProperties(String destination) { return this.bindingProperties.getExtendedProducerProperties(destination); } @Override public String getDefaultsPrefix() { return this.bindingProperties.getDefaultsPrefix(); } @Override public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() { return this.bindingProperties.getExtendedPropertiesEntryClass(); } /** * Set binding properties. 
* * @param bindingProperties the binding properties */ public void setBindingProperties(EventHubsExtendedBindingProperties bindingProperties) { this.bindingProperties = bindingProperties; } private PropertiesSupplier<String, ProducerProperties> getProducerPropertiesSupplier() { return key -> { if (this.extendedProducerPropertiesMap.containsKey(key)) { EventHubsProducerProperties producerProperties = this.extendedProducerPropertiesMap.get(key) .getExtension(); producerProperties.setEventHubName(key); return producerProperties; } else { LOGGER.debug("Can't find extended properties for {}", key); return null; } }; } private EventHubsProcessorFactory getProcessorFactory() { if (this.processorFactory == null) { this.processorFactory = new DefaultEventHubsNamespaceProcessorFactory( this.checkpointStore, this.namespaceProperties); processorFactoryCustomizers.forEach(customizer -> customizer.customize(processorFactory)); processorFactory.addListener((name, consumerGroup, processorClient) -> { String instrumentationName = name + "/" + consumerGroup; Instrumentation instrumentation = new EventHubsProcessorInstrumentation(instrumentationName, CONSUMER, Duration.ofMinutes(2)); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); } return this.processorFactory; } /** * Set namespace properties. * * @param namespaceProperties the namespace properties */ public void setNamespaceProperties(NamespaceProperties namespaceProperties) { this.namespaceProperties = namespaceProperties; } /** * Set checkpoint store. * * @param checkpointStore the checkpoint store */ public void setCheckpointStore(CheckpointStore checkpointStore) { this.checkpointStore = checkpointStore; } /** * Get instrumentation manager. * * @return instrumentationManager the instrumentation manager * @see InstrumentationManager */ InstrumentationManager getInstrumentationManager() { return instrumentationManager; } /** * Set the producer factory customizers. * * @param producerFactoryCustomizers The producer factory customizers. */ public void setProducerFactoryCustomizers(List<EventHubsProducerFactoryCustomizer> producerFactoryCustomizers) { this.producerFactoryCustomizers = producerFactoryCustomizers; } /** * Set the processor factory customizers. * * @param processorFactoryCustomizers The processor factory customizers. */ public void setProcessorFactoryCustomizers(List<EventHubsProcessorFactoryCustomizer> processorFactoryCustomizers) { this.processorFactoryCustomizers = processorFactoryCustomizers; } }
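To make the trade-off in the comment above concrete, here is a self-contained sketch with hypothetical type names (none of these are the actual spring-cloud-azure interfaces): a customizer typed to the abstract factory forces an instanceof-and-cast, while one typed to the concrete factory does not.

interface Factory { }

class ConcreteFactory implements Factory {
    void addListener(Runnable listener) { /* concrete-only method */ }
}

// Option A: parameter typed to the abstraction, as in the current API.
interface FactoryCustomizer {
    void customize(Factory factory);
}

// Option B: parameter typed to the concrete class, as the comment proposes.
interface ConcreteFactoryCustomizer {
    void customize(ConcreteFactory factory);
}

class CustomizerSketch {
    public static void main(String[] args) {
        FactoryCustomizer abstractTyped = factory -> {
            if (factory instanceof ConcreteFactory) {               // the check the comment wants to avoid
                ((ConcreteFactory) factory).addListener(() -> { });
            }
        };
        ConcreteFactoryCustomizer concreteTyped = factory -> factory.addListener(() -> { });

        abstractTyped.customize(new ConcreteFactory());
        concreteTyped.customize(new ConcreteFactory());
    }
}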
This is neat, I wish `mocha` had something like that...
public void getKeyRotationPolicyOfNonExistentKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); String keyName = testResourceNamer.randomName("nonExistentKey", 20); assertThrows(ResourceNotFoundException.class, () -> client.getKeyRotationPolicy(keyName)); }
Assumptions.assumeTrue(!isHsmEnabled);
public void getKeyRotationPolicyOfNonExistentKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); String keyName = testResourceNamer.randomName("nonExistentKey", 20); assertThrows(ResourceNotFoundException.class, () -> client.getKeyRotationPolicy(keyName)); }
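The `Assumptions.assumeTrue` call the comment admires is JUnit 5's way of skipping (rather than failing) a test when a precondition doesn't hold. A minimal standalone example, with an illustrative environment flag standing in for isHsmEnabled:

import static org.junit.jupiter.api.Assumptions.assumeTrue;

import org.junit.jupiter.api.Test;

class AssumptionSketchTest {
    // Illustrative flag; the real tests derive isHsmEnabled from their own configuration.
    private final boolean isHsmEnabled = Boolean.parseBoolean(System.getenv("RUN_HSM_TESTS"));

    @Test
    void runsOnlyAgainstStandardVaults() {
        // When the assumption is false, JUnit reports the test as skipped instead of failed.
        assumeTrue(!isHsmEnabled, "Skipped: not applicable when targeting Managed HSM");
        // ... assertions against the non-HSM vault would go here ...
    }
}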
class KeyClientTest extends KeyClientTestBase { protected KeyClient client; @Override protected void beforeTest() { beforeTestSetup(); } protected void createKeyClient(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion, null); } protected void createKeyClient(HttpClient httpClient, KeyServiceVersion serviceVersion, String testTenantId) { HttpPipeline httpPipeline = getHttpPipeline(httpClient, testTenantId); KeyAsyncClient asyncClient = spy(new KeyClientBuilder() .vaultUrl(getEndpoint()) .pipeline(httpPipeline) .serviceVersion(serviceVersion) .buildAsyncClient()); if (interceptorManager.isPlaybackMode()) { when(asyncClient.getDefaultPollingInterval()).thenReturn(Duration.ofMillis(10)); } client = new KeyClient(asyncClient); } /** * Tests that a key can be created in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); setKeyRunner((expected) -> assertKeyEquals(expected, client.createKey(expected))); } /** * Tests that a key can be created in the key vault while using a different tenant ID than the one that will be * provided in the authentication challenge. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyWithMultipleTenants(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion, testResourceNamer.randomUuid()); setKeyRunner((expected) -> assertKeyEquals(expected, client.createKey(expected))); KeyVaultCredentialPolicy.clearCache(); setKeyRunner((expected) -> assertKeyEquals(expected, client.createKey(expected))); } /** * Tests that an RSA key is created. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createRsaKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); createRsaKeyRunner((expected) -> assertKeyEquals(expected, client.createRsaKey(expected))); } /** * Tests that an attempt to create a key with empty string name throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyEmptyName(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); final KeyType keyType; if (runManagedHsmTest) { keyType = KeyType.RSA_HSM; } else { keyType = KeyType.RSA; } assertRestException(() -> client.createKey("", keyType), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); } /** * Tests that we cannot create keys when key type is null. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyNullType(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); setKeyEmptyValueRunner((key) -> { assertRestException(() -> client.createKey(key.getName(), key.getKeyType()), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); }); } /** * Verifies that an exception is thrown when null key object is passed for creation. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyNull(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRunnableThrowsException(() -> client.createKey(null), NullPointerException.class); assertRunnableThrowsException(() -> client.createKey(null), NullPointerException.class); } /** * Tests that a key is able to be updated when it exists. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); updateKeyRunner((createKeyOptions, updateKeyOptions) -> { KeyVaultKey createdKey = client.createKey(createKeyOptions); assertKeyEquals(createKeyOptions, createdKey); KeyVaultKey updatedKey = client.updateKeyProperties(createdKey.getProperties().setExpiresOn(updateKeyOptions.getExpiresOn())); assertKeyEquals(updateKeyOptions, updatedKey); }); } /** * Tests that a key is able to be updated when it is disabled. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateDisabledKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); updateDisabledKeyRunner((createKeyOptions, updateKeyOptions) -> { KeyVaultKey createdKey = client.createKey(createKeyOptions); assertKeyEquals(createKeyOptions, createdKey); KeyVaultKey updatedKey = client.updateKeyProperties(createdKey.getProperties().setExpiresOn(updateKeyOptions.getExpiresOn())); assertKeyEquals(updateKeyOptions, updatedKey); }); } /** * Tests that an existing key can be retrieved. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); getKeyRunner((original) -> { client.createKey(original); assertKeyEquals(original, client.getKey(original.getName())); }); } /** * Tests that a specific version of the key can be retrieved. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKeySpecificVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); getKeySpecificVersionRunner((key, keyWithNewVal) -> { KeyVaultKey keyVersionOne = client.createKey(key); KeyVaultKey keyVersionTwo = client.createKey(keyWithNewVal); assertKeyEquals(key, client.getKey(keyVersionOne.getName(), keyVersionOne.getProperties().getVersion())); assertKeyEquals(keyWithNewVal, client.getKey(keyVersionTwo.getName(), keyVersionTwo.getProperties().getVersion())); }); } /** * Tests that an attempt to get a non-existing key throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.getKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that an existing key can be deleted. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); deleteKeyRunner((keyToDelete) -> { sleepInRecordMode(30000); assertKeyEquals(keyToDelete, client.createKey(keyToDelete)); SyncPoller<DeletedKey, Void> deletedKeyPoller = client.beginDeleteKey(keyToDelete.getName()); PollResponse<DeletedKey> pollResponse = deletedKeyPoller.poll(); DeletedKey deletedKey = pollResponse.getValue(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(10000); pollResponse = deletedKeyPoller.poll(); } assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); assertNotNull(deletedKey.getScheduledPurgeDate()); assertEquals(keyToDelete.getName(), deletedKey.getName()); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.beginDeleteKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that an attempt to retrieve a non existing deleted key throws an error on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.getDeletedKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a deleted key can be recovered on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); recoverDeletedKeyRunner((keyToDeleteAndRecover) -> { assertKeyEquals(keyToDeleteAndRecover, client.createKey(keyToDeleteAndRecover)); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToDeleteAndRecover.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } assertNotNull(pollResponse.getValue()); SyncPoller<KeyVaultKey, Void> recoverPoller = client.beginRecoverDeletedKey(keyToDeleteAndRecover.getName()); PollResponse<KeyVaultKey> recoverPollResponse = recoverPoller.poll(); KeyVaultKey recoveredKey = recoverPollResponse.getValue(); recoverPollResponse = recoverPoller.poll(); while (!recoverPollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); recoverPollResponse = recoverPoller.poll(); } assertEquals(keyToDeleteAndRecover.getName(), recoveredKey.getName()); assertEquals(keyToDeleteAndRecover.getNotBefore(), recoveredKey.getProperties().getNotBefore()); assertEquals(keyToDeleteAndRecover.getExpiresOn(), recoveredKey.getProperties().getExpiresOn()); }); } /** * Tests that an attempt to recover a non existing deleted key throws an error on a soft-delete enabled vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.beginRecoverDeletedKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a key can be backed up in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); backupKeyRunner((keyToBackup) -> { assertKeyEquals(keyToBackup, client.createKey(keyToBackup)); byte[] backupBytes = (client.backupKey(keyToBackup.getName())); assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); }); } /** * Tests that an attempt to backup a non existing key throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.backupKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a key can be backed up in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); restoreKeyRunner((keyToBackupAndRestore) -> { assertKeyEquals(keyToBackupAndRestore, client.createKey(keyToBackupAndRestore)); byte[] backupBytes = (client.backupKey(keyToBackupAndRestore.getName())); assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToBackupAndRestore.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } client.purgeDeletedKey(keyToBackupAndRestore.getName()); pollOnKeyPurge(keyToBackupAndRestore.getName()); sleepInRecordMode(60000); KeyVaultKey restoredKey = client.restoreKeyBackup(backupBytes); assertEquals(keyToBackupAndRestore.getName(), restoredKey.getName()); assertEquals(keyToBackupAndRestore.getExpiresOn(), restoredKey.getProperties().getExpiresOn()); }); } /** * Tests that an attempt to restore a key from malformed backup bytes throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreKeyFromMalformedBackup(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); byte[] keyBackupBytes = "non-existing".getBytes(); assertRestException(() -> client.restoreKeyBackup(keyBackupBytes), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); } /** * Tests that keys can be listed in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); listKeysRunner((keys) -> { HashMap<String, CreateKeyOptions> keysToList = keys; for (CreateKeyOptions key : keysToList.values()) { assertKeyEquals(key, client.createKey(key)); sleepInRecordMode(5000); } for (KeyProperties actualKey : client.listPropertiesOfKeys()) { if (keys.containsKey(actualKey.getName())) { CreateKeyOptions expectedKey = keys.get(actualKey.getName()); assertEquals(expectedKey.getExpiresOn(), actualKey.getExpiresOn()); assertEquals(expectedKey.getNotBefore(), actualKey.getNotBefore()); keys.remove(actualKey.getName()); } } assertEquals(0, keys.size()); }); } /** * Tests that a deleted key can be retrieved on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); getDeletedKeyRunner((keyToDeleteAndGet) -> { assertKeyEquals(keyToDeleteAndGet, client.createKey(keyToDeleteAndGet)); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToDeleteAndGet.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } sleepInRecordMode(30000); DeletedKey deletedKey = client.getDeletedKey(keyToDeleteAndGet.getName()); assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); assertNotNull(deletedKey.getScheduledPurgeDate()); assertEquals(keyToDeleteAndGet.getName(), deletedKey.getName()); }); } /** * Tests that deleted keys can be listed in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listDeletedKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); if (!interceptorManager.isPlaybackMode()) { return; } listDeletedKeysRunner((keys) -> { HashMap<String, CreateKeyOptions> keysToDelete = keys; for (CreateKeyOptions key : keysToDelete.values()) { assertKeyEquals(key, client.createKey(key)); } for (CreateKeyOptions key : keysToDelete.values()) { SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(key.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } } sleepInRecordMode(90000); Iterable<DeletedKey> deletedKeys = client.listDeletedKeys(); for (DeletedKey deletedKey : deletedKeys) { assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); } }); } /** * Tests that key versions can be listed in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listKeyVersions(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); listKeyVersionsRunner((keys) -> { List<CreateKeyOptions> keyVersions = keys; String keyName = null; for (CreateKeyOptions key : keyVersions) { keyName = key.getName(); sleepInRecordMode(4000); assertKeyEquals(key, client.createKey(key)); } Iterable<KeyProperties> keyVersionsOutput = client.listPropertiesOfKeyVersions(keyName); List<KeyProperties> keyVersionsList = new ArrayList<>(); keyVersionsOutput.forEach(keyVersionsList::add); assertEquals(keyVersions.size(), keyVersionsList.size()); }); } /** * Tests that an existing key can be released. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void releaseKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(runManagedHsmTest); createKeyClient(httpClient, serviceVersion); releaseKeyRunner((keyToRelease, attestationUrl) -> { assertKeyEquals(keyToRelease, client.createRsaKey(keyToRelease)); String targetAttestationToken = "testAttestationToken"; if (getTestMode() != TestMode.PLAYBACK) { if (!attestationUrl.endsWith("/")) { attestationUrl = attestationUrl + "/"; } try { targetAttestationToken = getAttestationToken(attestationUrl + "generate-test-token"); } catch (IOException e) { fail("Found error when deserializing attestation token.", e); } } ReleaseKeyResult releaseKeyResult = client.releaseKey(keyToRelease.getName(), targetAttestationToken); assertNotNull(releaseKeyResult.getValue()); }); } /** * Tests that fetching the key rotation policy of a non-existent key throws. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") /** * Tests that fetching the key rotation policy of a non-existent key throws. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") public void getKeyRotationPolicyWithNoPolicySet(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); String keyName = testResourceNamer.randomName("rotateKey", 20); client.createRsaKey(new CreateRsaKeyOptions(keyName)); KeyRotationPolicy keyRotationPolicy = client.getKeyRotationPolicy(keyName); assertNotNull(keyRotationPolicy); assertNull(keyRotationPolicy.getId()); assertNull(keyRotationPolicy.getCreatedOn()); assertNull(keyRotationPolicy.getUpdatedOn()); assertNull(keyRotationPolicy.getExpiresIn()); assertEquals(1, keyRotationPolicy.getLifetimeActions().size()); assertEquals(KeyRotationPolicyAction.NOTIFY, keyRotationPolicy.getLifetimeActions().get(0).getAction()); assertEquals("P30D", keyRotationPolicy.getLifetimeActions().get(0).getTimeBeforeExpiry()); assertNull(keyRotationPolicy.getLifetimeActions().get(0).getTimeAfterCreate()); } /** * Tests that fetching the key rotation policy of a non-existent key throws. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") public void updateGetKeyRotationPolicyWithMinimumProperties(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); updateGetKeyRotationPolicyWithMinimumPropertiesRunner((keyName, keyRotationPolicy) -> { client.createRsaKey(new CreateRsaKeyOptions(keyName)); KeyRotationPolicy updatedKeyRotationPolicy = client.updateKeyRotationPolicy(keyName, keyRotationPolicy); KeyRotationPolicy retrievedKeyRotationPolicy = client.getKeyRotationPolicy(keyName); assertKeyVaultRotationPolicyEquals(updatedKeyRotationPolicy, retrievedKeyRotationPolicy); }); } /** * Tests that an key rotation policy can be updated with all possible properties, then retrieves it. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") public void updateGetKeyRotationPolicyWithAllProperties(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); updateGetKeyRotationPolicyWithAllPropertiesRunner((keyName, keyRotationPolicy) -> { client.createRsaKey(new CreateRsaKeyOptions(keyName)); KeyRotationPolicy updatedKeyRotationPolicy = client.updateKeyRotationPolicy(keyName, keyRotationPolicy); KeyRotationPolicy retrievedKeyRotationPolicy = client.getKeyRotationPolicy(keyName); assertKeyVaultRotationPolicyEquals(updatedKeyRotationPolicy, retrievedKeyRotationPolicy); }); } /** * Tests that a key can be rotated. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") public void rotateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); String keyName = testResourceNamer.randomName("rotateKey", 20); KeyVaultKey createdKey = client.createRsaKey(new CreateRsaKeyOptions(keyName)); KeyVaultKey rotatedKey = client.rotateKey(keyName); assertEquals(createdKey.getName(), rotatedKey.getName()); assertEquals(createdKey.getProperties().getTags(), rotatedKey.getProperties().getTags()); } /** * Tests that a {@link CryptographyClient} can be created for a given key and version using a {@link KeyClient}. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClient(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); CryptographyClient cryptographyClient = client.getCryptographyClient("myKey"); assertNotNull(cryptographyClient); } /** * Tests that a {@link CryptographyClient} can be created for a given key using a {@link KeyClient}. Also tests * that cryptographic operations can be performed with said cryptography client. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClientAndEncryptDecrypt(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); setKeyRunner((createKeyOptions) -> { assertKeyEquals(createKeyOptions, client.createKey(createKeyOptions)); CryptographyClient cryptographyClient = client.getCryptographyClient(createKeyOptions.getName()); assertNotNull(cryptographyClient); byte[] plaintext = "myPlaintext".getBytes(); byte[] ciphertext = cryptographyClient.encrypt(EncryptionAlgorithm.RSA_OAEP, plaintext).getCipherText(); byte[] decryptedText = cryptographyClient.decrypt(EncryptionAlgorithm.RSA_OAEP, ciphertext).getPlainText(); assertArrayEquals(plaintext, decryptedText); }); } /** * Tests that a {@link CryptographyClient} can be created for a given key and version using a {@link KeyClient}. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClientWithKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); CryptographyClient cryptographyClient = client.getCryptographyClient("myKey", "6A385B124DEF4096AF1361A85B16C204"); assertNotNull(cryptographyClient); } /** * Tests that a {@link CryptographyClient} can be created for a given key using a {@link KeyClient}. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClientWithEmptyKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); CryptographyClient cryptographyClient = client.getCryptographyClient("myKey", ""); assertNotNull(cryptographyClient); } /** * Tests that a {@link CryptographyClient} can be created for a given key using a {@link KeyClient}. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClientWithNullKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); CryptographyClient cryptographyClient = client.getCryptographyClient("myKey", null); assertNotNull(cryptographyClient); } private void pollOnKeyPurge(String keyName) { int pendingPollCount = 0; while (pendingPollCount < 10) { DeletedKey deletedKey = null; try { deletedKey = client.getDeletedKey(keyName); } catch (ResourceNotFoundException e) { } if (deletedKey != null) { sleepInRecordMode(2000); pendingPollCount += 1; continue; } else { return; } } System.err.printf("Deleted Key %s was not purged \n", keyName); } }
class KeyClientTest extends KeyClientTestBase { protected KeyClient client; @Override protected void beforeTest() { beforeTestSetup(); } protected void createKeyClient(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion, null); } protected void createKeyClient(HttpClient httpClient, KeyServiceVersion serviceVersion, String testTenantId) { HttpPipeline httpPipeline = getHttpPipeline(httpClient, testTenantId); KeyAsyncClient asyncClient = spy(new KeyClientBuilder() .vaultUrl(getEndpoint()) .pipeline(httpPipeline) .serviceVersion(serviceVersion) .buildAsyncClient()); if (interceptorManager.isPlaybackMode()) { when(asyncClient.getDefaultPollingInterval()).thenReturn(Duration.ofMillis(10)); } client = new KeyClient(asyncClient); } /** * Tests that a key can be created in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); setKeyRunner((expected) -> assertKeyEquals(expected, client.createKey(expected))); } /** * Tests that a key can be created in the key vault while using a different tenant ID than the one that will be * provided in the authentication challenge. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyWithMultipleTenants(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion, testResourceNamer.randomUuid()); setKeyRunner((expected) -> assertKeyEquals(expected, client.createKey(expected))); KeyVaultCredentialPolicy.clearCache(); setKeyRunner((expected) -> assertKeyEquals(expected, client.createKey(expected))); } /** * Tests that an RSA key is created. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void createRsaKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); createRsaKeyRunner((expected) -> assertKeyEquals(expected, client.createRsaKey(expected))); } /** * Tests that an attempt to create a key with empty string name throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyEmptyName(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); final KeyType keyType; if (runManagedHsmTest) { keyType = KeyType.RSA_HSM; } else { keyType = KeyType.RSA; } assertRestException(() -> client.createKey("", keyType), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); } /** * Tests that we cannot create keys when key type is null. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyNullType(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); setKeyEmptyValueRunner((key) -> { assertRestException(() -> client.createKey(key.getName(), key.getKeyType()), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); }); } /** * Verifies that an exception is thrown when null key object is passed for creation. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void setKeyNull(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRunnableThrowsException(() -> client.createKey(null), NullPointerException.class); assertRunnableThrowsException(() -> client.createKey(null), NullPointerException.class); } /** * Tests that a key is able to be updated when it exists. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); updateKeyRunner((createKeyOptions, updateKeyOptions) -> { KeyVaultKey createdKey = client.createKey(createKeyOptions); assertKeyEquals(createKeyOptions, createdKey); KeyVaultKey updatedKey = client.updateKeyProperties(createdKey.getProperties().setExpiresOn(updateKeyOptions.getExpiresOn())); assertKeyEquals(updateKeyOptions, updatedKey); }); } /** * Tests that a key is able to be updated when it is disabled. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void updateDisabledKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); updateDisabledKeyRunner((createKeyOptions, updateKeyOptions) -> { KeyVaultKey createdKey = client.createKey(createKeyOptions); assertKeyEquals(createKeyOptions, createdKey); KeyVaultKey updatedKey = client.updateKeyProperties(createdKey.getProperties().setExpiresOn(updateKeyOptions.getExpiresOn())); assertKeyEquals(updateKeyOptions, updatedKey); }); } /** * Tests that an existing key can be retrieved. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); getKeyRunner((original) -> { client.createKey(original); assertKeyEquals(original, client.getKey(original.getName())); }); } /** * Tests that a specific version of the key can be retrieved. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKeySpecificVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); getKeySpecificVersionRunner((key, keyWithNewVal) -> { KeyVaultKey keyVersionOne = client.createKey(key); KeyVaultKey keyVersionTwo = client.createKey(keyWithNewVal); assertKeyEquals(key, client.getKey(keyVersionOne.getName(), keyVersionOne.getProperties().getVersion())); assertKeyEquals(keyWithNewVal, client.getKey(keyVersionTwo.getName(), keyVersionTwo.getProperties().getVersion())); }); } /** * Tests that an attempt to get a non-existing key throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.getKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that an existing key can be deleted. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); deleteKeyRunner((keyToDelete) -> { sleepInRecordMode(30000); assertKeyEquals(keyToDelete, client.createKey(keyToDelete)); SyncPoller<DeletedKey, Void> deletedKeyPoller = client.beginDeleteKey(keyToDelete.getName()); PollResponse<DeletedKey> pollResponse = deletedKeyPoller.poll(); DeletedKey deletedKey = pollResponse.getValue(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(10000); pollResponse = deletedKeyPoller.poll(); } assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); assertNotNull(deletedKey.getScheduledPurgeDate()); assertEquals(keyToDelete.getName(), deletedKey.getName()); }); } @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void deleteKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.beginDeleteKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that an attempt to retrieve a non existing deleted key throws an error on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.getDeletedKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a deleted key can be recovered on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); recoverDeletedKeyRunner((keyToDeleteAndRecover) -> { assertKeyEquals(keyToDeleteAndRecover, client.createKey(keyToDeleteAndRecover)); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToDeleteAndRecover.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } assertNotNull(pollResponse.getValue()); SyncPoller<KeyVaultKey, Void> recoverPoller = client.beginRecoverDeletedKey(keyToDeleteAndRecover.getName()); PollResponse<KeyVaultKey> recoverPollResponse = recoverPoller.poll(); KeyVaultKey recoveredKey = recoverPollResponse.getValue(); recoverPollResponse = recoverPoller.poll(); while (!recoverPollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); recoverPollResponse = recoverPoller.poll(); } assertEquals(keyToDeleteAndRecover.getName(), recoveredKey.getName()); assertEquals(keyToDeleteAndRecover.getNotBefore(), recoveredKey.getProperties().getNotBefore()); assertEquals(keyToDeleteAndRecover.getExpiresOn(), recoveredKey.getProperties().getExpiresOn()); }); } /** * Tests that an attempt to recover a non existing deleted key throws an error on a soft-delete enabled vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void recoverDeletedKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.beginRecoverDeletedKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a key can be backed up in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); backupKeyRunner((keyToBackup) -> { assertKeyEquals(keyToBackup, client.createKey(keyToBackup)); byte[] backupBytes = (client.backupKey(keyToBackup.getName())); assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); }); } /** * Tests that an attempt to backup a non existing key throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void backupKeyNotFound(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); assertRestException(() -> client.backupKey("non-existing"), ResourceNotFoundException.class, HttpURLConnection.HTTP_NOT_FOUND); } /** * Tests that a key can be backed up in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); restoreKeyRunner((keyToBackupAndRestore) -> { assertKeyEquals(keyToBackupAndRestore, client.createKey(keyToBackupAndRestore)); byte[] backupBytes = (client.backupKey(keyToBackupAndRestore.getName())); assertNotNull(backupBytes); assertTrue(backupBytes.length > 0); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToBackupAndRestore.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } client.purgeDeletedKey(keyToBackupAndRestore.getName()); pollOnKeyPurge(keyToBackupAndRestore.getName()); sleepInRecordMode(60000); KeyVaultKey restoredKey = client.restoreKeyBackup(backupBytes); assertEquals(keyToBackupAndRestore.getName(), restoredKey.getName()); assertEquals(keyToBackupAndRestore.getExpiresOn(), restoredKey.getProperties().getExpiresOn()); }); } /** * Tests that an attempt to restore a key from malformed backup bytes throws an error. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void restoreKeyFromMalformedBackup(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); byte[] keyBackupBytes = "non-existing".getBytes(); assertRestException(() -> client.restoreKeyBackup(keyBackupBytes), ResourceModifiedException.class, HttpURLConnection.HTTP_BAD_REQUEST); } /** * Tests that keys can be listed in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); listKeysRunner((keys) -> { HashMap<String, CreateKeyOptions> keysToList = keys; for (CreateKeyOptions key : keysToList.values()) { assertKeyEquals(key, client.createKey(key)); sleepInRecordMode(5000); } for (KeyProperties actualKey : client.listPropertiesOfKeys()) { if (keys.containsKey(actualKey.getName())) { CreateKeyOptions expectedKey = keys.get(actualKey.getName()); assertEquals(expectedKey.getExpiresOn(), actualKey.getExpiresOn()); assertEquals(expectedKey.getNotBefore(), actualKey.getNotBefore()); keys.remove(actualKey.getName()); } } assertEquals(0, keys.size()); }); } /** * Tests that a deleted key can be retrieved on a soft-delete enabled vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getDeletedKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); getDeletedKeyRunner((keyToDeleteAndGet) -> { assertKeyEquals(keyToDeleteAndGet, client.createKey(keyToDeleteAndGet)); SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(keyToDeleteAndGet.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } sleepInRecordMode(30000); DeletedKey deletedKey = client.getDeletedKey(keyToDeleteAndGet.getName()); assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); assertNotNull(deletedKey.getScheduledPurgeDate()); assertEquals(keyToDeleteAndGet.getName(), deletedKey.getName()); }); } /** * Tests that deleted keys can be listed in the key vault. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listDeletedKeys(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); if (!interceptorManager.isPlaybackMode()) { return; } listDeletedKeysRunner((keys) -> { HashMap<String, CreateKeyOptions> keysToDelete = keys; for (CreateKeyOptions key : keysToDelete.values()) { assertKeyEquals(key, client.createKey(key)); } for (CreateKeyOptions key : keysToDelete.values()) { SyncPoller<DeletedKey, Void> poller = client.beginDeleteKey(key.getName()); PollResponse<DeletedKey> pollResponse = poller.poll(); while (!pollResponse.getStatus().isComplete()) { sleepInRecordMode(1000); pollResponse = poller.poll(); } } sleepInRecordMode(90000); Iterable<DeletedKey> deletedKeys = client.listDeletedKeys(); for (DeletedKey deletedKey : deletedKeys) { assertNotNull(deletedKey.getDeletedOn()); assertNotNull(deletedKey.getRecoveryId()); } }); } /** * Tests that key versions can be listed in the key vault. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void listKeyVersions(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); listKeyVersionsRunner((keys) -> { List<CreateKeyOptions> keyVersions = keys; String keyName = null; for (CreateKeyOptions key : keyVersions) { keyName = key.getName(); sleepInRecordMode(4000); assertKeyEquals(key, client.createKey(key)); } Iterable<KeyProperties> keyVersionsOutput = client.listPropertiesOfKeyVersions(keyName); List<KeyProperties> keyVersionsList = new ArrayList<>(); keyVersionsOutput.forEach(keyVersionsList::add); assertEquals(keyVersions.size(), keyVersionsList.size()); }); } /** * Tests that an existing key can be released. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void releaseKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(runManagedHsmTest); createKeyClient(httpClient, serviceVersion); releaseKeyRunner((keyToRelease, attestationUrl) -> { assertKeyEquals(keyToRelease, client.createRsaKey(keyToRelease)); String targetAttestationToken = "testAttestationToken"; if (getTestMode() != TestMode.PLAYBACK) { if (!attestationUrl.endsWith("/")) { attestationUrl = attestationUrl + "/"; } try { targetAttestationToken = getAttestationToken(attestationUrl + "generate-test-token"); } catch (IOException e) { fail("Found error when deserializing attestation token.", e); } } ReleaseKeyResult releaseKeyResult = client.releaseKey(keyToRelease.getName(), targetAttestationToken); assertNotNull(releaseKeyResult.getValue()); }); } /** * Tests that fetching the key rotation policy of a non-existent key throws. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") /** * Tests that fetching the key rotation policy of a non-existent key throws. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") public void getKeyRotationPolicyWithNoPolicySet(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); String keyName = testResourceNamer.randomName("rotateKey", 20); client.createRsaKey(new CreateRsaKeyOptions(keyName)); KeyRotationPolicy keyRotationPolicy = client.getKeyRotationPolicy(keyName); assertNotNull(keyRotationPolicy); assertNull(keyRotationPolicy.getId()); assertNull(keyRotationPolicy.getCreatedOn()); assertNull(keyRotationPolicy.getUpdatedOn()); assertNull(keyRotationPolicy.getExpiresIn()); assertEquals(1, keyRotationPolicy.getLifetimeActions().size()); assertEquals(KeyRotationPolicyAction.NOTIFY, keyRotationPolicy.getLifetimeActions().get(0).getAction()); assertEquals("P30D", keyRotationPolicy.getLifetimeActions().get(0).getTimeBeforeExpiry()); assertNull(keyRotationPolicy.getLifetimeActions().get(0).getTimeAfterCreate()); } /** * Tests that fetching the key rotation policy of a non-existent key throws. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") public void updateGetKeyRotationPolicyWithMinimumProperties(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); updateGetKeyRotationPolicyWithMinimumPropertiesRunner((keyName, keyRotationPolicy) -> { client.createRsaKey(new CreateRsaKeyOptions(keyName)); KeyRotationPolicy updatedKeyRotationPolicy = client.updateKeyRotationPolicy(keyName, keyRotationPolicy); KeyRotationPolicy retrievedKeyRotationPolicy = client.getKeyRotationPolicy(keyName); assertKeyVaultRotationPolicyEquals(updatedKeyRotationPolicy, retrievedKeyRotationPolicy); }); } /** * Tests that an key rotation policy can be updated with all possible properties, then retrieves it. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") public void updateGetKeyRotationPolicyWithAllProperties(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); updateGetKeyRotationPolicyWithAllPropertiesRunner((keyName, keyRotationPolicy) -> { client.createRsaKey(new CreateRsaKeyOptions(keyName)); KeyRotationPolicy updatedKeyRotationPolicy = client.updateKeyRotationPolicy(keyName, keyRotationPolicy); KeyRotationPolicy retrievedKeyRotationPolicy = client.getKeyRotationPolicy(keyName); assertKeyVaultRotationPolicyEquals(updatedKeyRotationPolicy, retrievedKeyRotationPolicy); }); } /** * Tests that a key can be rotated. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") @DisabledIfSystemProperty(named = "IS_SKIP_ROTATION_POLICY_TEST", matches = "true") public void rotateKey(HttpClient httpClient, KeyServiceVersion serviceVersion) { Assumptions.assumeTrue(!isHsmEnabled); createKeyClient(httpClient, serviceVersion); String keyName = testResourceNamer.randomName("rotateKey", 20); KeyVaultKey createdKey = client.createRsaKey(new CreateRsaKeyOptions(keyName)); KeyVaultKey rotatedKey = client.rotateKey(keyName); assertEquals(createdKey.getName(), rotatedKey.getName()); assertEquals(createdKey.getProperties().getTags(), rotatedKey.getProperties().getTags()); } /** * Tests that a {@link CryptographyClient} can be created for a given key and version using a {@link KeyClient}. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClient(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); CryptographyClient cryptographyClient = client.getCryptographyClient("myKey"); assertNotNull(cryptographyClient); } /** * Tests that a {@link CryptographyClient} can be created for a given key using a {@link KeyClient}. Also tests * that cryptographic operations can be performed with said cryptography client. 
*/ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClientAndEncryptDecrypt(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); setKeyRunner((createKeyOptions) -> { assertKeyEquals(createKeyOptions, client.createKey(createKeyOptions)); CryptographyClient cryptographyClient = client.getCryptographyClient(createKeyOptions.getName()); assertNotNull(cryptographyClient); byte[] plaintext = "myPlaintext".getBytes(); byte[] ciphertext = cryptographyClient.encrypt(EncryptionAlgorithm.RSA_OAEP, plaintext).getCipherText(); byte[] decryptedText = cryptographyClient.decrypt(EncryptionAlgorithm.RSA_OAEP, ciphertext).getPlainText(); assertArrayEquals(plaintext, decryptedText); }); } /** * Tests that a {@link CryptographyClient} can be created for a given key and version using a {@link KeyClient}. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClientWithKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); CryptographyClient cryptographyClient = client.getCryptographyClient("myKey", "6A385B124DEF4096AF1361A85B16C204"); assertNotNull(cryptographyClient); } /** * Tests that a {@link CryptographyClient} can be created for a given key using a {@link KeyClient}. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClientWithEmptyKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); CryptographyClient cryptographyClient = client.getCryptographyClient("myKey", ""); assertNotNull(cryptographyClient); } /** * Tests that a {@link CryptographyClient} can be created for a given key using a {@link KeyClient}. */ @ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS) @MethodSource("getTestParameters") public void getCryptographyClientWithNullKeyVersion(HttpClient httpClient, KeyServiceVersion serviceVersion) { createKeyClient(httpClient, serviceVersion); CryptographyClient cryptographyClient = client.getCryptographyClient("myKey", null); assertNotNull(cryptographyClient); } private void pollOnKeyPurge(String keyName) { int pendingPollCount = 0; while (pendingPollCount < 10) { DeletedKey deletedKey = null; try { deletedKey = client.getDeletedKey(keyName); } catch (ResourceNotFoundException e) { } if (deletedKey != null) { sleepInRecordMode(2000); pendingPollCount += 1; continue; } else { return; } } System.err.printf("Deleted Key %s was not purged \n", keyName); } }
I notice that in the tests we are adding the prefix `"vmId:"`. If that's the prefix we always want to add, is it worth adding it here as well, like we have done in `setMachineId()`? (See the sketch after this row.)
public void withMachineId(String machineId) { this.machineId = machineId; }
this.machineId = machineId;
public void withMachineId(String machineId) { this.machineId = machineId; }
class DiagnosticsClientConfig { private AtomicInteger activeClientsCnt; private int clientId; private ConsistencyLevel consistencyLevel; private boolean connectionSharingAcrossClientsEnabled; private String consistencyRelatedConfigAsString; private String httpConfigAsString; private String otherCfgAsString; private List<String> preferredRegions; private boolean endpointDiscoveryEnabled; private boolean multipleWriteRegionsEnabled; private HttpClientConfig httpClientConfig; private RntbdTransportClient.Options options; private String rntbdConfigAsString; private ConnectionMode connectionMode; private String machineId; public void withActiveClientCounter(AtomicInteger activeClientsCnt) { this.activeClientsCnt = activeClientsCnt; } public void withClientId(int clientId) { this.clientId = clientId; } public DiagnosticsClientConfig withEndpointDiscoveryEnabled(boolean endpointDiscoveryEnabled) { this.endpointDiscoveryEnabled = endpointDiscoveryEnabled; return this; } public DiagnosticsClientConfig withMultipleWriteRegionsEnabled(boolean multipleWriteRegionsEnabled) { this.multipleWriteRegionsEnabled = multipleWriteRegionsEnabled; return this; } public DiagnosticsClientConfig withPreferredRegions(List<String> preferredRegions) { this.preferredRegions = preferredRegions; return this; } public DiagnosticsClientConfig withConnectionSharingAcrossClientsEnabled(boolean connectionSharingAcrossClientsEnabled) { this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled; return this; } public DiagnosticsClientConfig withConsistency(ConsistencyLevel consistencyLevel) { this.consistencyLevel = consistencyLevel; return this; } public DiagnosticsClientConfig withRntbdOptions(RntbdTransportClient.Options options) { this.options = options; return this; } public DiagnosticsClientConfig withGatewayHttpClientConfig(HttpClientConfig httpClientConfig) { this.httpClientConfig = httpClientConfig; return this; } public DiagnosticsClientConfig withConnectionMode(ConnectionMode connectionMode) { this.connectionMode = connectionMode; return this; } public ConnectionMode getConnectionMode() { return connectionMode; } public String consistencyRelatedConfig() { if (consistencyRelatedConfigAsString == null) { this.consistencyRelatedConfigAsString = this.consistencyRelatedConfigInternal(); } return this.consistencyRelatedConfigAsString; } public String rntbdConfig() { if (this.rntbdConfigAsString == null) { this.rntbdConfigAsString = this.rntbdConfigInternal(this.options); } return this.rntbdConfigAsString; } public String gwConfig() { if (this.httpConfigAsString == null) { this.httpConfigAsString = this.gwConfigInternal(); } return this.httpConfigAsString; } public String otherConnectionConfig() { if (this.otherCfgAsString == null) { this.otherCfgAsString = Strings.lenientFormat("(ed: %s, cs: %s)", this.endpointDiscoveryEnabled, this.connectionSharingAcrossClientsEnabled); } return this.otherCfgAsString; } public int getClientId() { return this.clientId; } public String getMachineId() { return this.machineId; } public int getActiveClientsCount() { return this.activeClientsCnt != null ? 
this.activeClientsCnt.get() : -1; } private String gwConfigInternal() { if (this.httpClientConfig == null) { return null; } return Strings.lenientFormat("(cps:%s, nrto:%s, icto:%s, p:%s)", this.httpClientConfig.getMaxPoolSize(), this.httpClientConfig.getNetworkRequestTimeout(), this.httpClientConfig.getMaxIdleConnectionTimeout(), this.httpClientConfig.getProxy() != null); } private String rntbdConfigInternal(RntbdTransportClient.Options rntbdOptions) { if (rntbdOptions == null) { return null; } return Strings.lenientFormat("(cto:%s, nrto:%s, icto:%s, ieto:%s, mcpe:%s, mrpc:%s, cer:%s)", rntbdOptions.connectTimeout(), rntbdOptions.tcpNetworkRequestTimeout(), rntbdOptions.idleChannelTimeout(), rntbdOptions.idleEndpointTimeout(), rntbdOptions.maxChannelsPerEndpoint(), rntbdOptions.maxRequestsPerChannel(), rntbdOptions.isConnectionEndpointRediscoveryEnabled()); } private String preferredRegionsInternal() { if (preferredRegions == null) { return ""; } return preferredRegions.stream().map(r -> r.toLowerCase(Locale.ROOT).replaceAll(" ", "")).collect(Collectors.joining(",")); } private String consistencyRelatedConfigInternal() { return Strings.lenientFormat("(consistency: %s, mm: %s, prgns: [%s])", this.consistencyLevel, this.multipleWriteRegionsEnabled, preferredRegionsInternal()); } }
class DiagnosticsClientConfig { private AtomicInteger activeClientsCnt; private int clientId; private ConsistencyLevel consistencyLevel; private boolean connectionSharingAcrossClientsEnabled; private String consistencyRelatedConfigAsString; private String httpConfigAsString; private String otherCfgAsString; private List<String> preferredRegions; private boolean endpointDiscoveryEnabled; private boolean multipleWriteRegionsEnabled; private HttpClientConfig httpClientConfig; private RntbdTransportClient.Options options; private String rntbdConfigAsString; private ConnectionMode connectionMode; private String machineId; public void withActiveClientCounter(AtomicInteger activeClientsCnt) { this.activeClientsCnt = activeClientsCnt; } public void withClientId(int clientId) { this.clientId = clientId; } public DiagnosticsClientConfig withEndpointDiscoveryEnabled(boolean endpointDiscoveryEnabled) { this.endpointDiscoveryEnabled = endpointDiscoveryEnabled; return this; } public DiagnosticsClientConfig withMultipleWriteRegionsEnabled(boolean multipleWriteRegionsEnabled) { this.multipleWriteRegionsEnabled = multipleWriteRegionsEnabled; return this; } public DiagnosticsClientConfig withPreferredRegions(List<String> preferredRegions) { this.preferredRegions = preferredRegions; return this; } public DiagnosticsClientConfig withConnectionSharingAcrossClientsEnabled(boolean connectionSharingAcrossClientsEnabled) { this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled; return this; } public DiagnosticsClientConfig withConsistency(ConsistencyLevel consistencyLevel) { this.consistencyLevel = consistencyLevel; return this; } public DiagnosticsClientConfig withRntbdOptions(RntbdTransportClient.Options options) { this.options = options; return this; } public DiagnosticsClientConfig withGatewayHttpClientConfig(HttpClientConfig httpClientConfig) { this.httpClientConfig = httpClientConfig; return this; } public DiagnosticsClientConfig withConnectionMode(ConnectionMode connectionMode) { this.connectionMode = connectionMode; return this; } public ConnectionMode getConnectionMode() { return connectionMode; } public String consistencyRelatedConfig() { if (consistencyRelatedConfigAsString == null) { this.consistencyRelatedConfigAsString = this.consistencyRelatedConfigInternal(); } return this.consistencyRelatedConfigAsString; } public String rntbdConfig() { if (this.rntbdConfigAsString == null) { this.rntbdConfigAsString = this.rntbdConfigInternal(this.options); } return this.rntbdConfigAsString; } public String gwConfig() { if (this.httpConfigAsString == null) { this.httpConfigAsString = this.gwConfigInternal(); } return this.httpConfigAsString; } public String otherConnectionConfig() { if (this.otherCfgAsString == null) { this.otherCfgAsString = Strings.lenientFormat("(ed: %s, cs: %s)", this.endpointDiscoveryEnabled, this.connectionSharingAcrossClientsEnabled); } return this.otherCfgAsString; } public int getClientId() { return this.clientId; } public String getMachineId() { return this.machineId; } public int getActiveClientsCount() { return this.activeClientsCnt != null ? 
this.activeClientsCnt.get() : -1; } private String gwConfigInternal() { if (this.httpClientConfig == null) { return null; } return Strings.lenientFormat("(cps:%s, nrto:%s, icto:%s, p:%s)", this.httpClientConfig.getMaxPoolSize(), this.httpClientConfig.getNetworkRequestTimeout(), this.httpClientConfig.getMaxIdleConnectionTimeout(), this.httpClientConfig.getProxy() != null); } private String rntbdConfigInternal(RntbdTransportClient.Options rntbdOptions) { if (rntbdOptions == null) { return null; } return Strings.lenientFormat("(cto:%s, nrto:%s, icto:%s, ieto:%s, mcpe:%s, mrpc:%s, cer:%s)", rntbdOptions.connectTimeout(), rntbdOptions.tcpNetworkRequestTimeout(), rntbdOptions.idleChannelTimeout(), rntbdOptions.idleEndpointTimeout(), rntbdOptions.maxChannelsPerEndpoint(), rntbdOptions.maxRequestsPerChannel(), rntbdOptions.isConnectionEndpointRediscoveryEnabled()); } private String preferredRegionsInternal() { if (preferredRegions == null) { return ""; } return preferredRegions.stream().map(r -> r.toLowerCase(Locale.ROOT).replaceAll(" ", "")).collect(Collectors.joining(",")); } private String consistencyRelatedConfigInternal() { return Strings.lenientFormat("(consistency: %s, mm: %s, prgns: [%s])", this.consistencyLevel, this.multipleWriteRegionsEnabled, preferredRegionsInternal()); } }
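As a hedged illustration of the reviewer's suggestion above (this is not the actual SDK change), `DiagnosticsClientConfig.withMachineId` could own the `"vmId:"` prefix itself instead of relying on callers. The constant name `VM_ID_PREFIX` and the null handling are assumptions; whether every caller's value should receive the prefix (for example the `"uuid:"`-prefixed fallback id built in `RxDocumentClientImpl`) is exactly the open question the comment raises.

    // Hypothetical sketch only: always store the machine id with the "vmId:" prefix,
    // mirroring the value the tests expect. VM_ID_PREFIX is an assumed constant name.
    private static final String VM_ID_PREFIX = "vmId:";

    public void withMachineId(String machineId) {
        // Keep null as-is; otherwise prepend the prefix before storing the id.
        this.machineId = machineId == null ? null : VM_ID_PREFIX + machineId;
    }

An alternative design is to keep `withMachineId` a plain assignment and leave the prefixing to whichever component discovers the VM id; the sketch above simply shows what centralizing it in the config builder would look like.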
Should we also rename the getter and the instance variable to `getVmId()` and `vmId`? (See the sketch after this row.)
public String getMachineId() { return machineId; }
return machineId;
public String getMachineId() { return machineId; }
class ClientTelemetryInfo { private String timeStamp; private String machineId; private String clientId; private String processId; private String userAgent; private ConnectionMode connectionMode; private String globalDatabaseAccountName; private String applicationRegion; private String hostEnvInfo; private Boolean acceleratedNetworking; private int aggregationIntervalInSec; private List<String> preferredRegions; private Map<ReportPayload, ConcurrentDoubleHistogram> systemInfoMap; private Map<ReportPayload, ConcurrentDoubleHistogram> cacheRefreshInfoMap; private Map<ReportPayload, ConcurrentDoubleHistogram> operationInfoMap; public ClientTelemetryInfo(String machineId, String clientId, String processId, String userAgent, ConnectionMode connectionMode, String globalDatabaseAccountName, String applicationRegion, String hostEnvInfo, Boolean acceleratedNetworking, List<String> preferredRegions) { this.machineId = machineId; this.clientId = clientId; this.processId = processId; this.userAgent = userAgent; this.connectionMode = connectionMode; this.globalDatabaseAccountName = globalDatabaseAccountName; this.applicationRegion = applicationRegion; this.hostEnvInfo = hostEnvInfo; this.acceleratedNetworking = acceleratedNetworking; this.systemInfoMap = new ConcurrentHashMap<>(); this.cacheRefreshInfoMap = new ConcurrentHashMap<>(); this.operationInfoMap = new ConcurrentHashMap<>(); this.aggregationIntervalInSec = Configs.getClientTelemetrySchedulingInSec(); this.preferredRegions = preferredRegions; } public String getTimeStamp() { return timeStamp; } public void setTimeStamp(String timeStamp) { this.timeStamp = timeStamp; } public String getClientId() { return clientId; } public void setClientId(String clientId) { this.clientId = clientId; } public String getProcessId() { return processId; } public void setProcessId(String processId) { this.processId = processId; } public String getUserAgent() { return userAgent; } public void setUserAgent(String userAgent) { this.userAgent = userAgent; } public ConnectionMode getConnectionMode() { return connectionMode; } public void setConnectionMode(ConnectionMode connectionMode) { this.connectionMode = connectionMode; } public String getGlobalDatabaseAccountName() { return globalDatabaseAccountName; } public void setGlobalDatabaseAccountName(String globalDatabaseAccountName) { this.globalDatabaseAccountName = globalDatabaseAccountName; } public String getApplicationRegion() { return applicationRegion; } public void setApplicationRegion(String applicationRegion) { this.applicationRegion = applicationRegion; } public void setVmId(String vmId) { this.machineId = "vmId:" + machineId; } public String getHostEnvInfo() { return hostEnvInfo; } public void setHostEnvInfo(String hostEnvInfo) { this.hostEnvInfo = hostEnvInfo; } public Boolean getAcceleratedNetworking() { return acceleratedNetworking; } public void setAcceleratedNetworking(Boolean acceleratedNetworking) { this.acceleratedNetworking = acceleratedNetworking; } public int getAggregationIntervalInSec() { return aggregationIntervalInSec; } public void setAggregationIntervalInSec(int aggregationIntervalInSec) { this.aggregationIntervalInSec = aggregationIntervalInSec; } public List<String> getPreferredRegions() { return preferredRegions; } public void setPreferredRegions(List<String> preferredRegions) { this.preferredRegions = preferredRegions; } public Map<ReportPayload, ConcurrentDoubleHistogram> getSystemInfoMap() { return systemInfoMap; } public void setSystemInfoMap(Map<ReportPayload, 
ConcurrentDoubleHistogram> systemInfoMap) { this.systemInfoMap = systemInfoMap; } public Map<ReportPayload, ConcurrentDoubleHistogram> getCacheRefreshInfoMap() { return cacheRefreshInfoMap; } public void setCacheRefreshInfoMap(Map<ReportPayload, ConcurrentDoubleHistogram> cacheRefreshInfoMap) { this.cacheRefreshInfoMap = cacheRefreshInfoMap; } public Map<ReportPayload, ConcurrentDoubleHistogram> getOperationInfoMap() { return operationInfoMap; } public void setOperationInfoMap(Map<ReportPayload, ConcurrentDoubleHistogram> operationInfoMap) { this.operationInfoMap = operationInfoMap; } }
class ClientTelemetryInfo { private String timeStamp; private String machineId; private String clientId; private String processId; private String userAgent; private ConnectionMode connectionMode; private String globalDatabaseAccountName; private String applicationRegion; private String hostEnvInfo; private Boolean acceleratedNetworking; private int aggregationIntervalInSec; private List<String> preferredRegions; private Map<ReportPayload, ConcurrentDoubleHistogram> systemInfoMap; private Map<ReportPayload, ConcurrentDoubleHistogram> cacheRefreshInfoMap; private Map<ReportPayload, ConcurrentDoubleHistogram> operationInfoMap; public ClientTelemetryInfo(String machineId, String clientId, String processId, String userAgent, ConnectionMode connectionMode, String globalDatabaseAccountName, String applicationRegion, String hostEnvInfo, Boolean acceleratedNetworking, List<String> preferredRegions) { this.machineId = machineId; this.clientId = clientId; this.processId = processId; this.userAgent = userAgent; this.connectionMode = connectionMode; this.globalDatabaseAccountName = globalDatabaseAccountName; this.applicationRegion = applicationRegion; this.hostEnvInfo = hostEnvInfo; this.acceleratedNetworking = acceleratedNetworking; this.systemInfoMap = new ConcurrentHashMap<>(); this.cacheRefreshInfoMap = new ConcurrentHashMap<>(); this.operationInfoMap = new ConcurrentHashMap<>(); this.aggregationIntervalInSec = Configs.getClientTelemetrySchedulingInSec(); this.preferredRegions = preferredRegions; } public String getTimeStamp() { return timeStamp; } public void setTimeStamp(String timeStamp) { this.timeStamp = timeStamp; } public String getClientId() { return clientId; } public void setClientId(String clientId) { this.clientId = clientId; } public String getProcessId() { return processId; } public void setProcessId(String processId) { this.processId = processId; } public String getUserAgent() { return userAgent; } public void setUserAgent(String userAgent) { this.userAgent = userAgent; } public ConnectionMode getConnectionMode() { return connectionMode; } public void setConnectionMode(ConnectionMode connectionMode) { this.connectionMode = connectionMode; } public String getGlobalDatabaseAccountName() { return globalDatabaseAccountName; } public void setGlobalDatabaseAccountName(String globalDatabaseAccountName) { this.globalDatabaseAccountName = globalDatabaseAccountName; } public String getApplicationRegion() { return applicationRegion; } public void setApplicationRegion(String applicationRegion) { this.applicationRegion = applicationRegion; } public void setMachineId(String machineId) { this.machineId = machineId; } public String getHostEnvInfo() { return hostEnvInfo; } public void setHostEnvInfo(String hostEnvInfo) { this.hostEnvInfo = hostEnvInfo; } public Boolean getAcceleratedNetworking() { return acceleratedNetworking; } public void setAcceleratedNetworking(Boolean acceleratedNetworking) { this.acceleratedNetworking = acceleratedNetworking; } public int getAggregationIntervalInSec() { return aggregationIntervalInSec; } public void setAggregationIntervalInSec(int aggregationIntervalInSec) { this.aggregationIntervalInSec = aggregationIntervalInSec; } public List<String> getPreferredRegions() { return preferredRegions; } public void setPreferredRegions(List<String> preferredRegions) { this.preferredRegions = preferredRegions; } public Map<ReportPayload, ConcurrentDoubleHistogram> getSystemInfoMap() { return systemInfoMap; } public void setSystemInfoMap(Map<ReportPayload, 
ConcurrentDoubleHistogram> systemInfoMap) { this.systemInfoMap = systemInfoMap; } public Map<ReportPayload, ConcurrentDoubleHistogram> getCacheRefreshInfoMap() { return cacheRefreshInfoMap; } public void setCacheRefreshInfoMap(Map<ReportPayload, ConcurrentDoubleHistogram> cacheRefreshInfoMap) { this.cacheRefreshInfoMap = cacheRefreshInfoMap; } public Map<ReportPayload, ConcurrentDoubleHistogram> getOperationInfoMap() { return operationInfoMap; } public void setOperationInfoMap(Map<ReportPayload, ConcurrentDoubleHistogram> operationInfoMap) { this.operationInfoMap = operationInfoMap; } }
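If the rename suggested above were taken, the `ClientTelemetryInfo` field and accessors might look like the following. This is only a naming sketch under the assumption that nothing else depends on the `machineId` name; whether the serialized telemetry property (and the other call sites of `getMachineId()`) would also need to change is not answered by this diff.

    // Hypothetical rename sketch: field, getter and setter all use "vmId" consistently.
    private String vmId;

    public String getVmId() {
        return vmId;
    }

    public void setVmId(String vmId) {
        this.vmId = vmId;
    }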
Not related to this change, but since you are here, can we also update the clientId? Currently it is `this.clientId = clientIdGenerator.getAndDecrement()`; can we change it to `getAndIncrement()`? (><) (See the sketch after this row.)
public void init(CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, Function<HttpClient, HttpClient> httpClientInterceptor) { try { this.httpClientInterceptor = httpClientInterceptor; if (httpClientInterceptor != null) { this.reactorHttpClient = httpClientInterceptor.apply(httpClient()); } this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode, this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient, this.apiType); this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); if (metadataCachesSnapshot != null) { this.collectionCache = new RxClientCollectionCache(this, this.sessionContainer, this.gatewayProxy, this, this.retryPolicy, metadataCachesSnapshot.getCollectionInfoByNameCache(), metadataCachesSnapshot.getCollectionInfoByIdCache() ); } else { this.collectionCache = new RxClientCollectionCache(this, this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); } this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); updateGatewayProxy(); clientTelemetry = new ClientTelemetry(this, null, UUID.randomUUID().toString(), ManagementFactory.getRuntimeMXBean().getName(), userAgentContainer.getUserAgent(), connectionPolicy.getConnectionMode(), globalEndpointManager.getLatestDatabaseAccount().getId(), null, null, this.reactorHttpClient, connectionPolicy.isClientTelemetryEnabled(), this, this.connectionPolicy.getPreferredRegions()); clientTelemetry.init(); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } this.retryPolicy.setRxCollectionCache(this.collectionCache); } catch (Exception e) { logger.error("unexpected failure in initializing client.", e); close(); throw e; } }
clientTelemetry = new ClientTelemetry(this, null, UUID.randomUUID().toString(),
public void init(CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, Function<HttpClient, HttpClient> httpClientInterceptor) { try { this.httpClientInterceptor = httpClientInterceptor; if (httpClientInterceptor != null) { this.reactorHttpClient = httpClientInterceptor.apply(httpClient()); } this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode, this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient, this.apiType); this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); if (metadataCachesSnapshot != null) { this.collectionCache = new RxClientCollectionCache(this, this.sessionContainer, this.gatewayProxy, this, this.retryPolicy, metadataCachesSnapshot.getCollectionInfoByNameCache(), metadataCachesSnapshot.getCollectionInfoByIdCache() ); } else { this.collectionCache = new RxClientCollectionCache(this, this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); } this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); updateGatewayProxy(); clientTelemetry = new ClientTelemetry(this, null, UUID.randomUUID().toString(), ManagementFactory.getRuntimeMXBean().getName(), userAgentContainer.getUserAgent(), connectionPolicy.getConnectionMode(), globalEndpointManager.getLatestDatabaseAccount().getId(), null, null, this.reactorHttpClient, connectionPolicy.isClientTelemetryEnabled(), this, this.connectionPolicy.getPreferredRegions()); clientTelemetry.init(); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } this.retryPolicy.setRxCollectionCache(this.collectionCache); } catch (Exception e) { logger.error("unexpected failure in initializing client.", e); close(); throw e; } }
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider, CpuMemoryListener, DiagnosticsClientContext { private static final String tempMachineId = "uuid:" + UUID.randomUUID(); private static final AtomicInteger activeClientsCnt = new AtomicInteger(0); private static final AtomicInteger clientIdGenerator = new AtomicInteger(0); private static final Range<String> RANGE_INCLUDING_ALL_PARTITION_KEY_RANGES = new Range<>( PartitionKeyInternalHelper.MinimumInclusiveEffectivePartitionKey, PartitionKeyInternalHelper.MaximumExclusiveEffectivePartitionKey, true, false); private static final String DUMMY_SQL_QUERY = "this is dummy and only used in creating " + "ParallelDocumentQueryExecutioncontext, but not used"; private final static ObjectMapper mapper = Utils.getSimpleObjectMapper(); private final ItemDeserializer itemDeserializer = new ItemDeserializer.JsonDeserializer(); private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class); private final String masterKeyOrResourceToken; private final URI serviceEndpoint; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel consistencyLevel; private final BaseAuthorizationTokenProvider authorizationTokenProvider; private final UserAgentContainer userAgentContainer; private final boolean hasAuthKeyResourceToken; private final Configs configs; private final boolean connectionSharingAcrossClientsEnabled; private AzureKeyCredential credential; private final TokenCredential tokenCredential; private String[] tokenCredentialScopes; private SimpleTokenCache tokenCredentialCache; private CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver; AuthorizationTokenType authorizationTokenType; private SessionContainer sessionContainer; private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY; private RxClientCollectionCache collectionCache; private RxStoreModel gatewayProxy; private RxStoreModel storeModel; private GlobalAddressResolver addressResolver; private RxPartitionKeyRangeCache partitionKeyRangeCache; private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap; private final boolean contentResponseOnWriteEnabled; private Map<String, PartitionedQueryExecutionInfo> queryPlanCache; private final AtomicBoolean closed = new AtomicBoolean(false); private final int clientId; private ClientTelemetry clientTelemetry; private ApiType apiType; private IRetryPolicyFactory resetSessionTokenRetryPolicy; /** * Compatibility mode: Allows to specify compatibility mode used by client when * making query requests. Should be removed when application/sql is no longer * supported. 
*/ private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default; private final GlobalEndpointManager globalEndpointManager; private final RetryPolicy retryPolicy; private HttpClient reactorHttpClient; private Function<HttpClient, HttpClient> httpClientInterceptor; private volatile boolean useMultipleWriteLocations; private StoreClientFactory storeClientFactory; private GatewayServiceConfigurationReader gatewayConfigurationReader; private final DiagnosticsClientConfig diagnosticsClientConfig; private final AtomicBoolean throughputControlEnabled; private ThroughputControlStore throughputControlStore; public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, AzureKeyCredential credential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, credential, null, sessionCapturingOverride, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, credential, tokenCredential, sessionCapturingOverride, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, credential, tokenCredential, sessionCapturingOverrideEnabled, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); if (permissionFeed != null && permissionFeed.size() > 0) { this.resourceTokensMap = new HashMap<>(); for (Permission permission : permissionFeed) { String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0)); if (segments.length <= 0) { throw new IllegalArgumentException("resourceLink"); } List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null; PathInfo pathInfo = new PathInfo(false, 
StringUtils.EMPTY, StringUtils.EMPTY, false); if (!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) { throw new IllegalArgumentException(permission.getResourceLink()); } partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName); if (partitionKeyAndResourceTokenPairs == null) { partitionKeyAndResourceTokenPairs = new ArrayList<>(); this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs); } PartitionKey partitionKey = permission.getResourcePartitionKey(); partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair( partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty, permission.getToken())); logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token [{}]", pathInfo.resourceIdOrFullName, partitionKey != null ? partitionKey.toString() : null, permission.getToken()); } if(this.resourceTokensMap.isEmpty()) { throw new IllegalArgumentException("permissionFeed"); } String firstToken = permissionFeed.get(0).getToken(); if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) { this.firstResourceTokenFromPermissionFeed = firstToken; } } } RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { activeClientsCnt.incrementAndGet(); this.clientId = clientIdGenerator.getAndDecrement(); this.diagnosticsClientConfig = new DiagnosticsClientConfig(); this.diagnosticsClientConfig.withClientId(this.clientId); this.diagnosticsClientConfig.withActiveClientCounter(activeClientsCnt); this.diagnosticsClientConfig.withConnectionSharingAcrossClientsEnabled(connectionSharingAcrossClientsEnabled); this.diagnosticsClientConfig.withConsistency(consistencyLevel); this.throughputControlEnabled = new AtomicBoolean(false); logger.info( "Initializing DocumentClient [{}] with" + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]", this.clientId, serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol()); try { this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled; this.configs = configs; this.masterKeyOrResourceToken = masterKeyOrResourceToken; this.serviceEndpoint = serviceEndpoint; this.credential = credential; this.tokenCredential = tokenCredential; this.contentResponseOnWriteEnabled = contentResponseOnWriteEnabled; this.authorizationTokenType = AuthorizationTokenType.Invalid; if (this.credential != null) { hasAuthKeyResourceToken = false; this.authorizationTokenType = AuthorizationTokenType.PrimaryMasterKey; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.credential); } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.authorizationTokenProvider = null; hasAuthKeyResourceToken = true; this.authorizationTokenType = AuthorizationTokenType.ResourceToken; } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.credential = new AzureKeyCredential(this.masterKeyOrResourceToken); 
hasAuthKeyResourceToken = false; this.authorizationTokenType = AuthorizationTokenType.PrimaryMasterKey; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.credential); } else { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = null; if (tokenCredential != null) { this.tokenCredentialScopes = new String[] { serviceEndpoint.getScheme() + ": }; this.tokenCredentialCache = new SimpleTokenCache(() -> this.tokenCredential .getToken(new TokenRequestContext().addScopes(this.tokenCredentialScopes))); this.authorizationTokenType = AuthorizationTokenType.AadToken; } } if (connectionPolicy != null) { this.connectionPolicy = connectionPolicy; } else { this.connectionPolicy = new ConnectionPolicy(DirectConnectionConfig.getDefaultConfig()); } this.diagnosticsClientConfig.withConnectionMode(this.getConnectionPolicy().getConnectionMode()); this.diagnosticsClientConfig.withMultipleWriteRegionsEnabled(this.connectionPolicy.isMultipleWriteRegionsEnabled()); this.diagnosticsClientConfig.withEndpointDiscoveryEnabled(this.connectionPolicy.isEndpointDiscoveryEnabled()); this.diagnosticsClientConfig.withPreferredRegions(this.connectionPolicy.getPreferredRegions()); this.diagnosticsClientConfig.withMachineId(tempMachineId); boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled); this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing); this.consistencyLevel = consistencyLevel; this.userAgentContainer = new UserAgentContainer(); String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix(); if (userAgentSuffix != null && userAgentSuffix.length() > 0) { userAgentContainer.setSuffix(userAgentSuffix); } this.httpClientInterceptor = null; this.reactorHttpClient = httpClient(); this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs); this.retryPolicy = new RetryPolicy(this, this.globalEndpointManager, this.connectionPolicy); this.resetSessionTokenRetryPolicy = retryPolicy; CpuMemoryMonitor.register(this); this.queryPlanCache = Collections.synchronizedMap(new SizeLimitingLRUCache(Constants.QUERYPLAN_CACHE_SIZE)); this.apiType = apiType; } catch (RuntimeException e) { logger.error("unexpected failure in initializing client.", e); close(); throw e; } } @Override public DiagnosticsClientConfig getConfig() { return diagnosticsClientConfig; } @Override public CosmosDiagnostics createDiagnostics() { return BridgeInternal.createCosmosDiagnostics(this, this.globalEndpointManager); } private void initializeGatewayConfigurationReader() { this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.globalEndpointManager); DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount(); if (databaseAccount == null) { logger.error("Client initialization failed." + " Check if the endpoint is reachable and if your auth token is valid. More info: https: throw new RuntimeException("Client initialization failed." + " Check if the endpoint is reachable and if your auth token is valid. 
More info: https: } this.useMultipleWriteLocations = this.connectionPolicy.isMultipleWriteRegionsEnabled() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); } private void updateGatewayProxy() { ((RxGatewayStoreModel)this.gatewayProxy).setGatewayServiceConfigurationReader(this.gatewayConfigurationReader); ((RxGatewayStoreModel)this.gatewayProxy).setCollectionCache(this.collectionCache); ((RxGatewayStoreModel)this.gatewayProxy).setPartitionKeyRangeCache(this.partitionKeyRangeCache); ((RxGatewayStoreModel)this.gatewayProxy).setUseMultipleWriteLocations(this.useMultipleWriteLocations); } public void serialize(CosmosClientMetadataCachesSnapshot state) { RxCollectionCache.serialize(state, this.collectionCache); } private void initializeDirectConnectivity() { this.addressResolver = new GlobalAddressResolver(this, this.reactorHttpClient, this.globalEndpointManager, this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy, this.apiType); this.storeClientFactory = new StoreClientFactory( this.addressResolver, this.diagnosticsClientConfig, this.configs, this.connectionPolicy, this.userAgentContainer, this.connectionSharingAcrossClientsEnabled, this.clientTelemetry ); this.createStoreModel(true); } DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; } RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient, ApiType apiType) { return new RxGatewayStoreModel( this, sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient, apiType); } private HttpClient httpClient() { HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs) .withMaxIdleConnectionTimeout(this.connectionPolicy.getIdleHttpConnectionTimeout()) .withPoolSize(this.connectionPolicy.getMaxConnectionPoolSize()) .withProxy(this.connectionPolicy.getProxy()) .withNetworkRequestTimeout(this.connectionPolicy.getHttpNetworkRequestTimeout()); if (connectionSharingAcrossClientsEnabled) { return SharedGatewayHttpClient.getOrCreateInstance(httpClientConfig, diagnosticsClientConfig); } else { diagnosticsClientConfig.withGatewayHttpClientConfig(httpClientConfig); return HttpClient.createFixed(httpClientConfig); } } private void createStoreModel(boolean subscribeRntbdStatus) { StoreClient storeClient = this.storeClientFactory.createStoreClient(this, this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, this.useMultipleWriteLocations ); this.storeModel = new ServerStoreModel(storeClient); } @Override public URI getServiceEndpoint() { return this.serviceEndpoint; } @Override public URI getWriteEndpoint() { return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null); } @Override public URI getReadEndpoint() { return 
globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null); } @Override public ConnectionPolicy getConnectionPolicy() { return this.connectionPolicy; } @Override public boolean isContentResponseOnWriteEnabled() { return contentResponseOnWriteEnabled; } @Override public ConsistencyLevel getConsistencyLevel() { return consistencyLevel; } @Override public ClientTelemetry getClientTelemetry() { return this.clientTelemetry; } @Override public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (database == null) { throw new IllegalArgumentException("Database"); } logger.debug("Creating a Database. id: [{}]", database.getId()); validateResource(database); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Create); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(database); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.DATABASE_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Create, ResourceType.Database, Paths.DATABASES_ROOT, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Deleting a Database. 
databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in deleting a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Reading a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in reading a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Database>> readDatabases(CosmosQueryRequestOptions options) { return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT); } private String parentResourceLinkToQueryLink(String parentResourceLink, ResourceType resourceTypeEnum) { switch (resourceTypeEnum) { case Database: return Paths.DATABASES_ROOT; case DocumentCollection: return Utils.joinPath(parentResourceLink, Paths.COLLECTIONS_PATH_SEGMENT); case Document: return Utils.joinPath(parentResourceLink, Paths.DOCUMENTS_PATH_SEGMENT); case Offer: return Paths.OFFERS_ROOT; case User: return Utils.joinPath(parentResourceLink, Paths.USERS_PATH_SEGMENT); case ClientEncryptionKey: return Utils.joinPath(parentResourceLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT); case Permission: return Utils.joinPath(parentResourceLink, Paths.PERMISSIONS_PATH_SEGMENT); case Attachment: return Utils.joinPath(parentResourceLink, Paths.ATTACHMENTS_PATH_SEGMENT); case StoredProcedure: return Utils.joinPath(parentResourceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); case Trigger: return Utils.joinPath(parentResourceLink, Paths.TRIGGERS_PATH_SEGMENT); case UserDefinedFunction: return Utils.joinPath(parentResourceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); case Conflict: return Utils.joinPath(parentResourceLink, Paths.CONFLICTS_PATH_SEGMENT); default: throw new IllegalArgumentException("resource type not supported"); } } private OperationContextAndListenerTuple getOperationContextAndListenerTuple(CosmosQueryRequestOptions options) { if (options == null) { return null; } return ImplementationBridgeHelpers.CosmosQueryRequestOptionsHelper.getCosmosQueryRequestOptionsAccessor().getOperationContext(options); } private OperationContextAndListenerTuple getOperationContextAndListenerTuple(RequestOptions options) { if (options == null) { return null; } return options.getOperationContextAndListenerTuple(); } private <T extends Resource> Flux<FeedResponse<T>> createQuery( String parentResourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum) { String resourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum); UUID correlationActivityIdOfRequestOptions = ImplementationBridgeHelpers .CosmosQueryRequestOptionsHelper .getCosmosQueryRequestOptionsAccessor() .getCorrelationActivityId(options); UUID correlationActivityId = correlationActivityIdOfRequestOptions != null ? correlationActivityIdOfRequestOptions : Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, resourceLink, ModelBridgeInternal.getPropertiesFromQueryRequestOptions(options)); return ObservableHelper.fluxInlineIfPossibleAsObs( () -> createQueryInternal( resourceLink, sqlQuery, options, klass, resourceTypeEnum, queryClient, correlationActivityId), invalidPartitionExceptionRetryPolicy); } private <T extends Resource> Flux<FeedResponse<T>> createQueryInternal( String resourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum, IDocumentQueryClient queryClient, UUID activityId) { Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory .createDocumentQueryExecutionContextAsync(this, queryClient, resourceTypeEnum, klass, sqlQuery, options, resourceLink, false, activityId, Configs.isQueryPlanCachingEnabled(), queryPlanCache); AtomicBoolean isFirstResponse = new AtomicBoolean(true); return executionContext.flatMap(iDocumentQueryExecutionContext -> { QueryInfo queryInfo = null; if (iDocumentQueryExecutionContext instanceof PipelinedDocumentQueryExecutionContext) { queryInfo = ((PipelinedDocumentQueryExecutionContext<T>) iDocumentQueryExecutionContext).getQueryInfo(); } QueryInfo finalQueryInfo = queryInfo; return iDocumentQueryExecutionContext.executeAsync() .map(tFeedResponse -> { if (finalQueryInfo != null) { if (finalQueryInfo.hasSelectValue()) { ModelBridgeInternal .addQueryInfoToFeedResponse(tFeedResponse, finalQueryInfo); } if (isFirstResponse.compareAndSet(true, false)) { ModelBridgeInternal.addQueryPlanDiagnosticsContextToFeedResponse(tFeedResponse, finalQueryInfo.getQueryPlanDiagnosticsContext()); } } return tFeedResponse; }); }); } @Override public Flux<FeedResponse<Database>> queryDatabases(String query, CosmosQueryRequestOptions options) { return queryDatabases(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database); } @Override public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink, DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink, DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Creating a Collection. 
databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId()); validateResource(collection); String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Create); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Create, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); }); } catch (Exception e) { logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Replacing a Collection. 
id: [{}]", collection.getId()); validateResource(collection); String path = Utils.joinPath(collection.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Replace); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { if (resourceResponse.getResource() != null) { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); } }); } catch (Exception e) { logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting a Collection. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.DELETE) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> deleteAllItemsByPartitionKey(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { RxStoreModel storeProxy = this.getStoreProxy(requestPopulated); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return storeProxy.processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { return populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> getStoreProxy(requestPopulated).processMessage(requestPopulated)); } private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> this.getStoreProxy(requestPopulated).processMessage(requestPopulated) .map(response -> { this.captureSessionToken(requestPopulated, response); return response; } )); } @Override public Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class, Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, CosmosQueryRequestOptions options) { return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection); } @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection); } private static String serializeProcedureParams(List<Object> objectArray) { String[] stringArray = new String[objectArray.size()]; for (int i = 0; i < objectArray.size(); ++i) { Object object = objectArray.get(i); if (object instanceof JsonSerializable) { stringArray[i] = ModelBridgeInternal.toJsonFromJsonSerializable((JsonSerializable) object); } else { try { stringArray[i] = mapper.writeValueAsString(object); } catch (IOException e) { throw new IllegalArgumentException("Can't serialize the object into the json string", e); } } } return String.format("[%s]", StringUtils.join(stringArray, ",")); } private static void validateResource(Resource resource) { if (!StringUtils.isEmpty(resource.getId())) { if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1 || resource.getId().indexOf('?') != -1 || resource.getId().indexOf(' throw new IllegalArgumentException("Id contains illegal chars."); } if (resource.getId().endsWith(" ")) { throw new IllegalArgumentException("Id ends with a space."); } } } private Map<String, String> getRequestHeaders(RequestOptions options, ResourceType resourceType, OperationType operationType) { Map<String, String> headers = new HashMap<>(); if (this.useMultipleWriteLocations) { headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString()); } if (consistencyLevel != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString()); } if (options == null) { if (!this.contentResponseOnWriteEnabled && resourceType.equals(ResourceType.Document) && operationType.isWriteOperation()) { headers.put(HttpConstants.HttpHeaders.PREFER, HttpConstants.HeaderValues.PREFER_RETURN_MINIMAL); } return headers; } Map<String, 
String> customOptions = options.getHeaders(); if (customOptions != null) { headers.putAll(customOptions); } boolean contentResponseOnWriteEnabled = this.contentResponseOnWriteEnabled; if (options.isContentResponseOnWriteEnabled() != null) { contentResponseOnWriteEnabled = options.isContentResponseOnWriteEnabled(); } if (!contentResponseOnWriteEnabled && resourceType.equals(ResourceType.Document) && operationType.isWriteOperation()) { headers.put(HttpConstants.HttpHeaders.PREFER, HttpConstants.HeaderValues.PREFER_RETURN_MINIMAL); } if (options.getIfMatchETag() != null) { headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getIfMatchETag()); } if(options.getIfNoneMatchETag() != null) { headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH, options.getIfNoneMatchETag()); } if (options.getConsistencyLevel() != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString()); } if (options.getIndexingDirective() != null) { headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString()); } if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) { String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude); } if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) { String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude); } if (!Strings.isNullOrEmpty(options.getSessionToken())) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken()); } if (options.getResourceTokenExpirySeconds() != null) { headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY, String.valueOf(options.getResourceTokenExpirySeconds())); } if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString()); } else if (options.getOfferType() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType()); } if (options.getOfferThroughput() == null) { if (options.getThroughputProperties() != null) { Offer offer = ModelBridgeInternal.getOfferFromThroughputProperties(options.getThroughputProperties()); final OfferAutoscaleSettings offerAutoscaleSettings = offer.getOfferAutoScaleSettings(); OfferAutoscaleAutoUpgradeProperties autoscaleAutoUpgradeProperties = null; if (offerAutoscaleSettings != null) { autoscaleAutoUpgradeProperties = offer.getOfferAutoScaleSettings().getAutoscaleAutoUpgradeProperties(); } if (offer.hasOfferThroughput() && (offerAutoscaleSettings != null && offerAutoscaleSettings.getMaxThroughput() >= 0 || autoscaleAutoUpgradeProperties != null && autoscaleAutoUpgradeProperties .getAutoscaleThroughputProperties() .getIncrementPercent() >= 0)) { throw new IllegalArgumentException("Autoscale provisioned throughput can not be configured with " + "fixed offer"); } if (offer.hasOfferThroughput()) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, String.valueOf(offer.getThroughput())); } else if (offer.getOfferAutoScaleSettings() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_AUTOPILOT_SETTINGS, ModelBridgeInternal.toJsonFromJsonSerializable(offer.getOfferAutoScaleSettings())); } } } if (options.isQuotaInfoEnabled()) { headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true)); } 
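// Descriptive note (added): the remaining header population below covers optional per-request settings — stored-procedure script logging and dedicated-gateway integrated-cache staleness — before the assembled header map is returned.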
if (options.isScriptLoggingEnabled()) { headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true)); } if (options.getDedicatedGatewayRequestOptions() != null && options.getDedicatedGatewayRequestOptions().getMaxIntegratedCacheStaleness() != null) { headers.put(HttpConstants.HttpHeaders.DEDICATED_GATEWAY_PER_REQUEST_CACHE_STALENESS, String.valueOf(Utils.getMaxIntegratedCacheStalenessInMillis(options.getDedicatedGatewayRequestOptions()))); } return headers; } public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return this.resetSessionTokenRetryPolicy; } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Document document, RequestOptions options) { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return collectionObs .map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object document, RequestOptions options, Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) { return collectionObs.map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private void addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object objectDoc, RequestOptions options, DocumentCollection collection) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); PartitionKeyInternal partitionKeyInternal = null; if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){ partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else if (options != null && options.getPartitionKey() != null) { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey()); } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) { partitionKeyInternal = PartitionKeyInternal.getEmpty(); } else if (contentAsByteBuffer != null || objectDoc != null) { InternalObjectNode internalObjectNode; if (objectDoc instanceof InternalObjectNode) { internalObjectNode = (InternalObjectNode) objectDoc; } else if (objectDoc instanceof ObjectNode) { internalObjectNode = new InternalObjectNode((ObjectNode)objectDoc); } else if (contentAsByteBuffer != null) { contentAsByteBuffer.rewind(); internalObjectNode = new InternalObjectNode(contentAsByteBuffer); } else { throw new IllegalStateException("ContentAsByteBuffer and objectDoc are null"); } Instant serializationStartTime = Instant.now(); partitionKeyInternal = extractPartitionKeyValueFromDocument(internalObjectNode, partitionKeyDefinition); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTime, serializationEndTime, SerializationDiagnosticsContext.SerializationType.PARTITION_KEY_FETCH_SERIALIZATION ); SerializationDiagnosticsContext serializationDiagnosticsContext = 
BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } } else { throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation."); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } public static PartitionKeyInternal extractPartitionKeyValueFromDocument( InternalObjectNode document, PartitionKeyDefinition partitionKeyDefinition) { if (partitionKeyDefinition != null) { switch (partitionKeyDefinition.getKind()) { case HASH: String path = partitionKeyDefinition.getPaths().iterator().next(); List<String> parts = PathParser.getPathParts(path); if (parts.size() >= 1) { Object value = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, parts); if (value == null || value.getClass() == ObjectNode.class) { value = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } if (value instanceof PartitionKeyInternal) { return (PartitionKeyInternal) value; } else { return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false); } } break; case MULTI_HASH: Object[] partitionKeyValues = new Object[partitionKeyDefinition.getPaths().size()]; for(int pathIter = 0 ; pathIter < partitionKeyDefinition.getPaths().size(); pathIter++){ String partitionPath = partitionKeyDefinition.getPaths().get(pathIter); List<String> partitionPathParts = PathParser.getPathParts(partitionPath); partitionKeyValues[pathIter] = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, partitionPathParts); } return PartitionKeyInternal.fromObjectArray(partitionKeyValues, false); default: throw new IllegalArgumentException("Unrecognized Partition kind: " + partitionKeyDefinition.getKind()); } } return null; } private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) { if (StringUtils.isEmpty(documentCollectionLink)) { throw new IllegalArgumentException("documentCollectionLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = BridgeInternal.serializeJsonToByteBuffer(document, mapper); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { 
serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return addPartitionKeyInformation(request, content, document, options, collectionObs); } private Mono<RxDocumentServiceRequest> getBatchDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, boolean disableAutomaticIdGeneration) { checkArgument(StringUtils.isNotEmpty(documentCollectionLink), "expected non empty documentCollectionLink"); checkNotNull(serverBatchRequest, "expected non null serverBatchRequest"); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = ByteBuffer.wrap(Utils.getUTF8Bytes(serverBatchRequest.getRequestBody())); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Batch); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Batch, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return collectionObs.map((Utils.ValueHolder<DocumentCollection> collectionValueHolder) -> { addBatchHeaders(request, serverBatchRequest, collectionValueHolder.v); return request; }); } private RxDocumentServiceRequest addBatchHeaders(RxDocumentServiceRequest request, ServerBatchRequest serverBatchRequest, DocumentCollection collection) { if(serverBatchRequest instanceof SinglePartitionKeyServerBatchRequest) { PartitionKey partitionKey = ((SinglePartitionKeyServerBatchRequest) serverBatchRequest).getPartitionKeyValue(); PartitionKeyInternal partitionKeyInternal; if (partitionKey.equals(PartitionKey.NONE)) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(partitionKey); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } else if(serverBatchRequest instanceof PartitionKeyRangeServerBatchRequest) { request.setPartitionKeyRangeIdentity(new PartitionKeyRangeIdentity(((PartitionKeyRangeServerBatchRequest) serverBatchRequest).getPartitionKeyRangeId())); } else { throw new UnsupportedOperationException("Unknown Server 
request."); } request.getHeaders().put(HttpConstants.HttpHeaders.IS_BATCH_REQUEST, Boolean.TRUE.toString()); request.getHeaders().put(HttpConstants.HttpHeaders.IS_BATCH_ATOMIC, String.valueOf(serverBatchRequest.isAtomicBatch())); request.getHeaders().put(HttpConstants.HttpHeaders.SHOULD_BATCH_CONTINUE_ON_ERROR, String.valueOf(serverBatchRequest.isShouldContinueOnError())); request.setNumberOfItemsInBatchRequest(serverBatchRequest.getOperations().size()); return request; } private Mono<RxDocumentServiceRequest> populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) { request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); if (this.masterKeyOrResourceToken != null || this.resourceTokensMap != null || this.cosmosAuthorizationTokenResolver != null || this.credential != null) { String resourceName = request.getResourceAddress(); String authorization = this.getUserAuthorizationToken( resourceName, request.getResourceType(), httpMethod, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, request.properties); try { authorization = URLEncoder.encode(authorization, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("Failed to encode authtoken.", e); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); } if (this.apiType != null) { request.getHeaders().put(HttpConstants.HttpHeaders.API_TYPE, this.apiType.toString()); } if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod)) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); } if (RequestVerb.PATCH.equals(httpMethod) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON_PATCH); } if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) { request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } MetadataDiagnosticsContext metadataDiagnosticsCtx = BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics); if (this.requiresFeedRangeFiltering(request)) { return request.getFeedRange() .populateFeedRangeFilteringHeaders( this.getPartitionKeyRangeCache(), request, this.collectionCache.resolveCollectionAsync(metadataDiagnosticsCtx, request)) .flatMap(this::populateAuthorizationHeader); } return this.populateAuthorizationHeader(request); } private boolean requiresFeedRangeFiltering(RxDocumentServiceRequest request) { if (request.getResourceType() != ResourceType.Document && request.getResourceType() != ResourceType.Conflict) { return false; } switch (request.getOperationType()) { case ReadFeed: case Query: case SqlQuery: return request.getFeedRange() != null; default: return false; } } @Override public Mono<RxDocumentServiceRequest> populateAuthorizationHeader(RxDocumentServiceRequest request) { if (request == null) { throw new IllegalArgumentException("request"); } if (this.authorizationTokenType == AuthorizationTokenType.AadToken) { return AadTokenAuthorizationHelper.getAuthorizationToken(this.tokenCredentialCache) .map(authorization -> { request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); return request; }); } else { return Mono.just(request); } } @Override public Mono<HttpHeaders> populateAuthorizationHeader(HttpHeaders httpHeaders) { if (httpHeaders == 
null) { throw new IllegalArgumentException("httpHeaders"); } if (this.authorizationTokenType == AuthorizationTokenType.AadToken) { return AadTokenAuthorizationHelper.getAuthorizationToken(this.tokenCredentialCache) .map(authorization -> { httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); return httpHeaders; }); } return Mono.just(httpHeaders); } @Override public AuthorizationTokenType getAuthorizationTokenType() { return this.authorizationTokenType; } @Override public String getUserAuthorizationToken(String resourceName, ResourceType resourceType, RequestVerb requestVerb, Map<String, String> headers, AuthorizationTokenType tokenType, Map<String, Object> properties) { if (this.cosmosAuthorizationTokenResolver != null) { return this.cosmosAuthorizationTokenResolver.getAuthorizationToken(requestVerb.toUpperCase(), resourceName, this.resolveCosmosResourceType(resourceType).toString(), properties != null ? Collections.unmodifiableMap(properties) : null); } else if (credential != null) { return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName, resourceType, headers); } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) { return masterKeyOrResourceToken; } else { assert resourceTokensMap != null; if(resourceType.equals(ResourceType.DatabaseAccount)) { return this.firstResourceTokenFromPermissionFeed; } return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers); } } private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) { CosmosResourceType cosmosResourceType = ModelBridgeInternal.fromServiceSerializedFormat(resourceType.toString()); if (cosmosResourceType == null) { return CosmosResourceType.SYSTEM; } return cosmosResourceType; } void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) { this.sessionContainer.setSessionToken(request, response.getResponseHeaders()); } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { RxStoreModel storeProxy = this.getStoreProxy(requestPopulated); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return storeProxy.processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { Map<String, String> headers = requestPopulated.getHeaders(); assert (headers != null); headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true"); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated, operationContextAndListenerTuple) .map(response -> { this.captureSessionToken(requestPopulated, response); return response; } ); }); } private Mono<RxDocumentServiceResponse> 
replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.PUT) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.PATCH); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(request).processMessage(request); } @Override public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy); } private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) { try { logger.debug("Creating a Document. collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(requestRetryPolicy, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Create); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> create(request, requestRetryPolicy, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance); } private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(retryPolicyInstance, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Upsert); Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = Utils.getCollectionName(documentLink); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Document typedDocument = documentFromObject(document, mapper); return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = document.getSelfLink(); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (document == null) { throw new IllegalArgumentException("document"); } return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a database due to [{}]", e.getMessage()); return Mono.error(e); } } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { if (document == null) { throw new IllegalArgumentException("document"); } logger.debug("Replacing a Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Document, OperationType.Replace); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = serializeJsonToByteBuffer(document); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTime, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs); return requestObs.flatMap(req -> replace(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class))); } @Override public Mono<ResourceResponse<Document>> patchDocument(String documentLink, CosmosPatchOperations cosmosPatchOperations, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> patchDocumentInternal(documentLink, cosmosPatchOperations, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Document>> patchDocumentInternal(String documentLink, CosmosPatchOperations cosmosPatchOperations, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { checkArgument(StringUtils.isNotEmpty(documentLink), "expected non empty documentLink"); checkNotNull(cosmosPatchOperations, "expected non null cosmosPatchOperations"); logger.debug("Running patch operations on Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Document, OperationType.Patch); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = ByteBuffer.wrap(PatchUtil.serializeCosmosPatchToByteArray(cosmosPatchOperations, options)); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTime, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Patch, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation( request, null, null, options, collectionObs); return requestObs.flatMap(req -> patch(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class))); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, null, options, requestRetryPolicy), requestRetryPolicy); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, InternalObjectNode internalObjectNode, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, internalObjectNode, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, InternalObjectNode internalObjectNode, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Deleting a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, internalObjectNode, options, collectionObs); return requestObs.flatMap(req -> this .delete(req, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class))); } catch (Exception e) { logger.debug("Failure in deleting a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> deleteAllDocumentsByPartitionKey(String collectionLink, PartitionKey partitionKey, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteAllDocumentsByPartitionKeyInternal(collectionLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteAllDocumentsByPartitionKeyInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting all items by Partition Key. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.PartitionKey, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.PartitionKey, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> this .deleteAllItemsByPartitionKey(req, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class))); } catch (Exception e) { logger.debug("Failure in deleting documents due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Reading a Document. documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); }); } catch (Exception e) { logger.debug("Failure in reading a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Flux<FeedResponse<Document>> readDocuments(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return queryDocuments(collectionLink, "SELECT * FROM r", options); } @Override public <T> Mono<FeedResponse<T>> readMany( List<CosmosItemIdentity> itemIdentityList, String collectionLink, CosmosQueryRequestOptions options, Class<T> klass) { String resourceLink = parentResourceLinkToQueryLink(collectionLink, ResourceType.Document); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Query, ResourceType.Document, collectionLink, null ); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = 
collectionCache.resolveCollectionAsync(null, request); return collectionObs .flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } final PartitionKeyDefinition pkDefinition = collection.getPartitionKey(); Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache .tryLookupAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), null, null); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { Map<PartitionKeyRange, List<CosmosItemIdentity>> partitionRangeItemKeyMap = new HashMap<>(); CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } itemIdentityList .forEach(itemIdentity -> { String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString( BridgeInternal.getPartitionKeyInternal( itemIdentity.getPartitionKey()), pkDefinition); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); if (partitionRangeItemKeyMap.get(range) == null) { List<CosmosItemIdentity> list = new ArrayList<>(); list.add(itemIdentity); partitionRangeItemKeyMap.put(range, list); } else { List<CosmosItemIdentity> pairs = partitionRangeItemKeyMap.get(range); pairs.add(itemIdentity); partitionRangeItemKeyMap.put(range, pairs); } }); Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap; rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey()); return createReadManyQuery( resourceLink, new SqlQuerySpec(DUMMY_SQL_QUERY), options, Document.class, ResourceType.Document, collection, Collections.unmodifiableMap(rangeQueryMap)) .collectList() .map(feedList -> { List<T> finalList = new ArrayList<>(); HashMap<String, String> headers = new HashMap<>(); ConcurrentMap<String, QueryMetrics> aggregatedQueryMetrics = new ConcurrentHashMap<>(); double requestCharge = 0; for (FeedResponse<Document> page : feedList) { ConcurrentMap<String, QueryMetrics> pageQueryMetrics = ModelBridgeInternal.queryMetrics(page); if (pageQueryMetrics != null) { pageQueryMetrics.forEach( aggregatedQueryMetrics::putIfAbsent); } requestCharge += page.getRequestCharge(); finalList.addAll(page.getResults().stream().map(document -> ModelBridgeInternal.toObjectFromJsonSerializable(document, klass)).collect(Collectors.toList())); } headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double .toString(requestCharge)); FeedResponse<T> frp = BridgeInternal .createFeedResponse(finalList, headers); return frp; }); }); } ); } private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap( Map<PartitionKeyRange, List<CosmosItemIdentity>> partitionRangeItemKeyMap, PartitionKeyDefinition partitionKeyDefinition) { Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>(); String partitionKeySelector = createPkSelector(partitionKeyDefinition); for(Map.Entry<PartitionKeyRange, List<CosmosItemIdentity>> entry: partitionRangeItemKeyMap.entrySet()) { SqlQuerySpec sqlQuerySpec; if (partitionKeySelector.equals("[\"id\"]")) { sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector); } else { sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector); } rangeQueryMap.put(entry.getKey(), sqlQuerySpec); } return rangeQueryMap; } private 
SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame( List<CosmosItemIdentity> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { CosmosItemIdentity itemIdentity = idPartitionKeyPairList.get(i); String idValue = itemIdentity.getId(); String idParamName = "@param" + i; PartitionKey pkValueAsPartitionKey = itemIdentity.getPartitionKey(); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); if (!Objects.equals(idValue, pkValue)) { continue; } parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append(idParamName); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(", "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private SqlQuerySpec createReadManyQuerySpec(List<CosmosItemIdentity> itemIdentities, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE ( "); for (int i = 0; i < itemIdentities.size(); i++) { CosmosItemIdentity itemIdentity = itemIdentities.get(i); PartitionKey pkValueAsPartitionKey = itemIdentity.getPartitionKey(); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); String pkParamName = "@param" + (2 * i); parameters.add(new SqlParameter(pkParamName, pkValue)); String idValue = itemIdentity.getId(); String idParamName = "@param" + (2 * i + 1); parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append("("); queryStringBuilder.append("c.id = "); queryStringBuilder.append(idParamName); queryStringBuilder.append(" AND "); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); queryStringBuilder.append(" )"); if (i < itemIdentities.size() - 1) { queryStringBuilder.append(" OR "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) { return partitionKeyDefinition.getPaths() .stream() .map(pathPart -> StringUtils.substring(pathPart, 1)) .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\")) .map(part -> "[\"" + part + "\"]") .collect(Collectors.joining()); } private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery( String parentResourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum, DocumentCollection collection, Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) { UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createReadManyQueryAsync(this, queryClient, collection.getResourceId(), sqlQuery, rangeQueryMap, options, collection.getResourceId(), parentResourceLink, activityId, klass, resourceTypeEnum); return executionContext.flatMap(IDocumentQueryExecutionContext<T>::executeAsync); } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryDocuments(collectionLink, new SqlQuerySpec(query), options); } private IDocumentQueryClient documentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl, OperationContextAndListenerTuple operationContextAndListenerTuple) { return new IDocumentQueryClient () { @Override public RxCollectionCache getCollectionCache() { return RxDocumentClientImpl.this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return RxDocumentClientImpl.this.partitionKeyRangeCache; } @Override public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy; } @Override public ConsistencyLevel getDefaultConsistencyLevelAsync() { return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel(); } @Override public ConsistencyLevel getDesiredConsistencyLevelAsync() { return RxDocumentClientImpl.this.consistencyLevel; } @Override public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) { if (operationContextAndListenerTuple == null) { return RxDocumentClientImpl.this.query(request).single(); } else { final OperationListener listener = operationContextAndListenerTuple.getOperationListener(); final OperationContext operationContext = operationContextAndListenerTuple.getOperationContext(); request.getHeaders().put(HttpConstants.HttpHeaders.CORRELATED_ACTIVITY_ID, operationContext.getCorrelationActivityId()); listener.requestListener(operationContext, request); return RxDocumentClientImpl.this.query(request).single().doOnNext( response -> listener.responseListener(operationContext, response) ).doOnError( ex -> listener.exceptionListener(operationContext, ex) ); } } @Override public QueryCompatibilityMode getQueryCompatibilityMode() { return QueryCompatibilityMode.Default; } @Override public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) { return null; } }; } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { SqlQuerySpecLogger.getInstance().logQuery(querySpec); return createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document); } @Override public Flux<FeedResponse<Document>> queryDocumentChangeFeed( final DocumentCollection collection, final CosmosChangeFeedRequestOptions changeFeedOptions) { checkNotNull(collection, "Argument 'collection' must not be null."); ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<>( this, ResourceType.Document, Document.class, collection.getAltLink(), collection.getResourceId(), changeFeedOptions); return changeFeedQueryImpl.executeAsync(); } @Override public Flux<FeedResponse<Document>> readAllDocuments( String collectionLink, PartitionKey partitionKey, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (partitionKey == null) { throw new 
IllegalArgumentException("partitionKey"); } RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Query, ResourceType.Document, collectionLink, null ); Flux<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request).flux(); return collectionObs.flatMap(documentCollectionResourceResponse -> { DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } PartitionKeyDefinition pkDefinition = collection.getPartitionKey(); String pkSelector = createPkSelector(pkDefinition); SqlQuerySpec querySpec = createLogicalPartitionScanQuerySpec(partitionKey, pkSelector); String resourceLink = parentResourceLinkToQueryLink(collectionLink, ResourceType.Document); UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); final CosmosQueryRequestOptions effectiveOptions = ModelBridgeInternal.createQueryRequestOptions(options); InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, resourceLink, ModelBridgeInternal.getPropertiesFromQueryRequestOptions(effectiveOptions)); return ObservableHelper.fluxInlineIfPossibleAsObs( () -> { Flux<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = this.partitionKeyRangeCache .tryLookupAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), null, null).flux(); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString( BridgeInternal.getPartitionKeyInternal(partitionKey), pkDefinition); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); return createQueryInternal( resourceLink, querySpec, ModelBridgeInternal.setPartitionKeyRangeIdInternal(effectiveOptions, range.getId()), Document.class, ResourceType.Document, queryClient, activityId); }); }, invalidPartitionExceptionRetryPolicy); }); } @Override public Map<String, PartitionedQueryExecutionInfo> getQueryPlanCache() { return queryPlanCache; } @Override public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class, Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT)); } private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } validateResource(storedProcedure); String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, operationType); RxDocumentServiceRequest request = 
RxDocumentServiceRequest.create(this, operationType, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); return request; } private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink, UserDefinedFunction udf, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (udf == null) { throw new IllegalArgumentException("udf"); } validateResource(udf); String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Create); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } logger.debug("Replacing a StoredProcedure. storedProcedure id [{}]", storedProcedure.getId()); RxDocumentClientImpl.validateResource(storedProcedure); String path = Utils.joinPath(storedProcedure.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Deleting a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Reading a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class, Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT)); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, List<Object> procedureParams) { return this.executeStoredProcedure(storedProcedureLink, null, procedureParams); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, RequestOptions options, List<Object> procedureParams) { DocumentClientRetryPolicy documentClientRetryPolicy = 
this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams, documentClientRetryPolicy), documentClientRetryPolicy); } @Override public Mono<CosmosBatchResponse> executeBatchRequest(String collectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> executeBatchRequestInternal(collectionLink, serverBatchRequest, options, documentClientRetryPolicy, disableAutomaticIdGeneration), documentClientRetryPolicy); } private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink, RequestOptions options, List<Object> procedureParams, DocumentClientRetryPolicy retryPolicy) { try { logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.ExecuteJavaScript); requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.ExecuteJavaScript, ResourceType.StoredProcedure, path, procedureParams != null && !procedureParams.isEmpty() ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "", requestHeaders, options); if (retryPolicy != null) { retryPolicy.onBeforeSendRequest(request); } Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> create(request, retryPolicy, getOperationContextAndListenerTuple(options)) .map(response -> { this.captureSessionToken(request, response); return toStoredProcedureResponse(response); })); } catch (Exception e) { logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<CosmosBatchResponse> executeBatchRequestInternal(String collectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, DocumentClientRetryPolicy requestRetryPolicy, boolean disableAutomaticIdGeneration) { try { logger.debug("Executing a Batch request with number of operations {}", serverBatchRequest.getOperations().size()); Mono<RxDocumentServiceRequest> requestObs = getBatchDocumentRequest(requestRetryPolicy, collectionLink, serverBatchRequest, options, disableAutomaticIdGeneration); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> create(request, requestRetryPolicy, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> BatchResponseParser.fromDocumentServiceResponse(serviceResponse, serverBatchRequest, true)); } catch (Exception ex) { logger.debug("Failure in executing a batch due to [{}]", ex.getMessage(), ex); return Mono.error(ex); } } @Override public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> createTriggerInternal(String 
collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a Trigger. collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Trigger. collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (trigger == null) { throw new IllegalArgumentException("trigger"); } RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Trigger, path, trigger, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (trigger == null) { throw new IllegalArgumentException("trigger"); } logger.debug("Replacing a Trigger. 
trigger id [{}]", trigger.getId()); RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(trigger.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Trigger, path, trigger, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Deleting a Trigger. triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Reading a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Trigger, Trigger.class, Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryTriggers(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger); } @Override public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a UserDefinedFunction. 
collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (udf == null) { throw new IllegalArgumentException("udf"); } logger.debug("Replacing a UserDefinedFunction. udf id [{}]", udf.getId()); validateResource(udf); String path = Utils.joinPath(udf.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Deleting a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Reading a UserDefinedFunction. udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class, Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class, ResourceType.UserDefinedFunction); } @Override public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Conflict, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Conflict, Conflict.class, Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryConflicts(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict); } @Override public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Deleting a Conflict. 
conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Conflict, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a User. databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a User. 
databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (user == null) { throw new IllegalArgumentException("user"); } RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.User, path, user, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (user == null) { throw new IllegalArgumentException("user"); } logger.debug("Replacing a User. user id [{}]", user.getId()); RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(user.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.User, path, user, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Deleting a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Reading a User. userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, CosmosQueryRequestOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<ClientEncryptionKey>> readClientEncryptionKey(String clientEncryptionKeyLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readClientEncryptionKeyInternal(clientEncryptionKeyLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> readClientEncryptionKeyInternal(String clientEncryptionKeyLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(clientEncryptionKeyLink)) { throw new IllegalArgumentException("clientEncryptionKeyLink"); } logger.debug("Reading a client encryption key. 
clientEncryptionKeyLink [{}]", clientEncryptionKeyLink); String path = Utils.joinPath(clientEncryptionKeyLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.ClientEncryptionKey, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in reading a client encryption key due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<ClientEncryptionKey>> createClientEncryptionKey(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createClientEncryptionKeyInternal(databaseLink, clientEncryptionKey, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> createClientEncryptionKeyInternal(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a client encryption key. databaseLink [{}], clientEncryptionKey id [{}]", databaseLink, clientEncryptionKey.getId()); RxDocumentServiceRequest request = getClientEncryptionKeyRequest(databaseLink, clientEncryptionKey, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in creating a client encryption key due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getClientEncryptionKeyRequest(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (clientEncryptionKey == null) { throw new IllegalArgumentException("clientEncryptionKey"); } RxDocumentClientImpl.validateResource(clientEncryptionKey); String path = Utils.joinPath(databaseLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.ClientEncryptionKey, path, clientEncryptionKey, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<ClientEncryptionKey>> replaceClientEncryptionKey(ClientEncryptionKey clientEncryptionKey, String nameBasedLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceClientEncryptionKeyInternal(clientEncryptionKey, nameBasedLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> replaceClientEncryptionKeyInternal(ClientEncryptionKey clientEncryptionKey, String nameBasedLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (clientEncryptionKey 
== null) { throw new IllegalArgumentException("clientEncryptionKey"); } logger.debug("Replacing a clientEncryptionKey. clientEncryptionKey id [{}]", clientEncryptionKey.getId()); RxDocumentClientImpl.validateResource(clientEncryptionKey); String path = Utils.joinPath(nameBasedLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.ClientEncryptionKey, path, clientEncryptionKey, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in replacing a clientEncryptionKey due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<ClientEncryptionKey>> readClientEncryptionKeys(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.ClientEncryptionKey, ClientEncryptionKey.class, Utils.joinPath(databaseLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT)); } @Override public Flux<FeedResponse<ClientEncryptionKey>> queryClientEncryptionKeys(String databaseLink, String query, CosmosQueryRequestOptions options) { return queryClientEncryptionKeys(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<ClientEncryptionKey>> queryClientEncryptionKeys(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, ClientEncryptionKey.class, ResourceType.ClientEncryptionKey); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Permission. userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } if (permission == null) { throw new IllegalArgumentException("permission"); } RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Permission, path, permission, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (permission == null) { throw new IllegalArgumentException("permission"); } logger.debug("Replacing a Permission. 
permission id [{}]", permission.getId()); RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(permission.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Permission, path, permission, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Deleting a Permission. permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance ) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Reading a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Permission>> readPermissions(String userLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } return readFeed(options, ResourceType.Permission, Permission.class, Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, CosmosQueryRequestOptions options) { return queryPermissions(userLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission); } @Override public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) { try { if (offer == null) { throw new IllegalArgumentException("offer"); } logger.debug("Replacing an Offer. offer id [{}]", offer.getId()); RxDocumentClientImpl.validateResource(offer); String path = Utils.joinPath(offer.getSelfLink(), null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Offer, path, offer, null, null); return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Offer>> readOffer(String offerLink) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(offerLink)) { throw new IllegalArgumentException("offerLink"); } logger.debug("Reading an Offer. 
offerLink [{}]", offerLink); String path = Utils.joinPath(offerLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Offer, path, (HashMap<String, String>)null, null); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Offer>> readOffers(CosmosQueryRequestOptions options) { return readFeed(options, ResourceType.Offer, Offer.class, Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null)); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(CosmosQueryRequestOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new CosmosQueryRequestOptions(); } Integer maxItemCount = ModelBridgeInternal.getMaxItemCountFromQueryRequestOptions(options); int maxPageSize = maxItemCount != null ? maxItemCount : -1; final CosmosQueryRequestOptions finalCosmosQueryRequestOptions = options; DocumentClientRetryPolicy retryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalCosmosQueryRequestOptions); retryPolicy.onBeforeSendRequest(request); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> ObservableHelper .inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), retryPolicy); return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override public Flux<FeedResponse<Offer>> queryOffers(String query, CosmosQueryRequestOptions options) { return queryOffers(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer); } @Override public Mono<DatabaseAccount> getDatabaseAccount() { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy), documentClientRetryPolicy); } @Override public DatabaseAccount getLatestDatabaseAccount() { return this.globalEndpointManager.getLatestDatabaseAccount(); } private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Getting Database Account"); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null); return this.read(request, documentClientRetryPolicy).map(ModelBridgeInternal::toDatabaseAccount); } catch (Exception e) { logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), 
e); return Mono.error(e); } } public Object getSession() { return this.sessionContainer; } public void setSession(Object sessionContainer) { this.sessionContainer = (SessionContainer) sessionContainer; } @Override public RxClientCollectionCache getCollectionCache() { return this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return partitionKeyRangeCache; } public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { return Flux.defer(() -> { RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DatabaseAccount, "", null, (Object) null); return this.populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> { requestPopulated.setEndpointOverride(endpoint); return this.gatewayProxy.processMessage(requestPopulated).doOnError(e -> { String message = String.format("Failed to retrieve database account information. %s", e.getCause() != null ? e.getCause().toString() : e.toString()); logger.warn(message); }).map(rsp -> rsp.getResource(DatabaseAccount.class)) .doOnNext(databaseAccount -> this.useMultipleWriteLocations = this.connectionPolicy.isMultipleWriteRegionsEnabled() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount)); }); }); } /** * Certain requests must be routed through gateway even when the client connectivity mode is direct. * * @param request * @return RxStoreModel */ private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) { if (request.UseGatewayMode) { return this.gatewayProxy; } ResourceType resourceType = request.getResourceType(); OperationType operationType = request.getOperationType(); if (resourceType == ResourceType.Offer || resourceType == ResourceType.ClientEncryptionKey || resourceType.isScript() && operationType != OperationType.ExecuteJavaScript || resourceType == ResourceType.PartitionKeyRange || resourceType == ResourceType.PartitionKey && operationType == OperationType.Delete) { return this.gatewayProxy; } if (operationType == OperationType.Create || operationType == OperationType.Upsert) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection || resourceType == ResourceType.Permission) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Delete) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Replace) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Read) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else { if ((operationType == OperationType.Query || operationType == OperationType.SqlQuery || operationType == OperationType.ReadFeed) && Utils.isCollectionChild(request.getResourceType())) { if (request.getPartitionKeyRangeIdentity() == null && request.getHeaders().get(HttpConstants.HttpHeaders.PARTITION_KEY) == null) { return this.gatewayProxy; } } return this.storeModel; } } @Override public void close() { logger.info("Attempting to close client {}", this.clientId); if (!closed.getAndSet(true)) { logger.info("Shutting down ..."); logger.info("Closing Global Endpoint Manager ..."); 
LifeCycleUtils.closeQuietly(this.globalEndpointManager); logger.info("Closing StoreClientFactory ..."); LifeCycleUtils.closeQuietly(this.storeClientFactory); logger.info("Shutting down reactorHttpClient ..."); LifeCycleUtils.closeQuietly(this.reactorHttpClient); logger.info("Shutting down CpuMonitor ..."); CpuMemoryMonitor.unregister(this); if (this.throughputControlEnabled.get()) { logger.info("Closing ThroughputControlStore ..."); this.throughputControlStore.close(); } logger.info("Shutting down completed."); } else { logger.warn("Already shutdown!"); } } @Override public ItemDeserializer getItemDeserializer() { return this.itemDeserializer; } @Override public synchronized void enableThroughputControlGroup(ThroughputControlGroupInternal group) { checkNotNull(group, "Throughput control group can not be null"); if (this.throughputControlEnabled.compareAndSet(false, true)) { this.throughputControlStore = new ThroughputControlStore( this.collectionCache, this.connectionPolicy.getConnectionMode(), this.partitionKeyRangeCache); this.storeModel.enableThroughputControl(throughputControlStore); } this.throughputControlStore.enableThroughputControlGroup(group); } private static SqlQuerySpec createLogicalPartitionScanQuerySpec( PartitionKey partitionKey, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE"); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(partitionKey); String pkParamName = "@pkValue"; parameters.add(new SqlParameter(pkParamName, pkValue)); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } @Override public Mono<List<FeedRange>> getFeedRanges(String collectionLink) { InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, collectionLink, new HashMap<>()); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Query, ResourceType.Document, collectionLink, null); invalidPartitionExceptionRetryPolicy.onBeforeSendRequest(request); return ObservableHelper.inlineIfPossibleAsObs( () -> getFeedRangesInternal(request, collectionLink), invalidPartitionExceptionRetryPolicy); } private Mono<List<FeedRange>> getFeedRangesInternal(RxDocumentServiceRequest request, String collectionLink) { logger.debug("getFeedRange collectionLink=[{}]", collectionLink); if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request); return collectionObs.flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } Mono<Utils.ValueHolder<List<PartitionKeyRange>>> valueHolderMono = partitionKeyRangeCache .tryGetOverlappingRangesAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), RANGE_INCLUDING_ALL_PARTITION_KEY_RANGES, true, null); return valueHolderMono.map(partitionKeyRangeList -> toFeedRanges(partitionKeyRangeList, request)); }); } private static List<FeedRange> toFeedRanges( 
Utils.ValueHolder<List<PartitionKeyRange>> partitionKeyRangeListValueHolder, RxDocumentServiceRequest request) { final List<PartitionKeyRange> partitionKeyRangeList = partitionKeyRangeListValueHolder.v; if (partitionKeyRangeList == null) { request.forceNameCacheRefresh = true; throw new InvalidPartitionException(); } List<FeedRange> feedRanges = new ArrayList<>(); partitionKeyRangeList.forEach(pkRange -> feedRanges.add(toFeedRange(pkRange))); return feedRanges; } private static FeedRange toFeedRange(PartitionKeyRange pkRange) { return new FeedRangeEpkImpl(pkRange.toRange()); } }
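The helpers above convert each overlapping PartitionKeyRange into an EPK-based FeedRange returned by getFeedRanges. A minimal, hedged usage sketch follows; it assumes an already-constructed AsyncDocumentClient instance named client and a placeholder collection link, neither of which is taken from the source above.
// Hedged sketch only: "client" and the collection link are illustrative assumptions,
// not values from the surrounding source. getFeedRanges is the method defined above
// and returns Mono<List<FeedRange>>.
Mono<List<FeedRange>> feedRangesMono = client.getFeedRanges("/dbs/sampleDb/colls/sampleColl");
feedRangesMono.subscribe(feedRanges ->
    feedRanges.forEach(feedRange -> System.out.println("feed range: " + feedRange)));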
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider, CpuMemoryListener, DiagnosticsClientContext { private static final String tempMachineId = "uuid:" + UUID.randomUUID(); private static final AtomicInteger activeClientsCnt = new AtomicInteger(0); private static final AtomicInteger clientIdGenerator = new AtomicInteger(0); private static final Range<String> RANGE_INCLUDING_ALL_PARTITION_KEY_RANGES = new Range<>( PartitionKeyInternalHelper.MinimumInclusiveEffectivePartitionKey, PartitionKeyInternalHelper.MaximumExclusiveEffectivePartitionKey, true, false); private static final String DUMMY_SQL_QUERY = "this is dummy and only used in creating " + "ParallelDocumentQueryExecutioncontext, but not used"; private final static ObjectMapper mapper = Utils.getSimpleObjectMapper(); private final ItemDeserializer itemDeserializer = new ItemDeserializer.JsonDeserializer(); private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class); private final String masterKeyOrResourceToken; private final URI serviceEndpoint; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel consistencyLevel; private final BaseAuthorizationTokenProvider authorizationTokenProvider; private final UserAgentContainer userAgentContainer; private final boolean hasAuthKeyResourceToken; private final Configs configs; private final boolean connectionSharingAcrossClientsEnabled; private AzureKeyCredential credential; private final TokenCredential tokenCredential; private String[] tokenCredentialScopes; private SimpleTokenCache tokenCredentialCache; private CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver; AuthorizationTokenType authorizationTokenType; private SessionContainer sessionContainer; private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY; private RxClientCollectionCache collectionCache; private RxStoreModel gatewayProxy; private RxStoreModel storeModel; private GlobalAddressResolver addressResolver; private RxPartitionKeyRangeCache partitionKeyRangeCache; private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap; private final boolean contentResponseOnWriteEnabled; private Map<String, PartitionedQueryExecutionInfo> queryPlanCache; private final AtomicBoolean closed = new AtomicBoolean(false); private final int clientId; private ClientTelemetry clientTelemetry; private ApiType apiType; private IRetryPolicyFactory resetSessionTokenRetryPolicy; /** * Compatibility mode: Allows to specify compatibility mode used by client when * making query requests. Should be removed when application/sql is no longer * supported. 
*/ private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default; private final GlobalEndpointManager globalEndpointManager; private final RetryPolicy retryPolicy; private HttpClient reactorHttpClient; private Function<HttpClient, HttpClient> httpClientInterceptor; private volatile boolean useMultipleWriteLocations; private StoreClientFactory storeClientFactory; private GatewayServiceConfigurationReader gatewayConfigurationReader; private final DiagnosticsClientConfig diagnosticsClientConfig; private final AtomicBoolean throughputControlEnabled; private ThroughputControlStore throughputControlStore; public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, AzureKeyCredential credential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, credential, null, sessionCapturingOverride, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, credential, tokenCredential, sessionCapturingOverride, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, credential, tokenCredential, sessionCapturingOverrideEnabled, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); if (permissionFeed != null && permissionFeed.size() > 0) { this.resourceTokensMap = new HashMap<>(); for (Permission permission : permissionFeed) { String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0)); if (segments.length <= 0) { throw new IllegalArgumentException("resourceLink"); } List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null; PathInfo pathInfo = new PathInfo(false, 
StringUtils.EMPTY, StringUtils.EMPTY, false); if (!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) { throw new IllegalArgumentException(permission.getResourceLink()); } partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName); if (partitionKeyAndResourceTokenPairs == null) { partitionKeyAndResourceTokenPairs = new ArrayList<>(); this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs); } PartitionKey partitionKey = permission.getResourcePartitionKey(); partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair( partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty, permission.getToken())); logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token [{}]", pathInfo.resourceIdOrFullName, partitionKey != null ? partitionKey.toString() : null, permission.getToken()); } if(this.resourceTokensMap.isEmpty()) { throw new IllegalArgumentException("permissionFeed"); } String firstToken = permissionFeed.get(0).getToken(); if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) { this.firstResourceTokenFromPermissionFeed = firstToken; } } } RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { activeClientsCnt.incrementAndGet(); this.clientId = clientIdGenerator.incrementAndGet(); this.diagnosticsClientConfig = new DiagnosticsClientConfig(); this.diagnosticsClientConfig.withClientId(this.clientId); this.diagnosticsClientConfig.withActiveClientCounter(activeClientsCnt); this.diagnosticsClientConfig.withConnectionSharingAcrossClientsEnabled(connectionSharingAcrossClientsEnabled); this.diagnosticsClientConfig.withConsistency(consistencyLevel); this.throughputControlEnabled = new AtomicBoolean(false); logger.info( "Initializing DocumentClient [{}] with" + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]", this.clientId, serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol()); try { this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled; this.configs = configs; this.masterKeyOrResourceToken = masterKeyOrResourceToken; this.serviceEndpoint = serviceEndpoint; this.credential = credential; this.tokenCredential = tokenCredential; this.contentResponseOnWriteEnabled = contentResponseOnWriteEnabled; this.authorizationTokenType = AuthorizationTokenType.Invalid; if (this.credential != null) { hasAuthKeyResourceToken = false; this.authorizationTokenType = AuthorizationTokenType.PrimaryMasterKey; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.credential); } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.authorizationTokenProvider = null; hasAuthKeyResourceToken = true; this.authorizationTokenType = AuthorizationTokenType.ResourceToken; } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.credential = new AzureKeyCredential(this.masterKeyOrResourceToken); 
hasAuthKeyResourceToken = false; this.authorizationTokenType = AuthorizationTokenType.PrimaryMasterKey; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.credential); } else { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = null; if (tokenCredential != null) { this.tokenCredentialScopes = new String[] { serviceEndpoint.getScheme() + ": }; this.tokenCredentialCache = new SimpleTokenCache(() -> this.tokenCredential .getToken(new TokenRequestContext().addScopes(this.tokenCredentialScopes))); this.authorizationTokenType = AuthorizationTokenType.AadToken; } } if (connectionPolicy != null) { this.connectionPolicy = connectionPolicy; } else { this.connectionPolicy = new ConnectionPolicy(DirectConnectionConfig.getDefaultConfig()); } this.diagnosticsClientConfig.withConnectionMode(this.getConnectionPolicy().getConnectionMode()); this.diagnosticsClientConfig.withMultipleWriteRegionsEnabled(this.connectionPolicy.isMultipleWriteRegionsEnabled()); this.diagnosticsClientConfig.withEndpointDiscoveryEnabled(this.connectionPolicy.isEndpointDiscoveryEnabled()); this.diagnosticsClientConfig.withPreferredRegions(this.connectionPolicy.getPreferredRegions()); this.diagnosticsClientConfig.withMachineId(tempMachineId); boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled); this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing); this.consistencyLevel = consistencyLevel; this.userAgentContainer = new UserAgentContainer(); String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix(); if (userAgentSuffix != null && userAgentSuffix.length() > 0) { userAgentContainer.setSuffix(userAgentSuffix); } this.httpClientInterceptor = null; this.reactorHttpClient = httpClient(); this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs); this.retryPolicy = new RetryPolicy(this, this.globalEndpointManager, this.connectionPolicy); this.resetSessionTokenRetryPolicy = retryPolicy; CpuMemoryMonitor.register(this); this.queryPlanCache = Collections.synchronizedMap(new SizeLimitingLRUCache(Constants.QUERYPLAN_CACHE_SIZE)); this.apiType = apiType; } catch (RuntimeException e) { logger.error("unexpected failure in initializing client.", e); close(); throw e; } } @Override public DiagnosticsClientConfig getConfig() { return diagnosticsClientConfig; } @Override public CosmosDiagnostics createDiagnostics() { return BridgeInternal.createCosmosDiagnostics(this, this.globalEndpointManager); } private void initializeGatewayConfigurationReader() { this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.globalEndpointManager); DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount(); if (databaseAccount == null) { logger.error("Client initialization failed." + " Check if the endpoint is reachable and if your auth token is valid. More info: https: throw new RuntimeException("Client initialization failed." + " Check if the endpoint is reachable and if your auth token is valid. 
More info: https: } this.useMultipleWriteLocations = this.connectionPolicy.isMultipleWriteRegionsEnabled() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); } private void updateGatewayProxy() { ((RxGatewayStoreModel)this.gatewayProxy).setGatewayServiceConfigurationReader(this.gatewayConfigurationReader); ((RxGatewayStoreModel)this.gatewayProxy).setCollectionCache(this.collectionCache); ((RxGatewayStoreModel)this.gatewayProxy).setPartitionKeyRangeCache(this.partitionKeyRangeCache); ((RxGatewayStoreModel)this.gatewayProxy).setUseMultipleWriteLocations(this.useMultipleWriteLocations); } public void serialize(CosmosClientMetadataCachesSnapshot state) { RxCollectionCache.serialize(state, this.collectionCache); } private void initializeDirectConnectivity() { this.addressResolver = new GlobalAddressResolver(this, this.reactorHttpClient, this.globalEndpointManager, this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy, this.apiType); this.storeClientFactory = new StoreClientFactory( this.addressResolver, this.diagnosticsClientConfig, this.configs, this.connectionPolicy, this.userAgentContainer, this.connectionSharingAcrossClientsEnabled, this.clientTelemetry ); this.createStoreModel(true); } DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; } RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient, ApiType apiType) { return new RxGatewayStoreModel( this, sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient, apiType); } private HttpClient httpClient() { HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs) .withMaxIdleConnectionTimeout(this.connectionPolicy.getIdleHttpConnectionTimeout()) .withPoolSize(this.connectionPolicy.getMaxConnectionPoolSize()) .withProxy(this.connectionPolicy.getProxy()) .withNetworkRequestTimeout(this.connectionPolicy.getHttpNetworkRequestTimeout()); if (connectionSharingAcrossClientsEnabled) { return SharedGatewayHttpClient.getOrCreateInstance(httpClientConfig, diagnosticsClientConfig); } else { diagnosticsClientConfig.withGatewayHttpClientConfig(httpClientConfig); return HttpClient.createFixed(httpClientConfig); } } private void createStoreModel(boolean subscribeRntbdStatus) { StoreClient storeClient = this.storeClientFactory.createStoreClient(this, this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, this.useMultipleWriteLocations ); this.storeModel = new ServerStoreModel(storeClient); } @Override public URI getServiceEndpoint() { return this.serviceEndpoint; } @Override public URI getWriteEndpoint() { return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null); } @Override public URI getReadEndpoint() { return 
globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null); } @Override public ConnectionPolicy getConnectionPolicy() { return this.connectionPolicy; } @Override public boolean isContentResponseOnWriteEnabled() { return contentResponseOnWriteEnabled; } @Override public ConsistencyLevel getConsistencyLevel() { return consistencyLevel; } @Override public ClientTelemetry getClientTelemetry() { return this.clientTelemetry; } @Override public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (database == null) { throw new IllegalArgumentException("Database"); } logger.debug("Creating a Database. id: [{}]", database.getId()); validateResource(database); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Create); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(database); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.DATABASE_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Create, ResourceType.Database, Paths.DATABASES_ROOT, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Deleting a Database. 
databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in deleting a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Reading a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in reading a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Database>> readDatabases(CosmosQueryRequestOptions options) { return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT); } private String parentResourceLinkToQueryLink(String parentResourceLink, ResourceType resourceTypeEnum) { switch (resourceTypeEnum) { case Database: return Paths.DATABASES_ROOT; case DocumentCollection: return Utils.joinPath(parentResourceLink, Paths.COLLECTIONS_PATH_SEGMENT); case Document: return Utils.joinPath(parentResourceLink, Paths.DOCUMENTS_PATH_SEGMENT); case Offer: return Paths.OFFERS_ROOT; case User: return Utils.joinPath(parentResourceLink, Paths.USERS_PATH_SEGMENT); case ClientEncryptionKey: return Utils.joinPath(parentResourceLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT); case Permission: return Utils.joinPath(parentResourceLink, Paths.PERMISSIONS_PATH_SEGMENT); case Attachment: return Utils.joinPath(parentResourceLink, Paths.ATTACHMENTS_PATH_SEGMENT); case StoredProcedure: return Utils.joinPath(parentResourceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); case Trigger: return Utils.joinPath(parentResourceLink, Paths.TRIGGERS_PATH_SEGMENT); case UserDefinedFunction: return Utils.joinPath(parentResourceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); case Conflict: return Utils.joinPath(parentResourceLink, Paths.CONFLICTS_PATH_SEGMENT); default: throw new IllegalArgumentException("resource type not supported"); } } private OperationContextAndListenerTuple getOperationContextAndListenerTuple(CosmosQueryRequestOptions options) { if (options == null) { return null; } return ImplementationBridgeHelpers.CosmosQueryRequestOptionsHelper.getCosmosQueryRequestOptionsAccessor().getOperationContext(options); } private OperationContextAndListenerTuple getOperationContextAndListenerTuple(RequestOptions options) { if (options == null) { return null; } return options.getOperationContextAndListenerTuple(); } private <T extends Resource> Flux<FeedResponse<T>> createQuery( String parentResourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum) { String resourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum); UUID correlationActivityIdOfRequestOptions = ImplementationBridgeHelpers .CosmosQueryRequestOptionsHelper .getCosmosQueryRequestOptionsAccessor() .getCorrelationActivityId(options); UUID correlationActivityId = correlationActivityIdOfRequestOptions != null ? correlationActivityIdOfRequestOptions : Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, resourceLink, ModelBridgeInternal.getPropertiesFromQueryRequestOptions(options)); return ObservableHelper.fluxInlineIfPossibleAsObs( () -> createQueryInternal( resourceLink, sqlQuery, options, klass, resourceTypeEnum, queryClient, correlationActivityId), invalidPartitionExceptionRetryPolicy); } private <T extends Resource> Flux<FeedResponse<T>> createQueryInternal( String resourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum, IDocumentQueryClient queryClient, UUID activityId) { Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory .createDocumentQueryExecutionContextAsync(this, queryClient, resourceTypeEnum, klass, sqlQuery, options, resourceLink, false, activityId, Configs.isQueryPlanCachingEnabled(), queryPlanCache); AtomicBoolean isFirstResponse = new AtomicBoolean(true); return executionContext.flatMap(iDocumentQueryExecutionContext -> { QueryInfo queryInfo = null; if (iDocumentQueryExecutionContext instanceof PipelinedDocumentQueryExecutionContext) { queryInfo = ((PipelinedDocumentQueryExecutionContext<T>) iDocumentQueryExecutionContext).getQueryInfo(); } QueryInfo finalQueryInfo = queryInfo; return iDocumentQueryExecutionContext.executeAsync() .map(tFeedResponse -> { if (finalQueryInfo != null) { if (finalQueryInfo.hasSelectValue()) { ModelBridgeInternal .addQueryInfoToFeedResponse(tFeedResponse, finalQueryInfo); } if (isFirstResponse.compareAndSet(true, false)) { ModelBridgeInternal.addQueryPlanDiagnosticsContextToFeedResponse(tFeedResponse, finalQueryInfo.getQueryPlanDiagnosticsContext()); } } return tFeedResponse; }); }); } @Override public Flux<FeedResponse<Database>> queryDatabases(String query, CosmosQueryRequestOptions options) { return queryDatabases(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database); } @Override public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink, DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink, DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Creating a Collection. 
databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId()); validateResource(collection); String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Create); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Create, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); }); } catch (Exception e) { logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Replacing a Collection. 
id: [{}]", collection.getId()); validateResource(collection); String path = Utils.joinPath(collection.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Replace); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { if (resourceResponse.getResource() != null) { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); } }); } catch (Exception e) { logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting a Collection. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.DELETE) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> deleteAllItemsByPartitionKey(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { RxStoreModel storeProxy = this.getStoreProxy(requestPopulated); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return storeProxy.processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { return populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> getStoreProxy(requestPopulated).processMessage(requestPopulated)); } private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> this.getStoreProxy(requestPopulated).processMessage(requestPopulated) .map(response -> { this.captureSessionToken(requestPopulated, response); return response; } )); } @Override public Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class, Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, CosmosQueryRequestOptions options) { return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection); } @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection); } private static String serializeProcedureParams(List<Object> objectArray) { String[] stringArray = new String[objectArray.size()]; for (int i = 0; i < objectArray.size(); ++i) { Object object = objectArray.get(i); if (object instanceof JsonSerializable) { stringArray[i] = ModelBridgeInternal.toJsonFromJsonSerializable((JsonSerializable) object); } else { try { stringArray[i] = mapper.writeValueAsString(object); } catch (IOException e) { throw new IllegalArgumentException("Can't serialize the object into the json string", e); } } } return String.format("[%s]", StringUtils.join(stringArray, ",")); } private static void validateResource(Resource resource) { if (!StringUtils.isEmpty(resource.getId())) { if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1 || resource.getId().indexOf('?') != -1 || resource.getId().indexOf('#') != -1) { throw new IllegalArgumentException("Id contains illegal chars."); } if (resource.getId().endsWith(" ")) { throw new IllegalArgumentException("Id ends with a space."); } } } private Map<String, String> getRequestHeaders(RequestOptions options, ResourceType resourceType, OperationType operationType) { Map<String, String> headers = new HashMap<>(); if (this.useMultipleWriteLocations) { headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString()); } if (consistencyLevel != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString()); } if (options == null) { if (!this.contentResponseOnWriteEnabled && resourceType.equals(ResourceType.Document) && operationType.isWriteOperation()) { headers.put(HttpConstants.HttpHeaders.PREFER, HttpConstants.HeaderValues.PREFER_RETURN_MINIMAL); } return headers; } Map<String, 
String> customOptions = options.getHeaders(); if (customOptions != null) { headers.putAll(customOptions); } boolean contentResponseOnWriteEnabled = this.contentResponseOnWriteEnabled; if (options.isContentResponseOnWriteEnabled() != null) { contentResponseOnWriteEnabled = options.isContentResponseOnWriteEnabled(); } if (!contentResponseOnWriteEnabled && resourceType.equals(ResourceType.Document) && operationType.isWriteOperation()) { headers.put(HttpConstants.HttpHeaders.PREFER, HttpConstants.HeaderValues.PREFER_RETURN_MINIMAL); } if (options.getIfMatchETag() != null) { headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getIfMatchETag()); } if(options.getIfNoneMatchETag() != null) { headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH, options.getIfNoneMatchETag()); } if (options.getConsistencyLevel() != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString()); } if (options.getIndexingDirective() != null) { headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString()); } if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) { String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude); } if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) { String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude); } if (!Strings.isNullOrEmpty(options.getSessionToken())) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken()); } if (options.getResourceTokenExpirySeconds() != null) { headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY, String.valueOf(options.getResourceTokenExpirySeconds())); } if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString()); } else if (options.getOfferType() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType()); } if (options.getOfferThroughput() == null) { if (options.getThroughputProperties() != null) { Offer offer = ModelBridgeInternal.getOfferFromThroughputProperties(options.getThroughputProperties()); final OfferAutoscaleSettings offerAutoscaleSettings = offer.getOfferAutoScaleSettings(); OfferAutoscaleAutoUpgradeProperties autoscaleAutoUpgradeProperties = null; if (offerAutoscaleSettings != null) { autoscaleAutoUpgradeProperties = offer.getOfferAutoScaleSettings().getAutoscaleAutoUpgradeProperties(); } if (offer.hasOfferThroughput() && (offerAutoscaleSettings != null && offerAutoscaleSettings.getMaxThroughput() >= 0 || autoscaleAutoUpgradeProperties != null && autoscaleAutoUpgradeProperties .getAutoscaleThroughputProperties() .getIncrementPercent() >= 0)) { throw new IllegalArgumentException("Autoscale provisioned throughput can not be configured with " + "fixed offer"); } if (offer.hasOfferThroughput()) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, String.valueOf(offer.getThroughput())); } else if (offer.getOfferAutoScaleSettings() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_AUTOPILOT_SETTINGS, ModelBridgeInternal.toJsonFromJsonSerializable(offer.getOfferAutoScaleSettings())); } } } if (options.isQuotaInfoEnabled()) { headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true)); } 
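// Descriptive note: the remaining per-request options below (script logging, dedicated gateway
// integrated-cache staleness) are propagated as request headers in the same way as the options handled above.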
if (options.isScriptLoggingEnabled()) { headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true)); } if (options.getDedicatedGatewayRequestOptions() != null && options.getDedicatedGatewayRequestOptions().getMaxIntegratedCacheStaleness() != null) { headers.put(HttpConstants.HttpHeaders.DEDICATED_GATEWAY_PER_REQUEST_CACHE_STALENESS, String.valueOf(Utils.getMaxIntegratedCacheStalenessInMillis(options.getDedicatedGatewayRequestOptions()))); } return headers; } public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return this.resetSessionTokenRetryPolicy; } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Document document, RequestOptions options) { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return collectionObs .map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object document, RequestOptions options, Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) { return collectionObs.map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private void addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object objectDoc, RequestOptions options, DocumentCollection collection) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); PartitionKeyInternal partitionKeyInternal = null; if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){ partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else if (options != null && options.getPartitionKey() != null) { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey()); } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) { partitionKeyInternal = PartitionKeyInternal.getEmpty(); } else if (contentAsByteBuffer != null || objectDoc != null) { InternalObjectNode internalObjectNode; if (objectDoc instanceof InternalObjectNode) { internalObjectNode = (InternalObjectNode) objectDoc; } else if (objectDoc instanceof ObjectNode) { internalObjectNode = new InternalObjectNode((ObjectNode)objectDoc); } else if (contentAsByteBuffer != null) { contentAsByteBuffer.rewind(); internalObjectNode = new InternalObjectNode(contentAsByteBuffer); } else { throw new IllegalStateException("ContentAsByteBuffer and objectDoc are null"); } Instant serializationStartTime = Instant.now(); partitionKeyInternal = extractPartitionKeyValueFromDocument(internalObjectNode, partitionKeyDefinition); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTime, serializationEndTime, SerializationDiagnosticsContext.SerializationType.PARTITION_KEY_FETCH_SERIALIZATION ); SerializationDiagnosticsContext serializationDiagnosticsContext = 
BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } } else { throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation."); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } public static PartitionKeyInternal extractPartitionKeyValueFromDocument( InternalObjectNode document, PartitionKeyDefinition partitionKeyDefinition) { if (partitionKeyDefinition != null) { switch (partitionKeyDefinition.getKind()) { case HASH: String path = partitionKeyDefinition.getPaths().iterator().next(); List<String> parts = PathParser.getPathParts(path); if (parts.size() >= 1) { Object value = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, parts); if (value == null || value.getClass() == ObjectNode.class) { value = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } if (value instanceof PartitionKeyInternal) { return (PartitionKeyInternal) value; } else { return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false); } } break; case MULTI_HASH: Object[] partitionKeyValues = new Object[partitionKeyDefinition.getPaths().size()]; for(int pathIter = 0 ; pathIter < partitionKeyDefinition.getPaths().size(); pathIter++){ String partitionPath = partitionKeyDefinition.getPaths().get(pathIter); List<String> partitionPathParts = PathParser.getPathParts(partitionPath); partitionKeyValues[pathIter] = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, partitionPathParts); } return PartitionKeyInternal.fromObjectArray(partitionKeyValues, false); default: throw new IllegalArgumentException("Unrecognized Partition kind: " + partitionKeyDefinition.getKind()); } } return null; } private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) { if (StringUtils.isEmpty(documentCollectionLink)) { throw new IllegalArgumentException("documentCollectionLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = BridgeInternal.serializeJsonToByteBuffer(document, mapper); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { 
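// Descriptive note: attach the item-serialization timing captured above to this request's
// diagnostics context so it is reported as part of the operation's CosmosDiagnostics.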
serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return addPartitionKeyInformation(request, content, document, options, collectionObs); } private Mono<RxDocumentServiceRequest> getBatchDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, boolean disableAutomaticIdGeneration) { checkArgument(StringUtils.isNotEmpty(documentCollectionLink), "expected non empty documentCollectionLink"); checkNotNull(serverBatchRequest, "expected non null serverBatchRequest"); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = ByteBuffer.wrap(Utils.getUTF8Bytes(serverBatchRequest.getRequestBody())); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Batch); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Batch, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return collectionObs.map((Utils.ValueHolder<DocumentCollection> collectionValueHolder) -> { addBatchHeaders(request, serverBatchRequest, collectionValueHolder.v); return request; }); } private RxDocumentServiceRequest addBatchHeaders(RxDocumentServiceRequest request, ServerBatchRequest serverBatchRequest, DocumentCollection collection) { if(serverBatchRequest instanceof SinglePartitionKeyServerBatchRequest) { PartitionKey partitionKey = ((SinglePartitionKeyServerBatchRequest) serverBatchRequest).getPartitionKeyValue(); PartitionKeyInternal partitionKeyInternal; if (partitionKey.equals(PartitionKey.NONE)) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(partitionKey); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } else if(serverBatchRequest instanceof PartitionKeyRangeServerBatchRequest) { request.setPartitionKeyRangeIdentity(new PartitionKeyRangeIdentity(((PartitionKeyRangeServerBatchRequest) serverBatchRequest).getPartitionKeyRangeId())); } else { throw new UnsupportedOperationException("Unknown Server 
request."); } request.getHeaders().put(HttpConstants.HttpHeaders.IS_BATCH_REQUEST, Boolean.TRUE.toString()); request.getHeaders().put(HttpConstants.HttpHeaders.IS_BATCH_ATOMIC, String.valueOf(serverBatchRequest.isAtomicBatch())); request.getHeaders().put(HttpConstants.HttpHeaders.SHOULD_BATCH_CONTINUE_ON_ERROR, String.valueOf(serverBatchRequest.isShouldContinueOnError())); request.setNumberOfItemsInBatchRequest(serverBatchRequest.getOperations().size()); return request; } private Mono<RxDocumentServiceRequest> populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) { request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); if (this.masterKeyOrResourceToken != null || this.resourceTokensMap != null || this.cosmosAuthorizationTokenResolver != null || this.credential != null) { String resourceName = request.getResourceAddress(); String authorization = this.getUserAuthorizationToken( resourceName, request.getResourceType(), httpMethod, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, request.properties); try { authorization = URLEncoder.encode(authorization, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("Failed to encode authtoken.", e); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); } if (this.apiType != null) { request.getHeaders().put(HttpConstants.HttpHeaders.API_TYPE, this.apiType.toString()); } if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod)) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); } if (RequestVerb.PATCH.equals(httpMethod) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON_PATCH); } if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) { request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } MetadataDiagnosticsContext metadataDiagnosticsCtx = BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics); if (this.requiresFeedRangeFiltering(request)) { return request.getFeedRange() .populateFeedRangeFilteringHeaders( this.getPartitionKeyRangeCache(), request, this.collectionCache.resolveCollectionAsync(metadataDiagnosticsCtx, request)) .flatMap(this::populateAuthorizationHeader); } return this.populateAuthorizationHeader(request); } private boolean requiresFeedRangeFiltering(RxDocumentServiceRequest request) { if (request.getResourceType() != ResourceType.Document && request.getResourceType() != ResourceType.Conflict) { return false; } switch (request.getOperationType()) { case ReadFeed: case Query: case SqlQuery: return request.getFeedRange() != null; default: return false; } } @Override public Mono<RxDocumentServiceRequest> populateAuthorizationHeader(RxDocumentServiceRequest request) { if (request == null) { throw new IllegalArgumentException("request"); } if (this.authorizationTokenType == AuthorizationTokenType.AadToken) { return AadTokenAuthorizationHelper.getAuthorizationToken(this.tokenCredentialCache) .map(authorization -> { request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); return request; }); } else { return Mono.just(request); } } @Override public Mono<HttpHeaders> populateAuthorizationHeader(HttpHeaders httpHeaders) { if (httpHeaders == 
null) { throw new IllegalArgumentException("httpHeaders"); } if (this.authorizationTokenType == AuthorizationTokenType.AadToken) { return AadTokenAuthorizationHelper.getAuthorizationToken(this.tokenCredentialCache) .map(authorization -> { httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); return httpHeaders; }); } return Mono.just(httpHeaders); } @Override public AuthorizationTokenType getAuthorizationTokenType() { return this.authorizationTokenType; } @Override public String getUserAuthorizationToken(String resourceName, ResourceType resourceType, RequestVerb requestVerb, Map<String, String> headers, AuthorizationTokenType tokenType, Map<String, Object> properties) { if (this.cosmosAuthorizationTokenResolver != null) { return this.cosmosAuthorizationTokenResolver.getAuthorizationToken(requestVerb.toUpperCase(), resourceName, this.resolveCosmosResourceType(resourceType).toString(), properties != null ? Collections.unmodifiableMap(properties) : null); } else if (credential != null) { return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName, resourceType, headers); } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) { return masterKeyOrResourceToken; } else { assert resourceTokensMap != null; if(resourceType.equals(ResourceType.DatabaseAccount)) { return this.firstResourceTokenFromPermissionFeed; } return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers); } } private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) { CosmosResourceType cosmosResourceType = ModelBridgeInternal.fromServiceSerializedFormat(resourceType.toString()); if (cosmosResourceType == null) { return CosmosResourceType.SYSTEM; } return cosmosResourceType; } void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) { this.sessionContainer.setSessionToken(request, response.getResponseHeaders()); } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { RxStoreModel storeProxy = this.getStoreProxy(requestPopulated); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return storeProxy.processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { Map<String, String> headers = requestPopulated.getHeaders(); assert (headers != null); headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true"); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated, operationContextAndListenerTuple) .map(response -> { this.captureSessionToken(requestPopulated, response); return response; } ); }); } private Mono<RxDocumentServiceResponse> 
replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.PUT) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { populateHeaders(request, RequestVerb.PATCH); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(request).processMessage(request); } @Override public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy); } private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) { try { logger.debug("Creating a Document. collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(requestRetryPolicy, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Create); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> create(request, requestRetryPolicy, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance); } private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(retryPolicyInstance, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Upsert); Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = Utils.getCollectionName(documentLink); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Document typedDocument = documentFromObject(document, mapper); return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = document.getSelfLink(); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (document == null) { throw new IllegalArgumentException("document"); } return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a database due to [{}]", e.getMessage()); return Mono.error(e); } } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { if (document == null) { throw new IllegalArgumentException("document"); } logger.debug("Replacing a Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Document, OperationType.Replace); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = serializeJsonToByteBuffer(document); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTime, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs); return requestObs.flatMap(req -> replace(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class))); } @Override public Mono<ResourceResponse<Document>> patchDocument(String documentLink, CosmosPatchOperations cosmosPatchOperations, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> patchDocumentInternal(documentLink, cosmosPatchOperations, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Document>> patchDocumentInternal(String documentLink, CosmosPatchOperations cosmosPatchOperations, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { checkArgument(StringUtils.isNotEmpty(documentLink), "expected non empty documentLink"); checkNotNull(cosmosPatchOperations, "expected non null cosmosPatchOperations"); logger.debug("Running patch operations on Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Document, OperationType.Patch); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = ByteBuffer.wrap(PatchUtil.serializeCosmosPatchToByteArray(cosmosPatchOperations, options)); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTime, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Patch, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation( request, null, null, options, collectionObs); return requestObs.flatMap(req -> patch(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class))); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, null, options, requestRetryPolicy), requestRetryPolicy); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, InternalObjectNode internalObjectNode, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, internalObjectNode, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, InternalObjectNode internalObjectNode, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Deleting a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, internalObjectNode, options, collectionObs); return requestObs.flatMap(req -> this .delete(req, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class))); } catch (Exception e) { logger.debug("Failure in deleting a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> deleteAllDocumentsByPartitionKey(String collectionLink, PartitionKey partitionKey, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteAllDocumentsByPartitionKeyInternal(collectionLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteAllDocumentsByPartitionKeyInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting all items by Partition Key. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.PartitionKey, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.PartitionKey, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> this .deleteAllItemsByPartitionKey(req, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class))); } catch (Exception e) { logger.debug("Failure in deleting documents due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Reading a Document. documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); }); } catch (Exception e) { logger.debug("Failure in reading a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Flux<FeedResponse<Document>> readDocuments(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return queryDocuments(collectionLink, "SELECT * FROM r", options); } @Override public <T> Mono<FeedResponse<T>> readMany( List<CosmosItemIdentity> itemIdentityList, String collectionLink, CosmosQueryRequestOptions options, Class<T> klass) { String resourceLink = parentResourceLinkToQueryLink(collectionLink, ResourceType.Document); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Query, ResourceType.Document, collectionLink, null ); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = 
collectionCache.resolveCollectionAsync(null, request); return collectionObs .flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } final PartitionKeyDefinition pkDefinition = collection.getPartitionKey(); Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache .tryLookupAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), null, null); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { Map<PartitionKeyRange, List<CosmosItemIdentity>> partitionRangeItemKeyMap = new HashMap<>(); CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } itemIdentityList .forEach(itemIdentity -> { String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString( BridgeInternal.getPartitionKeyInternal( itemIdentity.getPartitionKey()), pkDefinition); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); if (partitionRangeItemKeyMap.get(range) == null) { List<CosmosItemIdentity> list = new ArrayList<>(); list.add(itemIdentity); partitionRangeItemKeyMap.put(range, list); } else { List<CosmosItemIdentity> pairs = partitionRangeItemKeyMap.get(range); pairs.add(itemIdentity); partitionRangeItemKeyMap.put(range, pairs); } }); Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap; rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey()); return createReadManyQuery( resourceLink, new SqlQuerySpec(DUMMY_SQL_QUERY), options, Document.class, ResourceType.Document, collection, Collections.unmodifiableMap(rangeQueryMap)) .collectList() .map(feedList -> { List<T> finalList = new ArrayList<>(); HashMap<String, String> headers = new HashMap<>(); ConcurrentMap<String, QueryMetrics> aggregatedQueryMetrics = new ConcurrentHashMap<>(); double requestCharge = 0; for (FeedResponse<Document> page : feedList) { ConcurrentMap<String, QueryMetrics> pageQueryMetrics = ModelBridgeInternal.queryMetrics(page); if (pageQueryMetrics != null) { pageQueryMetrics.forEach( aggregatedQueryMetrics::putIfAbsent); } requestCharge += page.getRequestCharge(); finalList.addAll(page.getResults().stream().map(document -> ModelBridgeInternal.toObjectFromJsonSerializable(document, klass)).collect(Collectors.toList())); } headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double .toString(requestCharge)); FeedResponse<T> frp = BridgeInternal .createFeedResponse(finalList, headers); return frp; }); }); } ); } private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap( Map<PartitionKeyRange, List<CosmosItemIdentity>> partitionRangeItemKeyMap, PartitionKeyDefinition partitionKeyDefinition) { Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>(); String partitionKeySelector = createPkSelector(partitionKeyDefinition); for(Map.Entry<PartitionKeyRange, List<CosmosItemIdentity>> entry: partitionRangeItemKeyMap.entrySet()) { SqlQuerySpec sqlQuerySpec; if (partitionKeySelector.equals("[\"id\"]")) { sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector); } else { sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector); } rangeQueryMap.put(entry.getKey(), sqlQuerySpec); } return rangeQueryMap; } private 
SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame( List<CosmosItemIdentity> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { CosmosItemIdentity itemIdentity = idPartitionKeyPairList.get(i); String idValue = itemIdentity.getId(); String idParamName = "@param" + i; PartitionKey pkValueAsPartitionKey = itemIdentity.getPartitionKey(); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); if (!Objects.equals(idValue, pkValue)) { continue; } parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append(idParamName); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(", "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private SqlQuerySpec createReadManyQuerySpec(List<CosmosItemIdentity> itemIdentities, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE ( "); for (int i = 0; i < itemIdentities.size(); i++) { CosmosItemIdentity itemIdentity = itemIdentities.get(i); PartitionKey pkValueAsPartitionKey = itemIdentity.getPartitionKey(); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); String pkParamName = "@param" + (2 * i); parameters.add(new SqlParameter(pkParamName, pkValue)); String idValue = itemIdentity.getId(); String idParamName = "@param" + (2 * i + 1); parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append("("); queryStringBuilder.append("c.id = "); queryStringBuilder.append(idParamName); queryStringBuilder.append(" AND "); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); queryStringBuilder.append(" )"); if (i < itemIdentities.size() - 1) { queryStringBuilder.append(" OR "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) { return partitionKeyDefinition.getPaths() .stream() .map(pathPart -> StringUtils.substring(pathPart, 1)) .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\")) .map(part -> "[\"" + part + "\"]") .collect(Collectors.joining()); } private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery( String parentResourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum, DocumentCollection collection, Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) { UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createReadManyQueryAsync(this, queryClient, collection.getResourceId(), sqlQuery, rangeQueryMap, options, collection.getResourceId(), parentResourceLink, activityId, klass, resourceTypeEnum); return executionContext.flatMap(IDocumentQueryExecutionContext<T>::executeAsync); } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryDocuments(collectionLink, new SqlQuerySpec(query), options); } private IDocumentQueryClient documentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl, OperationContextAndListenerTuple operationContextAndListenerTuple) { return new IDocumentQueryClient () { @Override public RxCollectionCache getCollectionCache() { return RxDocumentClientImpl.this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return RxDocumentClientImpl.this.partitionKeyRangeCache; } @Override public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy; } @Override public ConsistencyLevel getDefaultConsistencyLevelAsync() { return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel(); } @Override public ConsistencyLevel getDesiredConsistencyLevelAsync() { return RxDocumentClientImpl.this.consistencyLevel; } @Override public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) { if (operationContextAndListenerTuple == null) { return RxDocumentClientImpl.this.query(request).single(); } else { final OperationListener listener = operationContextAndListenerTuple.getOperationListener(); final OperationContext operationContext = operationContextAndListenerTuple.getOperationContext(); request.getHeaders().put(HttpConstants.HttpHeaders.CORRELATED_ACTIVITY_ID, operationContext.getCorrelationActivityId()); listener.requestListener(operationContext, request); return RxDocumentClientImpl.this.query(request).single().doOnNext( response -> listener.responseListener(operationContext, response) ).doOnError( ex -> listener.exceptionListener(operationContext, ex) ); } } @Override public QueryCompatibilityMode getQueryCompatibilityMode() { return QueryCompatibilityMode.Default; } @Override public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) { return null; } }; } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { SqlQuerySpecLogger.getInstance().logQuery(querySpec); return createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document); } @Override public Flux<FeedResponse<Document>> queryDocumentChangeFeed( final DocumentCollection collection, final CosmosChangeFeedRequestOptions changeFeedOptions) { checkNotNull(collection, "Argument 'collection' must not be null."); ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<>( this, ResourceType.Document, Document.class, collection.getAltLink(), collection.getResourceId(), changeFeedOptions); return changeFeedQueryImpl.executeAsync(); } @Override public Flux<FeedResponse<Document>> readAllDocuments( String collectionLink, PartitionKey partitionKey, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (partitionKey == null) { throw new 
IllegalArgumentException("partitionKey"); } RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Query, ResourceType.Document, collectionLink, null ); Flux<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request).flux(); return collectionObs.flatMap(documentCollectionResourceResponse -> { DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } PartitionKeyDefinition pkDefinition = collection.getPartitionKey(); String pkSelector = createPkSelector(pkDefinition); SqlQuerySpec querySpec = createLogicalPartitionScanQuerySpec(partitionKey, pkSelector); String resourceLink = parentResourceLinkToQueryLink(collectionLink, ResourceType.Document); UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); final CosmosQueryRequestOptions effectiveOptions = ModelBridgeInternal.createQueryRequestOptions(options); InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, resourceLink, ModelBridgeInternal.getPropertiesFromQueryRequestOptions(effectiveOptions)); return ObservableHelper.fluxInlineIfPossibleAsObs( () -> { Flux<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = this.partitionKeyRangeCache .tryLookupAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), null, null).flux(); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString( BridgeInternal.getPartitionKeyInternal(partitionKey), pkDefinition); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); return createQueryInternal( resourceLink, querySpec, ModelBridgeInternal.setPartitionKeyRangeIdInternal(effectiveOptions, range.getId()), Document.class, ResourceType.Document, queryClient, activityId); }); }, invalidPartitionExceptionRetryPolicy); }); } @Override public Map<String, PartitionedQueryExecutionInfo> getQueryPlanCache() { return queryPlanCache; } @Override public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class, Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT)); } private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } validateResource(storedProcedure); String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, operationType); RxDocumentServiceRequest request = 
RxDocumentServiceRequest.create(this, operationType, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); return request; } private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink, UserDefinedFunction udf, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (udf == null) { throw new IllegalArgumentException("udf"); } validateResource(udf); String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Create); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } logger.debug("Replacing a StoredProcedure. storedProcedure id [{}]", storedProcedure.getId()); RxDocumentClientImpl.validateResource(storedProcedure); String path = Utils.joinPath(storedProcedure.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Deleting a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Reading a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class, Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT)); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, List<Object> procedureParams) { return this.executeStoredProcedure(storedProcedureLink, null, procedureParams); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, RequestOptions options, List<Object> procedureParams) { DocumentClientRetryPolicy documentClientRetryPolicy = 
this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams, documentClientRetryPolicy), documentClientRetryPolicy); } @Override public Mono<CosmosBatchResponse> executeBatchRequest(String collectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> executeBatchRequestInternal(collectionLink, serverBatchRequest, options, documentClientRetryPolicy, disableAutomaticIdGeneration), documentClientRetryPolicy); } private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink, RequestOptions options, List<Object> procedureParams, DocumentClientRetryPolicy retryPolicy) { try { logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.ExecuteJavaScript); requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.ExecuteJavaScript, ResourceType.StoredProcedure, path, procedureParams != null && !procedureParams.isEmpty() ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "", requestHeaders, options); if (retryPolicy != null) { retryPolicy.onBeforeSendRequest(request); } Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> create(request, retryPolicy, getOperationContextAndListenerTuple(options)) .map(response -> { this.captureSessionToken(request, response); return toStoredProcedureResponse(response); })); } catch (Exception e) { logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<CosmosBatchResponse> executeBatchRequestInternal(String collectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, DocumentClientRetryPolicy requestRetryPolicy, boolean disableAutomaticIdGeneration) { try { logger.debug("Executing a Batch request with number of operations {}", serverBatchRequest.getOperations().size()); Mono<RxDocumentServiceRequest> requestObs = getBatchDocumentRequest(requestRetryPolicy, collectionLink, serverBatchRequest, options, disableAutomaticIdGeneration); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> create(request, requestRetryPolicy, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> BatchResponseParser.fromDocumentServiceResponse(serviceResponse, serverBatchRequest, true)); } catch (Exception ex) { logger.debug("Failure in executing a batch due to [{}]", ex.getMessage(), ex); return Mono.error(ex); } } @Override public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> createTriggerInternal(String 
collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a Trigger. collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Trigger. collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (trigger == null) { throw new IllegalArgumentException("trigger"); } RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Trigger, path, trigger, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (trigger == null) { throw new IllegalArgumentException("trigger"); } logger.debug("Replacing a Trigger. 
trigger id [{}]", trigger.getId()); RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(trigger.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Trigger, path, trigger, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Deleting a Trigger. triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Reading a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Trigger, Trigger.class, Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryTriggers(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger); } @Override public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a UserDefinedFunction. 
collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (udf == null) { throw new IllegalArgumentException("udf"); } logger.debug("Replacing a UserDefinedFunction. udf id [{}]", udf.getId()); validateResource(udf); String path = Utils.joinPath(udf.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Deleting a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Reading a UserDefinedFunction. udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class, Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class, ResourceType.UserDefinedFunction); } @Override public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Conflict, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Conflict, Conflict.class, Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryConflicts(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict); } @Override public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Deleting a Conflict. 
conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Conflict, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a User. databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a User. 
databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (user == null) { throw new IllegalArgumentException("user"); } RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.User, path, user, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (user == null) { throw new IllegalArgumentException("user"); } logger.debug("Replacing a User. user id [{}]", user.getId()); RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(user.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.User, path, user, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Deleting a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Reading a User. userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, CosmosQueryRequestOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<ClientEncryptionKey>> readClientEncryptionKey(String clientEncryptionKeyLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readClientEncryptionKeyInternal(clientEncryptionKeyLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> readClientEncryptionKeyInternal(String clientEncryptionKeyLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(clientEncryptionKeyLink)) { throw new IllegalArgumentException("clientEncryptionKeyLink"); } logger.debug("Reading a client encryption key. 
clientEncryptionKeyLink [{}]", clientEncryptionKeyLink); String path = Utils.joinPath(clientEncryptionKeyLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.ClientEncryptionKey, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in reading a client encryption key due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<ClientEncryptionKey>> createClientEncryptionKey(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createClientEncryptionKeyInternal(databaseLink, clientEncryptionKey, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> createClientEncryptionKeyInternal(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a client encryption key. databaseLink [{}], clientEncryptionKey id [{}]", databaseLink, clientEncryptionKey.getId()); RxDocumentServiceRequest request = getClientEncryptionKeyRequest(databaseLink, clientEncryptionKey, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in creating a client encryption key due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getClientEncryptionKeyRequest(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (clientEncryptionKey == null) { throw new IllegalArgumentException("clientEncryptionKey"); } RxDocumentClientImpl.validateResource(clientEncryptionKey); String path = Utils.joinPath(databaseLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.ClientEncryptionKey, path, clientEncryptionKey, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<ClientEncryptionKey>> replaceClientEncryptionKey(ClientEncryptionKey clientEncryptionKey, String nameBasedLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceClientEncryptionKeyInternal(clientEncryptionKey, nameBasedLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> replaceClientEncryptionKeyInternal(ClientEncryptionKey clientEncryptionKey, String nameBasedLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (clientEncryptionKey 
== null) { throw new IllegalArgumentException("clientEncryptionKey"); } logger.debug("Replacing a clientEncryptionKey. clientEncryptionKey id [{}]", clientEncryptionKey.getId()); RxDocumentClientImpl.validateResource(clientEncryptionKey); String path = Utils.joinPath(nameBasedLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.ClientEncryptionKey, path, clientEncryptionKey, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in replacing a clientEncryptionKey due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<ClientEncryptionKey>> readClientEncryptionKeys(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.ClientEncryptionKey, ClientEncryptionKey.class, Utils.joinPath(databaseLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT)); } @Override public Flux<FeedResponse<ClientEncryptionKey>> queryClientEncryptionKeys(String databaseLink, String query, CosmosQueryRequestOptions options) { return queryClientEncryptionKeys(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<ClientEncryptionKey>> queryClientEncryptionKeys(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, ClientEncryptionKey.class, ResourceType.ClientEncryptionKey); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Permission. userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } if (permission == null) { throw new IllegalArgumentException("permission"); } RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Permission, path, permission, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (permission == null) { throw new IllegalArgumentException("permission"); } logger.debug("Replacing a Permission. 
permission id [{}]", permission.getId()); RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(permission.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Permission, path, permission, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Deleting a Permission. permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance ) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Reading a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Permission>> readPermissions(String userLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } return readFeed(options, ResourceType.Permission, Permission.class, Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, CosmosQueryRequestOptions options) { return queryPermissions(userLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission); } @Override public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) { try { if (offer == null) { throw new IllegalArgumentException("offer"); } logger.debug("Replacing an Offer. offer id [{}]", offer.getId()); RxDocumentClientImpl.validateResource(offer); String path = Utils.joinPath(offer.getSelfLink(), null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Offer, path, offer, null, null); return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Offer>> readOffer(String offerLink) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(offerLink)) { throw new IllegalArgumentException("offerLink"); } logger.debug("Reading an Offer. 
offerLink [{}]", offerLink); String path = Utils.joinPath(offerLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Offer, path, (HashMap<String, String>)null, null); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Offer>> readOffers(CosmosQueryRequestOptions options) { return readFeed(options, ResourceType.Offer, Offer.class, Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null)); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(CosmosQueryRequestOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new CosmosQueryRequestOptions(); } Integer maxItemCount = ModelBridgeInternal.getMaxItemCountFromQueryRequestOptions(options); int maxPageSize = maxItemCount != null ? maxItemCount : -1; final CosmosQueryRequestOptions finalCosmosQueryRequestOptions = options; DocumentClientRetryPolicy retryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalCosmosQueryRequestOptions); retryPolicy.onBeforeSendRequest(request); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> ObservableHelper .inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), retryPolicy); return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override public Flux<FeedResponse<Offer>> queryOffers(String query, CosmosQueryRequestOptions options) { return queryOffers(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer); } @Override public Mono<DatabaseAccount> getDatabaseAccount() { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy), documentClientRetryPolicy); } @Override public DatabaseAccount getLatestDatabaseAccount() { return this.globalEndpointManager.getLatestDatabaseAccount(); } private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Getting Database Account"); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null); return this.read(request, documentClientRetryPolicy).map(ModelBridgeInternal::toDatabaseAccount); } catch (Exception e) { logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), 
e); return Mono.error(e); } } public Object getSession() { return this.sessionContainer; } public void setSession(Object sessionContainer) { this.sessionContainer = (SessionContainer) sessionContainer; } @Override public RxClientCollectionCache getCollectionCache() { return this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return partitionKeyRangeCache; } public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { return Flux.defer(() -> { RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DatabaseAccount, "", null, (Object) null); return this.populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> { requestPopulated.setEndpointOverride(endpoint); return this.gatewayProxy.processMessage(requestPopulated).doOnError(e -> { String message = String.format("Failed to retrieve database account information. %s", e.getCause() != null ? e.getCause().toString() : e.toString()); logger.warn(message); }).map(rsp -> rsp.getResource(DatabaseAccount.class)) .doOnNext(databaseAccount -> this.useMultipleWriteLocations = this.connectionPolicy.isMultipleWriteRegionsEnabled() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount)); }); }); } /** * Certain requests must be routed through gateway even when the client connectivity mode is direct. * * @param request * @return RxStoreModel */ private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) { if (request.UseGatewayMode) { return this.gatewayProxy; } ResourceType resourceType = request.getResourceType(); OperationType operationType = request.getOperationType(); if (resourceType == ResourceType.Offer || resourceType == ResourceType.ClientEncryptionKey || resourceType.isScript() && operationType != OperationType.ExecuteJavaScript || resourceType == ResourceType.PartitionKeyRange || resourceType == ResourceType.PartitionKey && operationType == OperationType.Delete) { return this.gatewayProxy; } if (operationType == OperationType.Create || operationType == OperationType.Upsert) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection || resourceType == ResourceType.Permission) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Delete) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Replace) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Read) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else { if ((operationType == OperationType.Query || operationType == OperationType.SqlQuery || operationType == OperationType.ReadFeed) && Utils.isCollectionChild(request.getResourceType())) { if (request.getPartitionKeyRangeIdentity() == null && request.getHeaders().get(HttpConstants.HttpHeaders.PARTITION_KEY) == null) { return this.gatewayProxy; } } return this.storeModel; } } @Override public void close() { logger.info("Attempting to close client {}", this.clientId); if (!closed.getAndSet(true)) { activeClientsCnt.decrementAndGet(); logger.info("Shutting down ..."); logger.info("Closing Global Endpoint Manager ..."); 
LifeCycleUtils.closeQuietly(this.globalEndpointManager); logger.info("Closing StoreClientFactory ..."); LifeCycleUtils.closeQuietly(this.storeClientFactory); logger.info("Shutting down reactorHttpClient ..."); LifeCycleUtils.closeQuietly(this.reactorHttpClient); logger.info("Shutting down CpuMonitor ..."); CpuMemoryMonitor.unregister(this); if (this.throughputControlEnabled.get()) { logger.info("Closing ThroughputControlStore ..."); this.throughputControlStore.close(); } logger.info("Shutting down completed."); } else { logger.warn("Already shutdown!"); } } @Override public ItemDeserializer getItemDeserializer() { return this.itemDeserializer; } @Override public synchronized void enableThroughputControlGroup(ThroughputControlGroupInternal group) { checkNotNull(group, "Throughput control group can not be null"); if (this.throughputControlEnabled.compareAndSet(false, true)) { this.throughputControlStore = new ThroughputControlStore( this.collectionCache, this.connectionPolicy.getConnectionMode(), this.partitionKeyRangeCache); this.storeModel.enableThroughputControl(throughputControlStore); } this.throughputControlStore.enableThroughputControlGroup(group); } private static SqlQuerySpec createLogicalPartitionScanQuerySpec( PartitionKey partitionKey, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE"); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(partitionKey); String pkParamName = "@pkValue"; parameters.add(new SqlParameter(pkParamName, pkValue)); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } @Override public Mono<List<FeedRange>> getFeedRanges(String collectionLink) { InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, collectionLink, new HashMap<>()); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Query, ResourceType.Document, collectionLink, null); invalidPartitionExceptionRetryPolicy.onBeforeSendRequest(request); return ObservableHelper.inlineIfPossibleAsObs( () -> getFeedRangesInternal(request, collectionLink), invalidPartitionExceptionRetryPolicy); } private Mono<List<FeedRange>> getFeedRangesInternal(RxDocumentServiceRequest request, String collectionLink) { logger.debug("getFeedRange collectionLink=[{}]", collectionLink); if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request); return collectionObs.flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } Mono<Utils.ValueHolder<List<PartitionKeyRange>>> valueHolderMono = partitionKeyRangeCache .tryGetOverlappingRangesAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), RANGE_INCLUDING_ALL_PARTITION_KEY_RANGES, true, null); return valueHolderMono.map(partitionKeyRangeList -> toFeedRanges(partitionKeyRangeList, request)); }); } private static List<FeedRange> toFeedRanges( 
Utils.ValueHolder<List<PartitionKeyRange>> partitionKeyRangeListValueHolder, RxDocumentServiceRequest request) { final List<PartitionKeyRange> partitionKeyRangeList = partitionKeyRangeListValueHolder.v; if (partitionKeyRangeList == null) { request.forceNameCacheRefresh = true; throw new InvalidPartitionException(); } List<FeedRange> feedRanges = new ArrayList<>(); partitionKeyRangeList.forEach(pkRange -> feedRanges.add(toFeedRange(pkRange))); return feedRanges; } private static FeedRange toFeedRange(PartitionKeyRange pkRange) { return new FeedRangeEpkImpl(pkRange.toRange()); } }
I see we have clientId, which is calculated via clientIdGenerator. Can we use that here instead of a random UUID? We already have a unique process id and VmID now, so this would align the client id used in diagnostics and telemetry.
public void init(CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, Function<HttpClient, HttpClient> httpClientInterceptor) { try { this.httpClientInterceptor = httpClientInterceptor; if (httpClientInterceptor != null) { this.reactorHttpClient = httpClientInterceptor.apply(httpClient()); } this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode, this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient, this.apiType); this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); if (metadataCachesSnapshot != null) { this.collectionCache = new RxClientCollectionCache(this, this.sessionContainer, this.gatewayProxy, this, this.retryPolicy, metadataCachesSnapshot.getCollectionInfoByNameCache(), metadataCachesSnapshot.getCollectionInfoByIdCache() ); } else { this.collectionCache = new RxClientCollectionCache(this, this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); } this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); updateGatewayProxy(); clientTelemetry = new ClientTelemetry(this, null, UUID.randomUUID().toString(), ManagementFactory.getRuntimeMXBean().getName(), userAgentContainer.getUserAgent(), connectionPolicy.getConnectionMode(), globalEndpointManager.getLatestDatabaseAccount().getId(), null, null, this.reactorHttpClient, connectionPolicy.isClientTelemetryEnabled(), this, this.connectionPolicy.getPreferredRegions()); clientTelemetry.init(); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } this.retryPolicy.setRxCollectionCache(this.collectionCache); } catch (Exception e) { logger.error("unexpected failure in initializing client.", e); close(); throw e; } }
clientTelemetry = new ClientTelemetry(this, null, UUID.randomUUID().toString(),
public void init(CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, Function<HttpClient, HttpClient> httpClientInterceptor) { try { this.httpClientInterceptor = httpClientInterceptor; if (httpClientInterceptor != null) { this.reactorHttpClient = httpClientInterceptor.apply(httpClient()); } this.gatewayProxy = createRxGatewayProxy(this.sessionContainer, this.consistencyLevel, this.queryCompatibilityMode, this.userAgentContainer, this.globalEndpointManager, this.reactorHttpClient, this.apiType); this.globalEndpointManager.init(); this.initializeGatewayConfigurationReader(); if (metadataCachesSnapshot != null) { this.collectionCache = new RxClientCollectionCache(this, this.sessionContainer, this.gatewayProxy, this, this.retryPolicy, metadataCachesSnapshot.getCollectionInfoByNameCache(), metadataCachesSnapshot.getCollectionInfoByIdCache() ); } else { this.collectionCache = new RxClientCollectionCache(this, this.sessionContainer, this.gatewayProxy, this, this.retryPolicy); } this.resetSessionTokenRetryPolicy = new ResetSessionTokenRetryPolicyFactory(this.sessionContainer, this.collectionCache, this.retryPolicy); this.partitionKeyRangeCache = new RxPartitionKeyRangeCache(RxDocumentClientImpl.this, collectionCache); updateGatewayProxy(); clientTelemetry = new ClientTelemetry(this, null, UUID.randomUUID().toString(), ManagementFactory.getRuntimeMXBean().getName(), userAgentContainer.getUserAgent(), connectionPolicy.getConnectionMode(), globalEndpointManager.getLatestDatabaseAccount().getId(), null, null, this.reactorHttpClient, connectionPolicy.isClientTelemetryEnabled(), this, this.connectionPolicy.getPreferredRegions()); clientTelemetry.init(); if (this.connectionPolicy.getConnectionMode() == ConnectionMode.GATEWAY) { this.storeModel = this.gatewayProxy; } else { this.initializeDirectConnectivity(); } this.retryPolicy.setRxCollectionCache(this.collectionCache); } catch (Exception e) { logger.error("unexpected failure in initializing client.", e); close(); throw e; } }
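As a rough illustration of the suggestion above, the ClientTelemetry construction in init() could reuse the per-client id produced by clientIdGenerator instead of a fresh UUID.randomUUID().toString(). This is only a sketch: every argument other than the third mirrors the existing call, and it assumes the ClientTelemetry constructor accepts an arbitrary stable string for its client-id parameter.

    // Hedged sketch, not the actual change: reuse this.clientId (already reported via
    // DiagnosticsClientConfig.withClientId) so telemetry and diagnostics share the same id.
    clientTelemetry = new ClientTelemetry(
        this,
        null,
        String.valueOf(this.clientId),                      // was UUID.randomUUID().toString()
        ManagementFactory.getRuntimeMXBean().getName(),     // process id / VM name, unchanged
        userAgentContainer.getUserAgent(),
        connectionPolicy.getConnectionMode(),
        globalEndpointManager.getLatestDatabaseAccount().getId(),
        null,
        null,
        this.reactorHttpClient,
        connectionPolicy.isClientTelemetryEnabled(),
        this,
        this.connectionPolicy.getPreferredRegions());
    clientTelemetry.init();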
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider, CpuMemoryListener, DiagnosticsClientContext { private static final String tempMachineId = "uuid:" + UUID.randomUUID(); private static final AtomicInteger activeClientsCnt = new AtomicInteger(0); private static final AtomicInteger clientIdGenerator = new AtomicInteger(0); private static final Range<String> RANGE_INCLUDING_ALL_PARTITION_KEY_RANGES = new Range<>( PartitionKeyInternalHelper.MinimumInclusiveEffectivePartitionKey, PartitionKeyInternalHelper.MaximumExclusiveEffectivePartitionKey, true, false); private static final String DUMMY_SQL_QUERY = "this is dummy and only used in creating " + "ParallelDocumentQueryExecutioncontext, but not used"; private final static ObjectMapper mapper = Utils.getSimpleObjectMapper(); private final ItemDeserializer itemDeserializer = new ItemDeserializer.JsonDeserializer(); private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class); private final String masterKeyOrResourceToken; private final URI serviceEndpoint; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel consistencyLevel; private final BaseAuthorizationTokenProvider authorizationTokenProvider; private final UserAgentContainer userAgentContainer; private final boolean hasAuthKeyResourceToken; private final Configs configs; private final boolean connectionSharingAcrossClientsEnabled; private AzureKeyCredential credential; private final TokenCredential tokenCredential; private String[] tokenCredentialScopes; private SimpleTokenCache tokenCredentialCache; private CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver; AuthorizationTokenType authorizationTokenType; private SessionContainer sessionContainer; private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY; private RxClientCollectionCache collectionCache; private RxStoreModel gatewayProxy; private RxStoreModel storeModel; private GlobalAddressResolver addressResolver; private RxPartitionKeyRangeCache partitionKeyRangeCache; private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap; private final boolean contentResponseOnWriteEnabled; private Map<String, PartitionedQueryExecutionInfo> queryPlanCache; private final AtomicBoolean closed = new AtomicBoolean(false); private final int clientId; private ClientTelemetry clientTelemetry; private ApiType apiType; private IRetryPolicyFactory resetSessionTokenRetryPolicy; /** * Compatibility mode: Allows to specify compatibility mode used by client when * making query requests. Should be removed when application/sql is no longer * supported. 
*/ private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default; private final GlobalEndpointManager globalEndpointManager; private final RetryPolicy retryPolicy; private HttpClient reactorHttpClient; private Function<HttpClient, HttpClient> httpClientInterceptor; private volatile boolean useMultipleWriteLocations; private StoreClientFactory storeClientFactory; private GatewayServiceConfigurationReader gatewayConfigurationReader; private final DiagnosticsClientConfig diagnosticsClientConfig; private final AtomicBoolean throughputControlEnabled; private ThroughputControlStore throughputControlStore; public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, AzureKeyCredential credential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, credential, null, sessionCapturingOverride, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, credential, tokenCredential, sessionCapturingOverride, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, credential, tokenCredential, sessionCapturingOverrideEnabled, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); if (permissionFeed != null && permissionFeed.size() > 0) { this.resourceTokensMap = new HashMap<>(); for (Permission permission : permissionFeed) { String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0)); if (segments.length <= 0) { throw new IllegalArgumentException("resourceLink"); } List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null; PathInfo pathInfo = new PathInfo(false, 
StringUtils.EMPTY, StringUtils.EMPTY, false); if (!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) { throw new IllegalArgumentException(permission.getResourceLink()); } partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName); if (partitionKeyAndResourceTokenPairs == null) { partitionKeyAndResourceTokenPairs = new ArrayList<>(); this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs); } PartitionKey partitionKey = permission.getResourcePartitionKey(); partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair( partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty, permission.getToken())); logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token [{}]", pathInfo.resourceIdOrFullName, partitionKey != null ? partitionKey.toString() : null, permission.getToken()); } if(this.resourceTokensMap.isEmpty()) { throw new IllegalArgumentException("permissionFeed"); } String firstToken = permissionFeed.get(0).getToken(); if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) { this.firstResourceTokenFromPermissionFeed = firstToken; } } } RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { activeClientsCnt.incrementAndGet(); this.clientId = clientIdGenerator.incrementAndGet(); this.diagnosticsClientConfig = new DiagnosticsClientConfig(); this.diagnosticsClientConfig.withClientId(this.clientId); this.diagnosticsClientConfig.withActiveClientCounter(activeClientsCnt); this.diagnosticsClientConfig.withConnectionSharingAcrossClientsEnabled(connectionSharingAcrossClientsEnabled); this.diagnosticsClientConfig.withConsistency(consistencyLevel); this.throughputControlEnabled = new AtomicBoolean(false); logger.info( "Initializing DocumentClient [{}] with" + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]", this.clientId, serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol()); try { this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled; this.configs = configs; this.masterKeyOrResourceToken = masterKeyOrResourceToken; this.serviceEndpoint = serviceEndpoint; this.credential = credential; this.tokenCredential = tokenCredential; this.contentResponseOnWriteEnabled = contentResponseOnWriteEnabled; this.authorizationTokenType = AuthorizationTokenType.Invalid; if (this.credential != null) { hasAuthKeyResourceToken = false; this.authorizationTokenType = AuthorizationTokenType.PrimaryMasterKey; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.credential); } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.authorizationTokenProvider = null; hasAuthKeyResourceToken = true; this.authorizationTokenType = AuthorizationTokenType.ResourceToken; } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.credential = new AzureKeyCredential(this.masterKeyOrResourceToken); 
hasAuthKeyResourceToken = false; this.authorizationTokenType = AuthorizationTokenType.PrimaryMasterKey; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.credential); } else { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = null; if (tokenCredential != null) { this.tokenCredentialScopes = new String[] { serviceEndpoint.getScheme() + ": }; this.tokenCredentialCache = new SimpleTokenCache(() -> this.tokenCredential .getToken(new TokenRequestContext().addScopes(this.tokenCredentialScopes))); this.authorizationTokenType = AuthorizationTokenType.AadToken; } } if (connectionPolicy != null) { this.connectionPolicy = connectionPolicy; } else { this.connectionPolicy = new ConnectionPolicy(DirectConnectionConfig.getDefaultConfig()); } this.diagnosticsClientConfig.withConnectionMode(this.getConnectionPolicy().getConnectionMode()); this.diagnosticsClientConfig.withMultipleWriteRegionsEnabled(this.connectionPolicy.isMultipleWriteRegionsEnabled()); this.diagnosticsClientConfig.withEndpointDiscoveryEnabled(this.connectionPolicy.isEndpointDiscoveryEnabled()); this.diagnosticsClientConfig.withPreferredRegions(this.connectionPolicy.getPreferredRegions()); this.diagnosticsClientConfig.withMachineId(tempMachineId); boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled); this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing); this.consistencyLevel = consistencyLevel; this.userAgentContainer = new UserAgentContainer(); String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix(); if (userAgentSuffix != null && userAgentSuffix.length() > 0) { userAgentContainer.setSuffix(userAgentSuffix); } this.httpClientInterceptor = null; this.reactorHttpClient = httpClient(); this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs); this.retryPolicy = new RetryPolicy(this, this.globalEndpointManager, this.connectionPolicy); this.resetSessionTokenRetryPolicy = retryPolicy; CpuMemoryMonitor.register(this); this.queryPlanCache = Collections.synchronizedMap(new SizeLimitingLRUCache(Constants.QUERYPLAN_CACHE_SIZE)); this.apiType = apiType; } catch (RuntimeException e) { logger.error("unexpected failure in initializing client.", e); close(); throw e; } } @Override public DiagnosticsClientConfig getConfig() { return diagnosticsClientConfig; } @Override public CosmosDiagnostics createDiagnostics() { return BridgeInternal.createCosmosDiagnostics(this, this.globalEndpointManager); } private void initializeGatewayConfigurationReader() { this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.globalEndpointManager); DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount(); if (databaseAccount == null) { logger.error("Client initialization failed." + " Check if the endpoint is reachable and if your auth token is valid. More info: https: throw new RuntimeException("Client initialization failed." + " Check if the endpoint is reachable and if your auth token is valid. 
More info: https: } this.useMultipleWriteLocations = this.connectionPolicy.isMultipleWriteRegionsEnabled() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); } private void updateGatewayProxy() { ((RxGatewayStoreModel)this.gatewayProxy).setGatewayServiceConfigurationReader(this.gatewayConfigurationReader); ((RxGatewayStoreModel)this.gatewayProxy).setCollectionCache(this.collectionCache); ((RxGatewayStoreModel)this.gatewayProxy).setPartitionKeyRangeCache(this.partitionKeyRangeCache); ((RxGatewayStoreModel)this.gatewayProxy).setUseMultipleWriteLocations(this.useMultipleWriteLocations); } public void serialize(CosmosClientMetadataCachesSnapshot state) { RxCollectionCache.serialize(state, this.collectionCache); } private void initializeDirectConnectivity() { this.addressResolver = new GlobalAddressResolver(this, this.reactorHttpClient, this.globalEndpointManager, this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy, this.apiType); this.storeClientFactory = new StoreClientFactory( this.addressResolver, this.diagnosticsClientConfig, this.configs, this.connectionPolicy, this.userAgentContainer, this.connectionSharingAcrossClientsEnabled, this.clientTelemetry ); this.createStoreModel(true); } DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; } RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient, ApiType apiType) { return new RxGatewayStoreModel( this, sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient, apiType); } private HttpClient httpClient() { HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs) .withMaxIdleConnectionTimeout(this.connectionPolicy.getIdleHttpConnectionTimeout()) .withPoolSize(this.connectionPolicy.getMaxConnectionPoolSize()) .withProxy(this.connectionPolicy.getProxy()) .withNetworkRequestTimeout(this.connectionPolicy.getHttpNetworkRequestTimeout()); if (connectionSharingAcrossClientsEnabled) { return SharedGatewayHttpClient.getOrCreateInstance(httpClientConfig, diagnosticsClientConfig); } else { diagnosticsClientConfig.withGatewayHttpClientConfig(httpClientConfig); return HttpClient.createFixed(httpClientConfig); } } private void createStoreModel(boolean subscribeRntbdStatus) { StoreClient storeClient = this.storeClientFactory.createStoreClient(this, this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, this.useMultipleWriteLocations ); this.storeModel = new ServerStoreModel(storeClient); } @Override public URI getServiceEndpoint() { return this.serviceEndpoint; } @Override public URI getWriteEndpoint() { return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null); } @Override public URI getReadEndpoint() { return 
globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null); } @Override public ConnectionPolicy getConnectionPolicy() { return this.connectionPolicy; } @Override public boolean isContentResponseOnWriteEnabled() { return contentResponseOnWriteEnabled; } @Override public ConsistencyLevel getConsistencyLevel() { return consistencyLevel; } @Override public ClientTelemetry getClientTelemetry() { return this.clientTelemetry; } @Override public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (database == null) { throw new IllegalArgumentException("Database"); } logger.debug("Creating a Database. id: [{}]", database.getId()); validateResource(database); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Create); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(database); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.DATABASE_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Create, ResourceType.Database, Paths.DATABASES_ROOT, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Deleting a Database. 
databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in deleting a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Reading a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in reading a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Database>> readDatabases(CosmosQueryRequestOptions options) { return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT); } private String parentResourceLinkToQueryLink(String parentResourceLink, ResourceType resourceTypeEnum) { switch (resourceTypeEnum) { case Database: return Paths.DATABASES_ROOT; case DocumentCollection: return Utils.joinPath(parentResourceLink, Paths.COLLECTIONS_PATH_SEGMENT); case Document: return Utils.joinPath(parentResourceLink, Paths.DOCUMENTS_PATH_SEGMENT); case Offer: return Paths.OFFERS_ROOT; case User: return Utils.joinPath(parentResourceLink, Paths.USERS_PATH_SEGMENT); case ClientEncryptionKey: return Utils.joinPath(parentResourceLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT); case Permission: return Utils.joinPath(parentResourceLink, Paths.PERMISSIONS_PATH_SEGMENT); case Attachment: return Utils.joinPath(parentResourceLink, Paths.ATTACHMENTS_PATH_SEGMENT); case StoredProcedure: return Utils.joinPath(parentResourceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); case Trigger: return Utils.joinPath(parentResourceLink, Paths.TRIGGERS_PATH_SEGMENT); case UserDefinedFunction: return Utils.joinPath(parentResourceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); case Conflict: return Utils.joinPath(parentResourceLink, Paths.CONFLICTS_PATH_SEGMENT); default: throw new IllegalArgumentException("resource type not supported"); } } private OperationContextAndListenerTuple getOperationContextAndListenerTuple(CosmosQueryRequestOptions options) { if (options == null) { return null; } return ImplementationBridgeHelpers.CosmosQueryRequestOptionsHelper.getCosmosQueryRequestOptionsAccessor().getOperationContext(options); } private OperationContextAndListenerTuple getOperationContextAndListenerTuple(RequestOptions options) { if (options == null) { return null; } return options.getOperationContextAndListenerTuple(); } private <T extends Resource> Flux<FeedResponse<T>> createQuery( String parentResourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum) { String resourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum); UUID correlationActivityIdOfRequestOptions = ImplementationBridgeHelpers .CosmosQueryRequestOptionsHelper .getCosmosQueryRequestOptionsAccessor() .getCorrelationActivityId(options); UUID correlationActivityId = correlationActivityIdOfRequestOptions != null ? correlationActivityIdOfRequestOptions : Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, resourceLink, ModelBridgeInternal.getPropertiesFromQueryRequestOptions(options)); return ObservableHelper.fluxInlineIfPossibleAsObs( () -> createQueryInternal( resourceLink, sqlQuery, options, klass, resourceTypeEnum, queryClient, correlationActivityId), invalidPartitionExceptionRetryPolicy); } private <T extends Resource> Flux<FeedResponse<T>> createQueryInternal( String resourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum, IDocumentQueryClient queryClient, UUID activityId) { Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory .createDocumentQueryExecutionContextAsync(this, queryClient, resourceTypeEnum, klass, sqlQuery, options, resourceLink, false, activityId, Configs.isQueryPlanCachingEnabled(), queryPlanCache); AtomicBoolean isFirstResponse = new AtomicBoolean(true); return executionContext.flatMap(iDocumentQueryExecutionContext -> { QueryInfo queryInfo = null; if (iDocumentQueryExecutionContext instanceof PipelinedDocumentQueryExecutionContext) { queryInfo = ((PipelinedDocumentQueryExecutionContext<T>) iDocumentQueryExecutionContext).getQueryInfo(); } QueryInfo finalQueryInfo = queryInfo; return iDocumentQueryExecutionContext.executeAsync() .map(tFeedResponse -> { if (finalQueryInfo != null) { if (finalQueryInfo.hasSelectValue()) { ModelBridgeInternal .addQueryInfoToFeedResponse(tFeedResponse, finalQueryInfo); } if (isFirstResponse.compareAndSet(true, false)) { ModelBridgeInternal.addQueryPlanDiagnosticsContextToFeedResponse(tFeedResponse, finalQueryInfo.getQueryPlanDiagnosticsContext()); } } return tFeedResponse; }); }); } @Override public Flux<FeedResponse<Database>> queryDatabases(String query, CosmosQueryRequestOptions options) { return queryDatabases(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database); } @Override public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink, DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink, DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Creating a Collection. 
databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId()); validateResource(collection); String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Create); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Create, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); }); } catch (Exception e) { logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Replacing a Collection. 
id: [{}]", collection.getId()); validateResource(collection); String path = Utils.joinPath(collection.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Replace); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { if (resourceResponse.getResource() != null) { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); } }); } catch (Exception e) { logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting a Collection. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.DELETE) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> deleteAllItemsByPartitionKey(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { RxStoreModel storeProxy = this.getStoreProxy(requestPopulated); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return storeProxy.processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { return populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> getStoreProxy(requestPopulated).processMessage(requestPopulated)); } private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> this.getStoreProxy(requestPopulated).processMessage(requestPopulated) .map(response -> { this.captureSessionToken(requestPopulated, response); return response; } )); } @Override public Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class, Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, CosmosQueryRequestOptions options) { return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection); } @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection); } private static String serializeProcedureParams(List<Object> objectArray) { String[] stringArray = new String[objectArray.size()]; for (int i = 0; i < objectArray.size(); ++i) { Object object = objectArray.get(i); if (object instanceof JsonSerializable) { stringArray[i] = ModelBridgeInternal.toJsonFromJsonSerializable((JsonSerializable) object); } else { try { stringArray[i] = mapper.writeValueAsString(object); } catch (IOException e) { throw new IllegalArgumentException("Can't serialize the object into the json string", e); } } } return String.format("[%s]", StringUtils.join(stringArray, ",")); } private static void validateResource(Resource resource) { if (!StringUtils.isEmpty(resource.getId())) { if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1 || resource.getId().indexOf('?') != -1 || resource.getId().indexOf(' throw new IllegalArgumentException("Id contains illegal chars."); } if (resource.getId().endsWith(" ")) { throw new IllegalArgumentException("Id ends with a space."); } } } private Map<String, String> getRequestHeaders(RequestOptions options, ResourceType resourceType, OperationType operationType) { Map<String, String> headers = new HashMap<>(); if (this.useMultipleWriteLocations) { headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString()); } if (consistencyLevel != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString()); } if (options == null) { if (!this.contentResponseOnWriteEnabled && resourceType.equals(ResourceType.Document) && operationType.isWriteOperation()) { headers.put(HttpConstants.HttpHeaders.PREFER, HttpConstants.HeaderValues.PREFER_RETURN_MINIMAL); } return headers; } Map<String, 
String> customOptions = options.getHeaders(); if (customOptions != null) { headers.putAll(customOptions); } boolean contentResponseOnWriteEnabled = this.contentResponseOnWriteEnabled; if (options.isContentResponseOnWriteEnabled() != null) { contentResponseOnWriteEnabled = options.isContentResponseOnWriteEnabled(); } if (!contentResponseOnWriteEnabled && resourceType.equals(ResourceType.Document) && operationType.isWriteOperation()) { headers.put(HttpConstants.HttpHeaders.PREFER, HttpConstants.HeaderValues.PREFER_RETURN_MINIMAL); } if (options.getIfMatchETag() != null) { headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getIfMatchETag()); } if(options.getIfNoneMatchETag() != null) { headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH, options.getIfNoneMatchETag()); } if (options.getConsistencyLevel() != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString()); } if (options.getIndexingDirective() != null) { headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString()); } if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) { String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude); } if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) { String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude); } if (!Strings.isNullOrEmpty(options.getSessionToken())) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken()); } if (options.getResourceTokenExpirySeconds() != null) { headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY, String.valueOf(options.getResourceTokenExpirySeconds())); } if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString()); } else if (options.getOfferType() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType()); } if (options.getOfferThroughput() == null) { if (options.getThroughputProperties() != null) { Offer offer = ModelBridgeInternal.getOfferFromThroughputProperties(options.getThroughputProperties()); final OfferAutoscaleSettings offerAutoscaleSettings = offer.getOfferAutoScaleSettings(); OfferAutoscaleAutoUpgradeProperties autoscaleAutoUpgradeProperties = null; if (offerAutoscaleSettings != null) { autoscaleAutoUpgradeProperties = offer.getOfferAutoScaleSettings().getAutoscaleAutoUpgradeProperties(); } if (offer.hasOfferThroughput() && (offerAutoscaleSettings != null && offerAutoscaleSettings.getMaxThroughput() >= 0 || autoscaleAutoUpgradeProperties != null && autoscaleAutoUpgradeProperties .getAutoscaleThroughputProperties() .getIncrementPercent() >= 0)) { throw new IllegalArgumentException("Autoscale provisioned throughput can not be configured with " + "fixed offer"); } if (offer.hasOfferThroughput()) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, String.valueOf(offer.getThroughput())); } else if (offer.getOfferAutoScaleSettings() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_AUTOPILOT_SETTINGS, ModelBridgeInternal.toJsonFromJsonSerializable(offer.getOfferAutoScaleSettings())); } } } if (options.isQuotaInfoEnabled()) { headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true)); } 
if (options.isScriptLoggingEnabled()) { headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true)); } if (options.getDedicatedGatewayRequestOptions() != null && options.getDedicatedGatewayRequestOptions().getMaxIntegratedCacheStaleness() != null) { headers.put(HttpConstants.HttpHeaders.DEDICATED_GATEWAY_PER_REQUEST_CACHE_STALENESS, String.valueOf(Utils.getMaxIntegratedCacheStalenessInMillis(options.getDedicatedGatewayRequestOptions()))); } return headers; } public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return this.resetSessionTokenRetryPolicy; } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Document document, RequestOptions options) { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return collectionObs .map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object document, RequestOptions options, Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) { return collectionObs.map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private void addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object objectDoc, RequestOptions options, DocumentCollection collection) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); PartitionKeyInternal partitionKeyInternal = null; if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){ partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else if (options != null && options.getPartitionKey() != null) { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey()); } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) { partitionKeyInternal = PartitionKeyInternal.getEmpty(); } else if (contentAsByteBuffer != null || objectDoc != null) { InternalObjectNode internalObjectNode; if (objectDoc instanceof InternalObjectNode) { internalObjectNode = (InternalObjectNode) objectDoc; } else if (objectDoc instanceof ObjectNode) { internalObjectNode = new InternalObjectNode((ObjectNode)objectDoc); } else if (contentAsByteBuffer != null) { contentAsByteBuffer.rewind(); internalObjectNode = new InternalObjectNode(contentAsByteBuffer); } else { throw new IllegalStateException("ContentAsByteBuffer and objectDoc are null"); } Instant serializationStartTime = Instant.now(); partitionKeyInternal = extractPartitionKeyValueFromDocument(internalObjectNode, partitionKeyDefinition); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTime, serializationEndTime, SerializationDiagnosticsContext.SerializationType.PARTITION_KEY_FETCH_SERIALIZATION ); SerializationDiagnosticsContext serializationDiagnosticsContext = 
BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } } else { throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation."); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } public static PartitionKeyInternal extractPartitionKeyValueFromDocument( InternalObjectNode document, PartitionKeyDefinition partitionKeyDefinition) { if (partitionKeyDefinition != null) { switch (partitionKeyDefinition.getKind()) { case HASH: String path = partitionKeyDefinition.getPaths().iterator().next(); List<String> parts = PathParser.getPathParts(path); if (parts.size() >= 1) { Object value = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, parts); if (value == null || value.getClass() == ObjectNode.class) { value = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } if (value instanceof PartitionKeyInternal) { return (PartitionKeyInternal) value; } else { return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false); } } break; case MULTI_HASH: Object[] partitionKeyValues = new Object[partitionKeyDefinition.getPaths().size()]; for(int pathIter = 0 ; pathIter < partitionKeyDefinition.getPaths().size(); pathIter++){ String partitionPath = partitionKeyDefinition.getPaths().get(pathIter); List<String> partitionPathParts = PathParser.getPathParts(partitionPath); partitionKeyValues[pathIter] = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, partitionPathParts); } return PartitionKeyInternal.fromObjectArray(partitionKeyValues, false); default: throw new IllegalArgumentException("Unrecognized Partition kind: " + partitionKeyDefinition.getKind()); } } return null; } private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) { if (StringUtils.isEmpty(documentCollectionLink)) { throw new IllegalArgumentException("documentCollectionLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = BridgeInternal.serializeJsonToByteBuffer(document, mapper); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { 
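// Attach the item-serialization timing captured above to the request's Cosmos diagnostics.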
serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return addPartitionKeyInformation(request, content, document, options, collectionObs); } private Mono<RxDocumentServiceRequest> getBatchDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, boolean disableAutomaticIdGeneration) { checkArgument(StringUtils.isNotEmpty(documentCollectionLink), "expected non empty documentCollectionLink"); checkNotNull(serverBatchRequest, "expected non null serverBatchRequest"); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = ByteBuffer.wrap(Utils.getUTF8Bytes(serverBatchRequest.getRequestBody())); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Batch); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Batch, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return collectionObs.map((Utils.ValueHolder<DocumentCollection> collectionValueHolder) -> { addBatchHeaders(request, serverBatchRequest, collectionValueHolder.v); return request; }); } private RxDocumentServiceRequest addBatchHeaders(RxDocumentServiceRequest request, ServerBatchRequest serverBatchRequest, DocumentCollection collection) { if(serverBatchRequest instanceof SinglePartitionKeyServerBatchRequest) { PartitionKey partitionKey = ((SinglePartitionKeyServerBatchRequest) serverBatchRequest).getPartitionKeyValue(); PartitionKeyInternal partitionKeyInternal; if (partitionKey.equals(PartitionKey.NONE)) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(partitionKey); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } else if(serverBatchRequest instanceof PartitionKeyRangeServerBatchRequest) { request.setPartitionKeyRangeIdentity(new PartitionKeyRangeIdentity(((PartitionKeyRangeServerBatchRequest) serverBatchRequest).getPartitionKeyRangeId())); } else { throw new UnsupportedOperationException("Unknown Server 
request."); } request.getHeaders().put(HttpConstants.HttpHeaders.IS_BATCH_REQUEST, Boolean.TRUE.toString()); request.getHeaders().put(HttpConstants.HttpHeaders.IS_BATCH_ATOMIC, String.valueOf(serverBatchRequest.isAtomicBatch())); request.getHeaders().put(HttpConstants.HttpHeaders.SHOULD_BATCH_CONTINUE_ON_ERROR, String.valueOf(serverBatchRequest.isShouldContinueOnError())); request.setNumberOfItemsInBatchRequest(serverBatchRequest.getOperations().size()); return request; } private Mono<RxDocumentServiceRequest> populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) { request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); if (this.masterKeyOrResourceToken != null || this.resourceTokensMap != null || this.cosmosAuthorizationTokenResolver != null || this.credential != null) { String resourceName = request.getResourceAddress(); String authorization = this.getUserAuthorizationToken( resourceName, request.getResourceType(), httpMethod, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, request.properties); try { authorization = URLEncoder.encode(authorization, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("Failed to encode authtoken.", e); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); } if (this.apiType != null) { request.getHeaders().put(HttpConstants.HttpHeaders.API_TYPE, this.apiType.toString()); } if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod)) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); } if (RequestVerb.PATCH.equals(httpMethod) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON_PATCH); } if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) { request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } MetadataDiagnosticsContext metadataDiagnosticsCtx = BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics); if (this.requiresFeedRangeFiltering(request)) { return request.getFeedRange() .populateFeedRangeFilteringHeaders( this.getPartitionKeyRangeCache(), request, this.collectionCache.resolveCollectionAsync(metadataDiagnosticsCtx, request)) .flatMap(this::populateAuthorizationHeader); } return this.populateAuthorizationHeader(request); } private boolean requiresFeedRangeFiltering(RxDocumentServiceRequest request) { if (request.getResourceType() != ResourceType.Document && request.getResourceType() != ResourceType.Conflict) { return false; } switch (request.getOperationType()) { case ReadFeed: case Query: case SqlQuery: return request.getFeedRange() != null; default: return false; } } @Override public Mono<RxDocumentServiceRequest> populateAuthorizationHeader(RxDocumentServiceRequest request) { if (request == null) { throw new IllegalArgumentException("request"); } if (this.authorizationTokenType == AuthorizationTokenType.AadToken) { return AadTokenAuthorizationHelper.getAuthorizationToken(this.tokenCredentialCache) .map(authorization -> { request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); return request; }); } else { return Mono.just(request); } } @Override public Mono<HttpHeaders> populateAuthorizationHeader(HttpHeaders httpHeaders) { if (httpHeaders == 
null) { throw new IllegalArgumentException("httpHeaders"); } if (this.authorizationTokenType == AuthorizationTokenType.AadToken) { return AadTokenAuthorizationHelper.getAuthorizationToken(this.tokenCredentialCache) .map(authorization -> { httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); return httpHeaders; }); } return Mono.just(httpHeaders); } @Override public AuthorizationTokenType getAuthorizationTokenType() { return this.authorizationTokenType; } @Override public String getUserAuthorizationToken(String resourceName, ResourceType resourceType, RequestVerb requestVerb, Map<String, String> headers, AuthorizationTokenType tokenType, Map<String, Object> properties) { if (this.cosmosAuthorizationTokenResolver != null) { return this.cosmosAuthorizationTokenResolver.getAuthorizationToken(requestVerb.toUpperCase(), resourceName, this.resolveCosmosResourceType(resourceType).toString(), properties != null ? Collections.unmodifiableMap(properties) : null); } else if (credential != null) { return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName, resourceType, headers); } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) { return masterKeyOrResourceToken; } else { assert resourceTokensMap != null; if(resourceType.equals(ResourceType.DatabaseAccount)) { return this.firstResourceTokenFromPermissionFeed; } return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers); } } private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) { CosmosResourceType cosmosResourceType = ModelBridgeInternal.fromServiceSerializedFormat(resourceType.toString()); if (cosmosResourceType == null) { return CosmosResourceType.SYSTEM; } return cosmosResourceType; } void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) { this.sessionContainer.setSessionToken(request, response.getResponseHeaders()); } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { RxStoreModel storeProxy = this.getStoreProxy(requestPopulated); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return storeProxy.processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { Map<String, String> headers = requestPopulated.getHeaders(); assert (headers != null); headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true"); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated, operationContextAndListenerTuple) .map(response -> { this.captureSessionToken(requestPopulated, response); return response; } ); }); } private Mono<RxDocumentServiceResponse> 
replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.PUT) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.PATCH) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } @Override public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy); } private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) { try { logger.debug("Creating a Document. collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(requestRetryPolicy, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Create); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> create(request, requestRetryPolicy, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance); } private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Document. 
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(retryPolicyInstance, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Upsert); Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = Utils.getCollectionName(documentLink); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Document typedDocument = documentFromObject(document, mapper); return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = document.getSelfLink(); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (document == null) { throw new IllegalArgumentException("document"); } return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a database due to [{}]", e.getMessage()); return Mono.error(e); } } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { if (document == null) { throw new IllegalArgumentException("document"); } logger.debug("Replacing a Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Document, OperationType.Replace); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = serializeJsonToByteBuffer(document); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTime, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs); return requestObs.flatMap(req -> replace(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class))); } @Override public Mono<ResourceResponse<Document>> patchDocument(String documentLink, CosmosPatchOperations cosmosPatchOperations, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> patchDocumentInternal(documentLink, cosmosPatchOperations, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Document>> patchDocumentInternal(String documentLink, CosmosPatchOperations cosmosPatchOperations, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { checkArgument(StringUtils.isNotEmpty(documentLink), "expected non empty documentLink"); checkNotNull(cosmosPatchOperations, "expected non null cosmosPatchOperations"); logger.debug("Running patch operations on Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Document, OperationType.Patch); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = ByteBuffer.wrap(PatchUtil.serializeCosmosPatchToByteArray(cosmosPatchOperations, options)); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTime, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Patch, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation( request, null, null, options, collectionObs); return requestObs.flatMap(req -> patch(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class))); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, null, options, requestRetryPolicy), requestRetryPolicy); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, InternalObjectNode internalObjectNode, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, internalObjectNode, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, InternalObjectNode internalObjectNode, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Deleting a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, internalObjectNode, options, collectionObs); return requestObs.flatMap(req -> this .delete(req, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class))); } catch (Exception e) { logger.debug("Failure in deleting a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> deleteAllDocumentsByPartitionKey(String collectionLink, PartitionKey partitionKey, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteAllDocumentsByPartitionKeyInternal(collectionLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteAllDocumentsByPartitionKeyInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting all items by Partition Key. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.PartitionKey, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.PartitionKey, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> this .deleteAllItemsByPartitionKey(req, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class))); } catch (Exception e) { logger.debug("Failure in deleting documents due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Reading a Document. documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); }); } catch (Exception e) { logger.debug("Failure in reading a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Flux<FeedResponse<Document>> readDocuments(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return queryDocuments(collectionLink, "SELECT * FROM r", options); } @Override public <T> Mono<FeedResponse<T>> readMany( List<CosmosItemIdentity> itemIdentityList, String collectionLink, CosmosQueryRequestOptions options, Class<T> klass) { String resourceLink = parentResourceLinkToQueryLink(collectionLink, ResourceType.Document); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Query, ResourceType.Document, collectionLink, null ); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = 
collectionCache.resolveCollectionAsync(null, request); return collectionObs .flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } final PartitionKeyDefinition pkDefinition = collection.getPartitionKey(); Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache .tryLookupAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), null, null); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { Map<PartitionKeyRange, List<CosmosItemIdentity>> partitionRangeItemKeyMap = new HashMap<>(); CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } itemIdentityList .forEach(itemIdentity -> { String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString( BridgeInternal.getPartitionKeyInternal( itemIdentity.getPartitionKey()), pkDefinition); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); if (partitionRangeItemKeyMap.get(range) == null) { List<CosmosItemIdentity> list = new ArrayList<>(); list.add(itemIdentity); partitionRangeItemKeyMap.put(range, list); } else { List<CosmosItemIdentity> pairs = partitionRangeItemKeyMap.get(range); pairs.add(itemIdentity); partitionRangeItemKeyMap.put(range, pairs); } }); Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap; rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey()); return createReadManyQuery( resourceLink, new SqlQuerySpec(DUMMY_SQL_QUERY), options, Document.class, ResourceType.Document, collection, Collections.unmodifiableMap(rangeQueryMap)) .collectList() .map(feedList -> { List<T> finalList = new ArrayList<>(); HashMap<String, String> headers = new HashMap<>(); ConcurrentMap<String, QueryMetrics> aggregatedQueryMetrics = new ConcurrentHashMap<>(); double requestCharge = 0; for (FeedResponse<Document> page : feedList) { ConcurrentMap<String, QueryMetrics> pageQueryMetrics = ModelBridgeInternal.queryMetrics(page); if (pageQueryMetrics != null) { pageQueryMetrics.forEach( aggregatedQueryMetrics::putIfAbsent); } requestCharge += page.getRequestCharge(); finalList.addAll(page.getResults().stream().map(document -> ModelBridgeInternal.toObjectFromJsonSerializable(document, klass)).collect(Collectors.toList())); } headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double .toString(requestCharge)); FeedResponse<T> frp = BridgeInternal .createFeedResponse(finalList, headers); return frp; }); }); } ); } private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap( Map<PartitionKeyRange, List<CosmosItemIdentity>> partitionRangeItemKeyMap, PartitionKeyDefinition partitionKeyDefinition) { Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>(); String partitionKeySelector = createPkSelector(partitionKeyDefinition); for(Map.Entry<PartitionKeyRange, List<CosmosItemIdentity>> entry: partitionRangeItemKeyMap.entrySet()) { SqlQuerySpec sqlQuerySpec; if (partitionKeySelector.equals("[\"id\"]")) { sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector); } else { sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector); } rangeQueryMap.put(entry.getKey(), sqlQuerySpec); } return rangeQueryMap; } private 
SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame( List<CosmosItemIdentity> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { CosmosItemIdentity itemIdentity = idPartitionKeyPairList.get(i); String idValue = itemIdentity.getId(); String idParamName = "@param" + i; PartitionKey pkValueAsPartitionKey = itemIdentity.getPartitionKey(); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); if (!Objects.equals(idValue, pkValue)) { continue; } parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append(idParamName); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(", "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private SqlQuerySpec createReadManyQuerySpec(List<CosmosItemIdentity> itemIdentities, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE ( "); for (int i = 0; i < itemIdentities.size(); i++) { CosmosItemIdentity itemIdentity = itemIdentities.get(i); PartitionKey pkValueAsPartitionKey = itemIdentity.getPartitionKey(); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); String pkParamName = "@param" + (2 * i); parameters.add(new SqlParameter(pkParamName, pkValue)); String idValue = itemIdentity.getId(); String idParamName = "@param" + (2 * i + 1); parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append("("); queryStringBuilder.append("c.id = "); queryStringBuilder.append(idParamName); queryStringBuilder.append(" AND "); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); queryStringBuilder.append(" )"); if (i < itemIdentities.size() - 1) { queryStringBuilder.append(" OR "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) { return partitionKeyDefinition.getPaths() .stream() .map(pathPart -> StringUtils.substring(pathPart, 1)) .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\")) .map(part -> "[\"" + part + "\"]") .collect(Collectors.joining()); } private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery( String parentResourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum, DocumentCollection collection, Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) { UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createReadManyQueryAsync(this, queryClient, collection.getResourceId(), sqlQuery, rangeQueryMap, options, collection.getResourceId(), parentResourceLink, activityId, klass, resourceTypeEnum); return executionContext.flatMap(IDocumentQueryExecutionContext<T>::executeAsync); } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryDocuments(collectionLink, new SqlQuerySpec(query), options); } private IDocumentQueryClient documentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl, OperationContextAndListenerTuple operationContextAndListenerTuple) { return new IDocumentQueryClient () { @Override public RxCollectionCache getCollectionCache() { return RxDocumentClientImpl.this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return RxDocumentClientImpl.this.partitionKeyRangeCache; } @Override public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy; } @Override public ConsistencyLevel getDefaultConsistencyLevelAsync() { return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel(); } @Override public ConsistencyLevel getDesiredConsistencyLevelAsync() { return RxDocumentClientImpl.this.consistencyLevel; } @Override public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) { if (operationContextAndListenerTuple == null) { return RxDocumentClientImpl.this.query(request).single(); } else { final OperationListener listener = operationContextAndListenerTuple.getOperationListener(); final OperationContext operationContext = operationContextAndListenerTuple.getOperationContext(); request.getHeaders().put(HttpConstants.HttpHeaders.CORRELATED_ACTIVITY_ID, operationContext.getCorrelationActivityId()); listener.requestListener(operationContext, request); return RxDocumentClientImpl.this.query(request).single().doOnNext( response -> listener.responseListener(operationContext, response) ).doOnError( ex -> listener.exceptionListener(operationContext, ex) ); } } @Override public QueryCompatibilityMode getQueryCompatibilityMode() { return QueryCompatibilityMode.Default; } @Override public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) { return null; } }; } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { SqlQuerySpecLogger.getInstance().logQuery(querySpec); return createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document); } @Override public Flux<FeedResponse<Document>> queryDocumentChangeFeed( final DocumentCollection collection, final CosmosChangeFeedRequestOptions changeFeedOptions) { checkNotNull(collection, "Argument 'collection' must not be null."); ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<>( this, ResourceType.Document, Document.class, collection.getAltLink(), collection.getResourceId(), changeFeedOptions); return changeFeedQueryImpl.executeAsync(); } @Override public Flux<FeedResponse<Document>> readAllDocuments( String collectionLink, PartitionKey partitionKey, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (partitionKey == null) { throw new 
IllegalArgumentException("partitionKey"); } RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Query, ResourceType.Document, collectionLink, null ); Flux<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request).flux(); return collectionObs.flatMap(documentCollectionResourceResponse -> { DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } PartitionKeyDefinition pkDefinition = collection.getPartitionKey(); String pkSelector = createPkSelector(pkDefinition); SqlQuerySpec querySpec = createLogicalPartitionScanQuerySpec(partitionKey, pkSelector); String resourceLink = parentResourceLinkToQueryLink(collectionLink, ResourceType.Document); UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); final CosmosQueryRequestOptions effectiveOptions = ModelBridgeInternal.createQueryRequestOptions(options); InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, resourceLink, ModelBridgeInternal.getPropertiesFromQueryRequestOptions(effectiveOptions)); return ObservableHelper.fluxInlineIfPossibleAsObs( () -> { Flux<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = this.partitionKeyRangeCache .tryLookupAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), null, null).flux(); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString( BridgeInternal.getPartitionKeyInternal(partitionKey), pkDefinition); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); return createQueryInternal( resourceLink, querySpec, ModelBridgeInternal.setPartitionKeyRangeIdInternal(effectiveOptions, range.getId()), Document.class, ResourceType.Document, queryClient, activityId); }); }, invalidPartitionExceptionRetryPolicy); }); } @Override public Map<String, PartitionedQueryExecutionInfo> getQueryPlanCache() { return queryPlanCache; } @Override public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class, Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT)); } private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } validateResource(storedProcedure); String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, operationType); RxDocumentServiceRequest request = 
RxDocumentServiceRequest.create(this, operationType, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); return request; } private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink, UserDefinedFunction udf, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (udf == null) { throw new IllegalArgumentException("udf"); } validateResource(udf); String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Create); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } logger.debug("Replacing a StoredProcedure. storedProcedure id [{}]", storedProcedure.getId()); RxDocumentClientImpl.validateResource(storedProcedure); String path = Utils.joinPath(storedProcedure.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Deleting a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Reading a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class, Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT)); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, List<Object> procedureParams) { return this.executeStoredProcedure(storedProcedureLink, null, procedureParams); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, RequestOptions options, List<Object> procedureParams) { DocumentClientRetryPolicy documentClientRetryPolicy = 
this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams, documentClientRetryPolicy), documentClientRetryPolicy); } @Override public Mono<CosmosBatchResponse> executeBatchRequest(String collectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> executeBatchRequestInternal(collectionLink, serverBatchRequest, options, documentClientRetryPolicy, disableAutomaticIdGeneration), documentClientRetryPolicy); } private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink, RequestOptions options, List<Object> procedureParams, DocumentClientRetryPolicy retryPolicy) { try { logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.ExecuteJavaScript); requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.ExecuteJavaScript, ResourceType.StoredProcedure, path, procedureParams != null && !procedureParams.isEmpty() ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "", requestHeaders, options); if (retryPolicy != null) { retryPolicy.onBeforeSendRequest(request); } Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> create(request, retryPolicy, getOperationContextAndListenerTuple(options)) .map(response -> { this.captureSessionToken(request, response); return toStoredProcedureResponse(response); })); } catch (Exception e) { logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<CosmosBatchResponse> executeBatchRequestInternal(String collectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, DocumentClientRetryPolicy requestRetryPolicy, boolean disableAutomaticIdGeneration) { try { logger.debug("Executing a Batch request with number of operations {}", serverBatchRequest.getOperations().size()); Mono<RxDocumentServiceRequest> requestObs = getBatchDocumentRequest(requestRetryPolicy, collectionLink, serverBatchRequest, options, disableAutomaticIdGeneration); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> create(request, requestRetryPolicy, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> BatchResponseParser.fromDocumentServiceResponse(serviceResponse, serverBatchRequest, true)); } catch (Exception ex) { logger.debug("Failure in executing a batch due to [{}]", ex.getMessage(), ex); return Mono.error(ex); } } @Override public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> createTriggerInternal(String 
collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a Trigger. collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Trigger. collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (trigger == null) { throw new IllegalArgumentException("trigger"); } RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Trigger, path, trigger, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (trigger == null) { throw new IllegalArgumentException("trigger"); } logger.debug("Replacing a Trigger. 
trigger id [{}]", trigger.getId()); RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(trigger.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Trigger, path, trigger, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Deleting a Trigger. triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Reading a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Trigger, Trigger.class, Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryTriggers(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger); } @Override public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a UserDefinedFunction. 
collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (udf == null) { throw new IllegalArgumentException("udf"); } logger.debug("Replacing a UserDefinedFunction. udf id [{}]", udf.getId()); validateResource(udf); String path = Utils.joinPath(udf.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Deleting a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Reading a UserDefinedFunction. udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class, Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class, ResourceType.UserDefinedFunction); } @Override public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Conflict, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Conflict, Conflict.class, Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryConflicts(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict); } @Override public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Deleting a Conflict. 
conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Conflict, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a User. databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a User. 
databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (user == null) { throw new IllegalArgumentException("user"); } RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.User, path, user, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (user == null) { throw new IllegalArgumentException("user"); } logger.debug("Replacing a User. user id [{}]", user.getId()); RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(user.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.User, path, user, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Deleting a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Reading a User. userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, CosmosQueryRequestOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<ClientEncryptionKey>> readClientEncryptionKey(String clientEncryptionKeyLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readClientEncryptionKeyInternal(clientEncryptionKeyLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> readClientEncryptionKeyInternal(String clientEncryptionKeyLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(clientEncryptionKeyLink)) { throw new IllegalArgumentException("clientEncryptionKeyLink"); } logger.debug("Reading a client encryption key. 
clientEncryptionKeyLink [{}]", clientEncryptionKeyLink); String path = Utils.joinPath(clientEncryptionKeyLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.ClientEncryptionKey, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in reading a client encryption key due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<ClientEncryptionKey>> createClientEncryptionKey(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createClientEncryptionKeyInternal(databaseLink, clientEncryptionKey, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> createClientEncryptionKeyInternal(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a client encryption key. databaseLink [{}], clientEncryptionKey id [{}]", databaseLink, clientEncryptionKey.getId()); RxDocumentServiceRequest request = getClientEncryptionKeyRequest(databaseLink, clientEncryptionKey, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in creating a client encryption key due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getClientEncryptionKeyRequest(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (clientEncryptionKey == null) { throw new IllegalArgumentException("clientEncryptionKey"); } RxDocumentClientImpl.validateResource(clientEncryptionKey); String path = Utils.joinPath(databaseLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.ClientEncryptionKey, path, clientEncryptionKey, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<ClientEncryptionKey>> replaceClientEncryptionKey(ClientEncryptionKey clientEncryptionKey, String nameBasedLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceClientEncryptionKeyInternal(clientEncryptionKey, nameBasedLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> replaceClientEncryptionKeyInternal(ClientEncryptionKey clientEncryptionKey, String nameBasedLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (clientEncryptionKey 
== null) { throw new IllegalArgumentException("clientEncryptionKey"); } logger.debug("Replacing a clientEncryptionKey. clientEncryptionKey id [{}]", clientEncryptionKey.getId()); RxDocumentClientImpl.validateResource(clientEncryptionKey); String path = Utils.joinPath(nameBasedLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.ClientEncryptionKey, path, clientEncryptionKey, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in replacing a clientEncryptionKey due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<ClientEncryptionKey>> readClientEncryptionKeys(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.ClientEncryptionKey, ClientEncryptionKey.class, Utils.joinPath(databaseLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT)); } @Override public Flux<FeedResponse<ClientEncryptionKey>> queryClientEncryptionKeys(String databaseLink, String query, CosmosQueryRequestOptions options) { return queryClientEncryptionKeys(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<ClientEncryptionKey>> queryClientEncryptionKeys(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, ClientEncryptionKey.class, ResourceType.ClientEncryptionKey); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Permission. userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } if (permission == null) { throw new IllegalArgumentException("permission"); } RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Permission, path, permission, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (permission == null) { throw new IllegalArgumentException("permission"); } logger.debug("Replacing a Permission. 
permission id [{}]", permission.getId()); RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(permission.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Permission, path, permission, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Deleting a Permission. permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance ) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Reading a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Permission>> readPermissions(String userLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } return readFeed(options, ResourceType.Permission, Permission.class, Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, CosmosQueryRequestOptions options) { return queryPermissions(userLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission); } @Override public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) { try { if (offer == null) { throw new IllegalArgumentException("offer"); } logger.debug("Replacing an Offer. offer id [{}]", offer.getId()); RxDocumentClientImpl.validateResource(offer); String path = Utils.joinPath(offer.getSelfLink(), null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Offer, path, offer, null, null); return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Offer>> readOffer(String offerLink) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(offerLink)) { throw new IllegalArgumentException("offerLink"); } logger.debug("Reading an Offer. 
offerLink [{}]", offerLink); String path = Utils.joinPath(offerLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Offer, path, (HashMap<String, String>)null, null); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Offer>> readOffers(CosmosQueryRequestOptions options) { return readFeed(options, ResourceType.Offer, Offer.class, Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null)); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(CosmosQueryRequestOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new CosmosQueryRequestOptions(); } Integer maxItemCount = ModelBridgeInternal.getMaxItemCountFromQueryRequestOptions(options); int maxPageSize = maxItemCount != null ? maxItemCount : -1; final CosmosQueryRequestOptions finalCosmosQueryRequestOptions = options; DocumentClientRetryPolicy retryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalCosmosQueryRequestOptions); retryPolicy.onBeforeSendRequest(request); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> ObservableHelper .inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), retryPolicy); return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override public Flux<FeedResponse<Offer>> queryOffers(String query, CosmosQueryRequestOptions options) { return queryOffers(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer); } @Override public Mono<DatabaseAccount> getDatabaseAccount() { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy), documentClientRetryPolicy); } @Override public DatabaseAccount getLatestDatabaseAccount() { return this.globalEndpointManager.getLatestDatabaseAccount(); } private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Getting Database Account"); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null); return this.read(request, documentClientRetryPolicy).map(ModelBridgeInternal::toDatabaseAccount); } catch (Exception e) { logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), 
e); return Mono.error(e); } } public Object getSession() { return this.sessionContainer; } public void setSession(Object sessionContainer) { this.sessionContainer = (SessionContainer) sessionContainer; } @Override public RxClientCollectionCache getCollectionCache() { return this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return partitionKeyRangeCache; } public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { return Flux.defer(() -> { RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DatabaseAccount, "", null, (Object) null); return this.populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> { requestPopulated.setEndpointOverride(endpoint); return this.gatewayProxy.processMessage(requestPopulated).doOnError(e -> { String message = String.format("Failed to retrieve database account information. %s", e.getCause() != null ? e.getCause().toString() : e.toString()); logger.warn(message); }).map(rsp -> rsp.getResource(DatabaseAccount.class)) .doOnNext(databaseAccount -> this.useMultipleWriteLocations = this.connectionPolicy.isMultipleWriteRegionsEnabled() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount)); }); }); } /** * Certain requests must be routed through gateway even when the client connectivity mode is direct. * * @param request * @return RxStoreModel */ private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) { if (request.UseGatewayMode) { return this.gatewayProxy; } ResourceType resourceType = request.getResourceType(); OperationType operationType = request.getOperationType(); if (resourceType == ResourceType.Offer || resourceType == ResourceType.ClientEncryptionKey || resourceType.isScript() && operationType != OperationType.ExecuteJavaScript || resourceType == ResourceType.PartitionKeyRange || resourceType == ResourceType.PartitionKey && operationType == OperationType.Delete) { return this.gatewayProxy; } if (operationType == OperationType.Create || operationType == OperationType.Upsert) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection || resourceType == ResourceType.Permission) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Delete) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Replace) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Read) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else { if ((operationType == OperationType.Query || operationType == OperationType.SqlQuery || operationType == OperationType.ReadFeed) && Utils.isCollectionChild(request.getResourceType())) { if (request.getPartitionKeyRangeIdentity() == null && request.getHeaders().get(HttpConstants.HttpHeaders.PARTITION_KEY) == null) { return this.gatewayProxy; } } return this.storeModel; } } @Override public void close() { logger.info("Attempting to close client {}", this.clientId); if (!closed.getAndSet(true)) { this.activeClientsCnt.decrementAndGet(); logger.info("Shutting down ..."); logger.info("Closing Global Endpoint Manager ..."); 
LifeCycleUtils.closeQuietly(this.globalEndpointManager); logger.info("Closing StoreClientFactory ..."); LifeCycleUtils.closeQuietly(this.storeClientFactory); logger.info("Shutting down reactorHttpClient ..."); LifeCycleUtils.closeQuietly(this.reactorHttpClient); logger.info("Shutting down CpuMonitor ..."); CpuMemoryMonitor.unregister(this); if (this.throughputControlEnabled.get()) { logger.info("Closing ThroughputControlStore ..."); this.throughputControlStore.close(); } logger.info("Shutting down completed."); } else { logger.warn("Already shutdown!"); } } @Override public ItemDeserializer getItemDeserializer() { return this.itemDeserializer; } @Override public synchronized void enableThroughputControlGroup(ThroughputControlGroupInternal group) { checkNotNull(group, "Throughput control group can not be null"); if (this.throughputControlEnabled.compareAndSet(false, true)) { this.throughputControlStore = new ThroughputControlStore( this.collectionCache, this.connectionPolicy.getConnectionMode(), this.partitionKeyRangeCache); this.storeModel.enableThroughputControl(throughputControlStore); } this.throughputControlStore.enableThroughputControlGroup(group); } private static SqlQuerySpec createLogicalPartitionScanQuerySpec( PartitionKey partitionKey, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE"); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(partitionKey); String pkParamName = "@pkValue"; parameters.add(new SqlParameter(pkParamName, pkValue)); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } @Override public Mono<List<FeedRange>> getFeedRanges(String collectionLink) { InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, collectionLink, new HashMap<>()); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Query, ResourceType.Document, collectionLink, null); invalidPartitionExceptionRetryPolicy.onBeforeSendRequest(request); return ObservableHelper.inlineIfPossibleAsObs( () -> getFeedRangesInternal(request, collectionLink), invalidPartitionExceptionRetryPolicy); } private Mono<List<FeedRange>> getFeedRangesInternal(RxDocumentServiceRequest request, String collectionLink) { logger.debug("getFeedRange collectionLink=[{}]", collectionLink); if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request); return collectionObs.flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } Mono<Utils.ValueHolder<List<PartitionKeyRange>>> valueHolderMono = partitionKeyRangeCache .tryGetOverlappingRangesAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), RANGE_INCLUDING_ALL_PARTITION_KEY_RANGES, true, null); return valueHolderMono.map(partitionKeyRangeList -> toFeedRanges(partitionKeyRangeList, request)); }); } private static List<FeedRange> toFeedRanges( 
Utils.ValueHolder<List<PartitionKeyRange>> partitionKeyRangeListValueHolder, RxDocumentServiceRequest request) { final List<PartitionKeyRange> partitionKeyRangeList = partitionKeyRangeListValueHolder.v; if (partitionKeyRangeList == null) { request.forceNameCacheRefresh = true; throw new InvalidPartitionException(); } List<FeedRange> feedRanges = new ArrayList<>(); partitionKeyRangeList.forEach(pkRange -> feedRanges.add(toFeedRange(pkRange))); return feedRanges; } private static FeedRange toFeedRange(PartitionKeyRange pkRange) { return new FeedRangeEpkImpl(pkRange.toRange()); } }
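The class above wraps nearly every public operation in the same pattern: obtain a fresh DocumentClientRetryPolicy from resetSessionTokenRetryPolicy.getRequestPolicy(), call onBeforeSendRequest(request) so the policy can stamp routing and session state onto the request, and hand the deferred operation to ObservableHelper.inlineIfPossibleAsObs, which re-runs it for as long as the policy allows. Below is a minimal, self-contained sketch of that retry-wrapping idea. All names here (SimpleRetryPolicy, RetryDecision, RetryHelper) are hypothetical stand-ins rather than SDK types, and only Reactor Core is assumed on the classpath; this illustrates the pattern, it is not the SDK's implementation.

import reactor.core.publisher.Mono;
import java.time.Duration;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;

// Hypothetical stand-in for DocumentClientRetryPolicy: decides, per failure, whether to retry and after what delay.
interface SimpleRetryPolicy {
    Mono<RetryDecision> shouldRetry(Throwable error);
}

// Hypothetical value type carrying the policy's decision.
final class RetryDecision {
    final boolean shouldRetry;
    final Duration backOff;
    RetryDecision(boolean shouldRetry, Duration backOff) {
        this.shouldRetry = shouldRetry;
        this.backOff = backOff;
    }
}

public final class RetryHelper {
    // Hypothetical stand-in for ObservableHelper.inlineIfPossibleAsObs: run the deferred operation and,
    // on error, consult the policy; re-subscribe after the requested back-off for as long as it says retry.
    static <T> Mono<T> withRetry(Callable<Mono<T>> operation, SimpleRetryPolicy policy) {
        return Mono.defer(() -> {
            try {
                return operation.call();
            } catch (Exception e) {
                return Mono.<T>error(e);
            }
        }).onErrorResume(error ->
            policy.shouldRetry(error).flatMap(decision ->
                decision.shouldRetry
                    ? withRetry(operation, policy).delaySubscription(decision.backOff)
                    : Mono.<T>error(error)));
    }

    public static void main(String[] args) {
        AtomicInteger attempts = new AtomicInteger();
        // A fresh policy per logical operation, mirroring resetSessionTokenRetryPolicy.getRequestPolicy().
        SimpleRetryPolicy upToThreeAttempts = error ->
            Mono.just(new RetryDecision(attempts.get() < 3, Duration.ofMillis(10)));
        Callable<Mono<String>> operation = () -> attempts.incrementAndGet() < 3
            ? Mono.error(new IllegalStateException("transient failure " + attempts.get()))
            : Mono.just("succeeded on attempt " + attempts.get());
        System.out.println(withRetry(operation, upToThreeAttempts).block()); // prints: succeeded on attempt 3
    }
}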
class RxDocumentClientImpl implements AsyncDocumentClient, IAuthorizationTokenProvider, CpuMemoryListener, DiagnosticsClientContext { private static final String tempMachineId = "uuid:" + UUID.randomUUID(); private static final AtomicInteger activeClientsCnt = new AtomicInteger(0); private static final AtomicInteger clientIdGenerator = new AtomicInteger(0); private static final Range<String> RANGE_INCLUDING_ALL_PARTITION_KEY_RANGES = new Range<>( PartitionKeyInternalHelper.MinimumInclusiveEffectivePartitionKey, PartitionKeyInternalHelper.MaximumExclusiveEffectivePartitionKey, true, false); private static final String DUMMY_SQL_QUERY = "this is dummy and only used in creating " + "ParallelDocumentQueryExecutioncontext, but not used"; private final static ObjectMapper mapper = Utils.getSimpleObjectMapper(); private final ItemDeserializer itemDeserializer = new ItemDeserializer.JsonDeserializer(); private final Logger logger = LoggerFactory.getLogger(RxDocumentClientImpl.class); private final String masterKeyOrResourceToken; private final URI serviceEndpoint; private final ConnectionPolicy connectionPolicy; private final ConsistencyLevel consistencyLevel; private final BaseAuthorizationTokenProvider authorizationTokenProvider; private final UserAgentContainer userAgentContainer; private final boolean hasAuthKeyResourceToken; private final Configs configs; private final boolean connectionSharingAcrossClientsEnabled; private AzureKeyCredential credential; private final TokenCredential tokenCredential; private String[] tokenCredentialScopes; private SimpleTokenCache tokenCredentialCache; private CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver; AuthorizationTokenType authorizationTokenType; private SessionContainer sessionContainer; private String firstResourceTokenFromPermissionFeed = StringUtils.EMPTY; private RxClientCollectionCache collectionCache; private RxStoreModel gatewayProxy; private RxStoreModel storeModel; private GlobalAddressResolver addressResolver; private RxPartitionKeyRangeCache partitionKeyRangeCache; private Map<String, List<PartitionKeyAndResourceTokenPair>> resourceTokensMap; private final boolean contentResponseOnWriteEnabled; private Map<String, PartitionedQueryExecutionInfo> queryPlanCache; private final AtomicBoolean closed = new AtomicBoolean(false); private final int clientId; private ClientTelemetry clientTelemetry; private ApiType apiType; private IRetryPolicyFactory resetSessionTokenRetryPolicy; /** * Compatibility mode: Allows to specify compatibility mode used by client when * making query requests. Should be removed when application/sql is no longer * supported. 
*/ private final QueryCompatibilityMode queryCompatibilityMode = QueryCompatibilityMode.Default; private final GlobalEndpointManager globalEndpointManager; private final RetryPolicy retryPolicy; private HttpClient reactorHttpClient; private Function<HttpClient, HttpClient> httpClientInterceptor; private volatile boolean useMultipleWriteLocations; private StoreClientFactory storeClientFactory; private GatewayServiceConfigurationReader gatewayConfigurationReader; private final DiagnosticsClientConfig diagnosticsClientConfig; private final AtomicBoolean throughputControlEnabled; private ThroughputControlStore throughputControlStore; public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, AzureKeyCredential credential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, credential, null, sessionCapturingOverride, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } public RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, CosmosAuthorizationTokenResolver cosmosAuthorizationTokenResolver, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverride, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, permissionFeed, connectionPolicy, consistencyLevel, configs, credential, tokenCredential, sessionCapturingOverride, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); this.cosmosAuthorizationTokenResolver = cosmosAuthorizationTokenResolver; } private RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, List<Permission> permissionFeed, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { this(serviceEndpoint, masterKeyOrResourceToken, connectionPolicy, consistencyLevel, configs, credential, tokenCredential, sessionCapturingOverrideEnabled, connectionSharingAcrossClientsEnabled, contentResponseOnWriteEnabled, metadataCachesSnapshot, apiType); if (permissionFeed != null && permissionFeed.size() > 0) { this.resourceTokensMap = new HashMap<>(); for (Permission permission : permissionFeed) { String[] segments = StringUtils.split(permission.getResourceLink(), Constants.Properties.PATH_SEPARATOR.charAt(0)); if (segments.length <= 0) { throw new IllegalArgumentException("resourceLink"); } List<PartitionKeyAndResourceTokenPair> partitionKeyAndResourceTokenPairs = null; PathInfo pathInfo = new PathInfo(false, 
StringUtils.EMPTY, StringUtils.EMPTY, false); if (!PathsHelper.tryParsePathSegments(permission.getResourceLink(), pathInfo, null)) { throw new IllegalArgumentException(permission.getResourceLink()); } partitionKeyAndResourceTokenPairs = resourceTokensMap.get(pathInfo.resourceIdOrFullName); if (partitionKeyAndResourceTokenPairs == null) { partitionKeyAndResourceTokenPairs = new ArrayList<>(); this.resourceTokensMap.put(pathInfo.resourceIdOrFullName, partitionKeyAndResourceTokenPairs); } PartitionKey partitionKey = permission.getResourcePartitionKey(); partitionKeyAndResourceTokenPairs.add(new PartitionKeyAndResourceTokenPair( partitionKey != null ? BridgeInternal.getPartitionKeyInternal(partitionKey) : PartitionKeyInternal.Empty, permission.getToken())); logger.debug("Initializing resource token map , with map key [{}] , partition key [{}] and resource token [{}]", pathInfo.resourceIdOrFullName, partitionKey != null ? partitionKey.toString() : null, permission.getToken()); } if(this.resourceTokensMap.isEmpty()) { throw new IllegalArgumentException("permissionFeed"); } String firstToken = permissionFeed.get(0).getToken(); if(ResourceTokenAuthorizationHelper.isResourceToken(firstToken)) { this.firstResourceTokenFromPermissionFeed = firstToken; } } } RxDocumentClientImpl(URI serviceEndpoint, String masterKeyOrResourceToken, ConnectionPolicy connectionPolicy, ConsistencyLevel consistencyLevel, Configs configs, AzureKeyCredential credential, TokenCredential tokenCredential, boolean sessionCapturingOverrideEnabled, boolean connectionSharingAcrossClientsEnabled, boolean contentResponseOnWriteEnabled, CosmosClientMetadataCachesSnapshot metadataCachesSnapshot, ApiType apiType) { activeClientsCnt.incrementAndGet(); this.clientId = clientIdGenerator.incrementAndGet(); this.diagnosticsClientConfig = new DiagnosticsClientConfig(); this.diagnosticsClientConfig.withClientId(this.clientId); this.diagnosticsClientConfig.withActiveClientCounter(activeClientsCnt); this.diagnosticsClientConfig.withConnectionSharingAcrossClientsEnabled(connectionSharingAcrossClientsEnabled); this.diagnosticsClientConfig.withConsistency(consistencyLevel); this.throughputControlEnabled = new AtomicBoolean(false); logger.info( "Initializing DocumentClient [{}] with" + " serviceEndpoint [{}], connectionPolicy [{}], consistencyLevel [{}], directModeProtocol [{}]", this.clientId, serviceEndpoint, connectionPolicy, consistencyLevel, configs.getProtocol()); try { this.connectionSharingAcrossClientsEnabled = connectionSharingAcrossClientsEnabled; this.configs = configs; this.masterKeyOrResourceToken = masterKeyOrResourceToken; this.serviceEndpoint = serviceEndpoint; this.credential = credential; this.tokenCredential = tokenCredential; this.contentResponseOnWriteEnabled = contentResponseOnWriteEnabled; this.authorizationTokenType = AuthorizationTokenType.Invalid; if (this.credential != null) { hasAuthKeyResourceToken = false; this.authorizationTokenType = AuthorizationTokenType.PrimaryMasterKey; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.credential); } else if (masterKeyOrResourceToken != null && ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.authorizationTokenProvider = null; hasAuthKeyResourceToken = true; this.authorizationTokenType = AuthorizationTokenType.ResourceToken; } else if(masterKeyOrResourceToken != null && !ResourceTokenAuthorizationHelper.isResourceToken(masterKeyOrResourceToken)) { this.credential = new AzureKeyCredential(this.masterKeyOrResourceToken); 
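// The raw master key string was wrapped in an AzureKeyCredential just above; from here on it is
// treated exactly like a caller-supplied key credential and signed via BaseAuthorizationTokenProvider.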
hasAuthKeyResourceToken = false; this.authorizationTokenType = AuthorizationTokenType.PrimaryMasterKey; this.authorizationTokenProvider = new BaseAuthorizationTokenProvider(this.credential); } else { hasAuthKeyResourceToken = false; this.authorizationTokenProvider = null; if (tokenCredential != null) { this.tokenCredentialScopes = new String[] { serviceEndpoint.getScheme() + "://" + serviceEndpoint.getHost() + "/.default" }; this.tokenCredentialCache = new SimpleTokenCache(() -> this.tokenCredential .getToken(new TokenRequestContext().addScopes(this.tokenCredentialScopes))); this.authorizationTokenType = AuthorizationTokenType.AadToken; } } if (connectionPolicy != null) { this.connectionPolicy = connectionPolicy; } else { this.connectionPolicy = new ConnectionPolicy(DirectConnectionConfig.getDefaultConfig()); } this.diagnosticsClientConfig.withConnectionMode(this.getConnectionPolicy().getConnectionMode()); this.diagnosticsClientConfig.withMultipleWriteRegionsEnabled(this.connectionPolicy.isMultipleWriteRegionsEnabled()); this.diagnosticsClientConfig.withEndpointDiscoveryEnabled(this.connectionPolicy.isEndpointDiscoveryEnabled()); this.diagnosticsClientConfig.withPreferredRegions(this.connectionPolicy.getPreferredRegions()); this.diagnosticsClientConfig.withMachineId(tempMachineId); boolean disableSessionCapturing = (ConsistencyLevel.SESSION != consistencyLevel && !sessionCapturingOverrideEnabled); this.sessionContainer = new SessionContainer(this.serviceEndpoint.getHost(), disableSessionCapturing); this.consistencyLevel = consistencyLevel; this.userAgentContainer = new UserAgentContainer(); String userAgentSuffix = this.connectionPolicy.getUserAgentSuffix(); if (userAgentSuffix != null && userAgentSuffix.length() > 0) { userAgentContainer.setSuffix(userAgentSuffix); } this.httpClientInterceptor = null; this.reactorHttpClient = httpClient(); this.globalEndpointManager = new GlobalEndpointManager(asDatabaseAccountManagerInternal(), this.connectionPolicy, /**/configs); this.retryPolicy = new RetryPolicy(this, this.globalEndpointManager, this.connectionPolicy); this.resetSessionTokenRetryPolicy = retryPolicy; CpuMemoryMonitor.register(this); this.queryPlanCache = Collections.synchronizedMap(new SizeLimitingLRUCache(Constants.QUERYPLAN_CACHE_SIZE)); this.apiType = apiType; } catch (RuntimeException e) { logger.error("unexpected failure in initializing client.", e); close(); throw e; } } @Override public DiagnosticsClientConfig getConfig() { return diagnosticsClientConfig; } @Override public CosmosDiagnostics createDiagnostics() { return BridgeInternal.createCosmosDiagnostics(this, this.globalEndpointManager); } private void initializeGatewayConfigurationReader() { this.gatewayConfigurationReader = new GatewayServiceConfigurationReader(this.globalEndpointManager); DatabaseAccount databaseAccount = this.globalEndpointManager.getLatestDatabaseAccount(); if (databaseAccount == null) { logger.error("Client initialization failed." + " Check if the endpoint is reachable and if your auth token is valid. More info: https: throw new RuntimeException("Client initialization failed." + " Check if the endpoint is reachable and if your auth token is valid.
More info: https: } this.useMultipleWriteLocations = this.connectionPolicy.isMultipleWriteRegionsEnabled() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount); } private void updateGatewayProxy() { ((RxGatewayStoreModel)this.gatewayProxy).setGatewayServiceConfigurationReader(this.gatewayConfigurationReader); ((RxGatewayStoreModel)this.gatewayProxy).setCollectionCache(this.collectionCache); ((RxGatewayStoreModel)this.gatewayProxy).setPartitionKeyRangeCache(this.partitionKeyRangeCache); ((RxGatewayStoreModel)this.gatewayProxy).setUseMultipleWriteLocations(this.useMultipleWriteLocations); } public void serialize(CosmosClientMetadataCachesSnapshot state) { RxCollectionCache.serialize(state, this.collectionCache); } private void initializeDirectConnectivity() { this.addressResolver = new GlobalAddressResolver(this, this.reactorHttpClient, this.globalEndpointManager, this.configs.getProtocol(), this, this.collectionCache, this.partitionKeyRangeCache, userAgentContainer, null, this.connectionPolicy, this.apiType); this.storeClientFactory = new StoreClientFactory( this.addressResolver, this.diagnosticsClientConfig, this.configs, this.connectionPolicy, this.userAgentContainer, this.connectionSharingAcrossClientsEnabled, this.clientTelemetry ); this.createStoreModel(true); } DatabaseAccountManagerInternal asDatabaseAccountManagerInternal() { return new DatabaseAccountManagerInternal() { @Override public URI getServiceEndpoint() { return RxDocumentClientImpl.this.getServiceEndpoint(); } @Override public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { logger.info("Getting database account endpoint from {}", endpoint); return RxDocumentClientImpl.this.getDatabaseAccountFromEndpoint(endpoint); } @Override public ConnectionPolicy getConnectionPolicy() { return RxDocumentClientImpl.this.getConnectionPolicy(); } }; } RxGatewayStoreModel createRxGatewayProxy(ISessionContainer sessionContainer, ConsistencyLevel consistencyLevel, QueryCompatibilityMode queryCompatibilityMode, UserAgentContainer userAgentContainer, GlobalEndpointManager globalEndpointManager, HttpClient httpClient, ApiType apiType) { return new RxGatewayStoreModel( this, sessionContainer, consistencyLevel, queryCompatibilityMode, userAgentContainer, globalEndpointManager, httpClient, apiType); } private HttpClient httpClient() { HttpClientConfig httpClientConfig = new HttpClientConfig(this.configs) .withMaxIdleConnectionTimeout(this.connectionPolicy.getIdleHttpConnectionTimeout()) .withPoolSize(this.connectionPolicy.getMaxConnectionPoolSize()) .withProxy(this.connectionPolicy.getProxy()) .withNetworkRequestTimeout(this.connectionPolicy.getHttpNetworkRequestTimeout()); if (connectionSharingAcrossClientsEnabled) { return SharedGatewayHttpClient.getOrCreateInstance(httpClientConfig, diagnosticsClientConfig); } else { diagnosticsClientConfig.withGatewayHttpClientConfig(httpClientConfig); return HttpClient.createFixed(httpClientConfig); } } private void createStoreModel(boolean subscribeRntbdStatus) { StoreClient storeClient = this.storeClientFactory.createStoreClient(this, this.addressResolver, this.sessionContainer, this.gatewayConfigurationReader, this, this.useMultipleWriteLocations ); this.storeModel = new ServerStoreModel(storeClient); } @Override public URI getServiceEndpoint() { return this.serviceEndpoint; } @Override public URI getWriteEndpoint() { return globalEndpointManager.getWriteEndpoints().stream().findFirst().orElse(null); } @Override public URI getReadEndpoint() { return 
globalEndpointManager.getReadEndpoints().stream().findFirst().orElse(null); } @Override public ConnectionPolicy getConnectionPolicy() { return this.connectionPolicy; } @Override public boolean isContentResponseOnWriteEnabled() { return contentResponseOnWriteEnabled; } @Override public ConsistencyLevel getConsistencyLevel() { return consistencyLevel; } @Override public ClientTelemetry getClientTelemetry() { return this.clientTelemetry; } @Override public Mono<ResourceResponse<Database>> createDatabase(Database database, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createDatabaseInternal(database, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> createDatabaseInternal(Database database, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (database == null) { throw new IllegalArgumentException("Database"); } logger.debug("Creating a Database. id: [{}]", database.getId()); validateResource(database); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Create); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(database); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.DATABASE_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Create, ResourceType.Database, Paths.DATABASES_ROOT, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in creating a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> deleteDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> deleteDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Deleting a Database. 
databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in deleting a database. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Database>> readDatabase(String databaseLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDatabaseInternal(databaseLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Database>> readDatabaseInternal(String databaseLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } logger.debug("Reading a Database. databaseLink: [{}]", databaseLink); String path = Utils.joinPath(databaseLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Database, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Database, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Database.class)); } catch (Exception e) { logger.debug("Failure in reading a database. 
due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Database>> readDatabases(CosmosQueryRequestOptions options) { return readFeed(options, ResourceType.Database, Database.class, Paths.DATABASES_ROOT); } private String parentResourceLinkToQueryLink(String parentResourceLink, ResourceType resourceTypeEnum) { switch (resourceTypeEnum) { case Database: return Paths.DATABASES_ROOT; case DocumentCollection: return Utils.joinPath(parentResourceLink, Paths.COLLECTIONS_PATH_SEGMENT); case Document: return Utils.joinPath(parentResourceLink, Paths.DOCUMENTS_PATH_SEGMENT); case Offer: return Paths.OFFERS_ROOT; case User: return Utils.joinPath(parentResourceLink, Paths.USERS_PATH_SEGMENT); case ClientEncryptionKey: return Utils.joinPath(parentResourceLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT); case Permission: return Utils.joinPath(parentResourceLink, Paths.PERMISSIONS_PATH_SEGMENT); case Attachment: return Utils.joinPath(parentResourceLink, Paths.ATTACHMENTS_PATH_SEGMENT); case StoredProcedure: return Utils.joinPath(parentResourceLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); case Trigger: return Utils.joinPath(parentResourceLink, Paths.TRIGGERS_PATH_SEGMENT); case UserDefinedFunction: return Utils.joinPath(parentResourceLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); case Conflict: return Utils.joinPath(parentResourceLink, Paths.CONFLICTS_PATH_SEGMENT); default: throw new IllegalArgumentException("resource type not supported"); } } private OperationContextAndListenerTuple getOperationContextAndListenerTuple(CosmosQueryRequestOptions options) { if (options == null) { return null; } return ImplementationBridgeHelpers.CosmosQueryRequestOptionsHelper.getCosmosQueryRequestOptionsAccessor().getOperationContext(options); } private OperationContextAndListenerTuple getOperationContextAndListenerTuple(RequestOptions options) { if (options == null) { return null; } return options.getOperationContextAndListenerTuple(); } private <T extends Resource> Flux<FeedResponse<T>> createQuery( String parentResourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum) { String resourceLink = parentResourceLinkToQueryLink(parentResourceLink, resourceTypeEnum); UUID correlationActivityIdOfRequestOptions = ImplementationBridgeHelpers .CosmosQueryRequestOptionsHelper .getCosmosQueryRequestOptionsAccessor() .getCorrelationActivityId(options); UUID correlationActivityId = correlationActivityIdOfRequestOptions != null ? correlationActivityIdOfRequestOptions : Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, resourceLink, ModelBridgeInternal.getPropertiesFromQueryRequestOptions(options)); return ObservableHelper.fluxInlineIfPossibleAsObs( () -> createQueryInternal( resourceLink, sqlQuery, options, klass, resourceTypeEnum, queryClient, correlationActivityId), invalidPartitionExceptionRetryPolicy); } private <T extends Resource> Flux<FeedResponse<T>> createQueryInternal( String resourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum, IDocumentQueryClient queryClient, UUID activityId) { Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory .createDocumentQueryExecutionContextAsync(this, queryClient, resourceTypeEnum, klass, sqlQuery, options, resourceLink, false, activityId, Configs.isQueryPlanCachingEnabled(), queryPlanCache); AtomicBoolean isFirstResponse = new AtomicBoolean(true); return executionContext.flatMap(iDocumentQueryExecutionContext -> { QueryInfo queryInfo = null; if (iDocumentQueryExecutionContext instanceof PipelinedDocumentQueryExecutionContext) { queryInfo = ((PipelinedDocumentQueryExecutionContext<T>) iDocumentQueryExecutionContext).getQueryInfo(); } QueryInfo finalQueryInfo = queryInfo; return iDocumentQueryExecutionContext.executeAsync() .map(tFeedResponse -> { if (finalQueryInfo != null) { if (finalQueryInfo.hasSelectValue()) { ModelBridgeInternal .addQueryInfoToFeedResponse(tFeedResponse, finalQueryInfo); } if (isFirstResponse.compareAndSet(true, false)) { ModelBridgeInternal.addQueryPlanDiagnosticsContextToFeedResponse(tFeedResponse, finalQueryInfo.getQueryPlanDiagnosticsContext()); } } return tFeedResponse; }); }); } @Override public Flux<FeedResponse<Database>> queryDatabases(String query, CosmosQueryRequestOptions options) { return queryDatabases(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Database>> queryDatabases(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(Paths.DATABASES_ROOT, querySpec, options, Database.class, ResourceType.Database); } @Override public Mono<ResourceResponse<DocumentCollection>> createCollection(String databaseLink, DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> this.createCollectionInternal(databaseLink, collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> createCollectionInternal(String databaseLink, DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Creating a Collection. 
databaseLink: [{}], Collection id: [{}]", databaseLink, collection.getId()); validateResource(collection); String path = Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Create); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Create, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); }); } catch (Exception e) { logger.debug("Failure in creating a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> replaceCollection(DocumentCollection collection, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceCollectionInternal(collection, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> replaceCollectionInternal(DocumentCollection collection, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (collection == null) { throw new IllegalArgumentException("collection"); } logger.debug("Replacing a Collection. 
id: [{}]", collection.getId()); validateResource(collection); String path = Utils.joinPath(collection.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Replace); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer byteBuffer = ModelBridgeInternal.serializeJsonToByteBuffer(collection); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.CONTAINER_SERIALIZATION); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.DocumentCollection, path, byteBuffer, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)) .doOnNext(resourceResponse -> { if (resourceResponse.getResource() != null) { this.sessionContainer.setSessionToken(resourceResponse.getResource().getResourceId(), getAltLink(resourceResponse.getResource()), resourceResponse.getResponseHeaders()); } }); } catch (Exception e) { logger.debug("Failure in replacing a collection. due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<DocumentCollection>> deleteCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> deleteCollectionInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting a Collection. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in deleting a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<RxDocumentServiceResponse> delete(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.DELETE) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> deleteAllItemsByPartitionKey(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { RxStoreModel storeProxy = this.getStoreProxy(requestPopulated); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return storeProxy.processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> read(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } Mono<RxDocumentServiceResponse> readFeed(RxDocumentServiceRequest request) { return populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> getStoreProxy(requestPopulated).processMessage(requestPopulated)); } private Mono<RxDocumentServiceResponse> query(RxDocumentServiceRequest request) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> this.getStoreProxy(requestPopulated).processMessage(requestPopulated) .map(response -> { this.captureSessionToken(requestPopulated, response); return response; } )); } @Override public Mono<ResourceResponse<DocumentCollection>> readCollection(String collectionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readCollectionInternal(collectionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<DocumentCollection>> readCollectionInternal(String collectionLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Reading a Collection. collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.DocumentCollection, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DocumentCollection, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, DocumentCollection.class)); } catch (Exception e) { logger.debug("Failure in reading a collection, due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<DocumentCollection>> readCollections(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.DocumentCollection, DocumentCollection.class, Utils.joinPath(databaseLink, Paths.COLLECTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, String query, CosmosQueryRequestOptions options) { return createQuery(databaseLink, new SqlQuerySpec(query), options, DocumentCollection.class, ResourceType.DocumentCollection); } @Override public Flux<FeedResponse<DocumentCollection>> queryCollections(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, DocumentCollection.class, ResourceType.DocumentCollection); } private static String serializeProcedureParams(List<Object> objectArray) { String[] stringArray = new String[objectArray.size()]; for (int i = 0; i < objectArray.size(); ++i) { Object object = objectArray.get(i); if (object instanceof JsonSerializable) { stringArray[i] = ModelBridgeInternal.toJsonFromJsonSerializable((JsonSerializable) object); } else { try { stringArray[i] = mapper.writeValueAsString(object); } catch (IOException e) { throw new IllegalArgumentException("Can't serialize the object into the json string", e); } } } return String.format("[%s]", StringUtils.join(stringArray, ",")); } private static void validateResource(Resource resource) { if (!StringUtils.isEmpty(resource.getId())) { if (resource.getId().indexOf('/') != -1 || resource.getId().indexOf('\\') != -1 || resource.getId().indexOf('?') != -1 || resource.getId().indexOf('#') != -1) { throw new IllegalArgumentException("Id contains illegal chars."); } if (resource.getId().endsWith(" ")) { throw new IllegalArgumentException("Id ends with a space."); } } } private Map<String, String> getRequestHeaders(RequestOptions options, ResourceType resourceType, OperationType operationType) { Map<String, String> headers = new HashMap<>(); if (this.useMultipleWriteLocations) { headers.put(HttpConstants.HttpHeaders.ALLOW_TENTATIVE_WRITES, Boolean.TRUE.toString()); } if (consistencyLevel != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, consistencyLevel.toString()); } if (options == null) { if (!this.contentResponseOnWriteEnabled && resourceType.equals(ResourceType.Document) && operationType.isWriteOperation()) { headers.put(HttpConstants.HttpHeaders.PREFER, HttpConstants.HeaderValues.PREFER_RETURN_MINIMAL); } return headers; } Map<String,
String> customOptions = options.getHeaders(); if (customOptions != null) { headers.putAll(customOptions); } boolean contentResponseOnWriteEnabled = this.contentResponseOnWriteEnabled; if (options.isContentResponseOnWriteEnabled() != null) { contentResponseOnWriteEnabled = options.isContentResponseOnWriteEnabled(); } if (!contentResponseOnWriteEnabled && resourceType.equals(ResourceType.Document) && operationType.isWriteOperation()) { headers.put(HttpConstants.HttpHeaders.PREFER, HttpConstants.HeaderValues.PREFER_RETURN_MINIMAL); } if (options.getIfMatchETag() != null) { headers.put(HttpConstants.HttpHeaders.IF_MATCH, options.getIfMatchETag()); } if(options.getIfNoneMatchETag() != null) { headers.put(HttpConstants.HttpHeaders.IF_NONE_MATCH, options.getIfNoneMatchETag()); } if (options.getConsistencyLevel() != null) { headers.put(HttpConstants.HttpHeaders.CONSISTENCY_LEVEL, options.getConsistencyLevel().toString()); } if (options.getIndexingDirective() != null) { headers.put(HttpConstants.HttpHeaders.INDEXING_DIRECTIVE, options.getIndexingDirective().toString()); } if (options.getPostTriggerInclude() != null && options.getPostTriggerInclude().size() > 0) { String postTriggerInclude = StringUtils.join(options.getPostTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.POST_TRIGGER_INCLUDE, postTriggerInclude); } if (options.getPreTriggerInclude() != null && options.getPreTriggerInclude().size() > 0) { String preTriggerInclude = StringUtils.join(options.getPreTriggerInclude(), ","); headers.put(HttpConstants.HttpHeaders.PRE_TRIGGER_INCLUDE, preTriggerInclude); } if (!Strings.isNullOrEmpty(options.getSessionToken())) { headers.put(HttpConstants.HttpHeaders.SESSION_TOKEN, options.getSessionToken()); } if (options.getResourceTokenExpirySeconds() != null) { headers.put(HttpConstants.HttpHeaders.RESOURCE_TOKEN_EXPIRY, String.valueOf(options.getResourceTokenExpirySeconds())); } if (options.getOfferThroughput() != null && options.getOfferThroughput() >= 0) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, options.getOfferThroughput().toString()); } else if (options.getOfferType() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_TYPE, options.getOfferType()); } if (options.getOfferThroughput() == null) { if (options.getThroughputProperties() != null) { Offer offer = ModelBridgeInternal.getOfferFromThroughputProperties(options.getThroughputProperties()); final OfferAutoscaleSettings offerAutoscaleSettings = offer.getOfferAutoScaleSettings(); OfferAutoscaleAutoUpgradeProperties autoscaleAutoUpgradeProperties = null; if (offerAutoscaleSettings != null) { autoscaleAutoUpgradeProperties = offer.getOfferAutoScaleSettings().getAutoscaleAutoUpgradeProperties(); } if (offer.hasOfferThroughput() && (offerAutoscaleSettings != null && offerAutoscaleSettings.getMaxThroughput() >= 0 || autoscaleAutoUpgradeProperties != null && autoscaleAutoUpgradeProperties .getAutoscaleThroughputProperties() .getIncrementPercent() >= 0)) { throw new IllegalArgumentException("Autoscale provisioned throughput can not be configured with " + "fixed offer"); } if (offer.hasOfferThroughput()) { headers.put(HttpConstants.HttpHeaders.OFFER_THROUGHPUT, String.valueOf(offer.getThroughput())); } else if (offer.getOfferAutoScaleSettings() != null) { headers.put(HttpConstants.HttpHeaders.OFFER_AUTOPILOT_SETTINGS, ModelBridgeInternal.toJsonFromJsonSerializable(offer.getOfferAutoScaleSettings())); } } } if (options.isQuotaInfoEnabled()) { headers.put(HttpConstants.HttpHeaders.POPULATE_QUOTA_INFO, String.valueOf(true)); } 
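// Remaining per-request opt-ins: stored-procedure script logging and the max integrated cache
// staleness honored by the dedicated gateway.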
if (options.isScriptLoggingEnabled()) { headers.put(HttpConstants.HttpHeaders.SCRIPT_ENABLE_LOGGING, String.valueOf(true)); } if (options.getDedicatedGatewayRequestOptions() != null && options.getDedicatedGatewayRequestOptions().getMaxIntegratedCacheStaleness() != null) { headers.put(HttpConstants.HttpHeaders.DEDICATED_GATEWAY_PER_REQUEST_CACHE_STALENESS, String.valueOf(Utils.getMaxIntegratedCacheStalenessInMillis(options.getDedicatedGatewayRequestOptions()))); } return headers; } public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return this.resetSessionTokenRetryPolicy; } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Document document, RequestOptions options) { Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return collectionObs .map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private Mono<RxDocumentServiceRequest> addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object document, RequestOptions options, Mono<Utils.ValueHolder<DocumentCollection>> collectionObs) { return collectionObs.map(collectionValueHolder -> { addPartitionKeyInformation(request, contentAsByteBuffer, document, options, collectionValueHolder.v); return request; }); } private void addPartitionKeyInformation(RxDocumentServiceRequest request, ByteBuffer contentAsByteBuffer, Object objectDoc, RequestOptions options, DocumentCollection collection) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); PartitionKeyInternal partitionKeyInternal = null; if (options != null && options.getPartitionKey() != null && options.getPartitionKey().equals(PartitionKey.NONE)){ partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else if (options != null && options.getPartitionKey() != null) { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(options.getPartitionKey()); } else if (partitionKeyDefinition == null || partitionKeyDefinition.getPaths().size() == 0) { partitionKeyInternal = PartitionKeyInternal.getEmpty(); } else if (contentAsByteBuffer != null || objectDoc != null) { InternalObjectNode internalObjectNode; if (objectDoc instanceof InternalObjectNode) { internalObjectNode = (InternalObjectNode) objectDoc; } else if (objectDoc instanceof ObjectNode) { internalObjectNode = new InternalObjectNode((ObjectNode)objectDoc); } else if (contentAsByteBuffer != null) { contentAsByteBuffer.rewind(); internalObjectNode = new InternalObjectNode(contentAsByteBuffer); } else { throw new IllegalStateException("ContentAsByteBuffer and objectDoc are null"); } Instant serializationStartTime = Instant.now(); partitionKeyInternal = extractPartitionKeyValueFromDocument(internalObjectNode, partitionKeyDefinition); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTime, serializationEndTime, SerializationDiagnosticsContext.SerializationType.PARTITION_KEY_FETCH_SERIALIZATION ); SerializationDiagnosticsContext serializationDiagnosticsContext = 
BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } } else { throw new UnsupportedOperationException("PartitionKey value must be supplied for this operation."); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } public static PartitionKeyInternal extractPartitionKeyValueFromDocument( InternalObjectNode document, PartitionKeyDefinition partitionKeyDefinition) { if (partitionKeyDefinition != null) { switch (partitionKeyDefinition.getKind()) { case HASH: String path = partitionKeyDefinition.getPaths().iterator().next(); List<String> parts = PathParser.getPathParts(path); if (parts.size() >= 1) { Object value = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, parts); if (value == null || value.getClass() == ObjectNode.class) { value = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } if (value instanceof PartitionKeyInternal) { return (PartitionKeyInternal) value; } else { return PartitionKeyInternal.fromObjectArray(Collections.singletonList(value), false); } } break; case MULTI_HASH: Object[] partitionKeyValues = new Object[partitionKeyDefinition.getPaths().size()]; for(int pathIter = 0 ; pathIter < partitionKeyDefinition.getPaths().size(); pathIter++){ String partitionPath = partitionKeyDefinition.getPaths().get(pathIter); List<String> partitionPathParts = PathParser.getPathParts(partitionPath); partitionKeyValues[pathIter] = ModelBridgeInternal.getObjectByPathFromJsonSerializable(document, partitionPathParts); } return PartitionKeyInternal.fromObjectArray(partitionKeyValues, false); default: throw new IllegalArgumentException("Unrecognized Partition kind: " + partitionKeyDefinition.getKind()); } } return null; } private Mono<RxDocumentServiceRequest> getCreateDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, OperationType operationType) { if (StringUtils.isEmpty(documentCollectionLink)) { throw new IllegalArgumentException("documentCollectionLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = BridgeInternal.serializeJsonToByteBuffer(document, mapper); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { 
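// Attach the time spent serializing the item payload to the request's CosmosDiagnostics so it
// shows up in client-side latency breakdowns.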
serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return addPartitionKeyInformation(request, content, document, options, collectionObs); } private Mono<RxDocumentServiceRequest> getBatchDocumentRequest(DocumentClientRetryPolicy requestRetryPolicy, String documentCollectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, boolean disableAutomaticIdGeneration) { checkArgument(StringUtils.isNotEmpty(documentCollectionLink), "expected non empty documentCollectionLink"); checkNotNull(serverBatchRequest, "expected non null serverBatchRequest"); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = ByteBuffer.wrap(Utils.getUTF8Bytes(serverBatchRequest.getRequestBody())); Instant serializationEndTimeUTC = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTimeUTC, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); String path = Utils.joinPath(documentCollectionLink, Paths.DOCUMENTS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Batch); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Batch, ResourceType.Document, path, requestHeaders, options, content); if (requestRetryPolicy != null) { requestRetryPolicy.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); return collectionObs.map((Utils.ValueHolder<DocumentCollection> collectionValueHolder) -> { addBatchHeaders(request, serverBatchRequest, collectionValueHolder.v); return request; }); } private RxDocumentServiceRequest addBatchHeaders(RxDocumentServiceRequest request, ServerBatchRequest serverBatchRequest, DocumentCollection collection) { if(serverBatchRequest instanceof SinglePartitionKeyServerBatchRequest) { PartitionKey partitionKey = ((SinglePartitionKeyServerBatchRequest) serverBatchRequest).getPartitionKeyValue(); PartitionKeyInternal partitionKeyInternal; if (partitionKey.equals(PartitionKey.NONE)) { PartitionKeyDefinition partitionKeyDefinition = collection.getPartitionKey(); partitionKeyInternal = ModelBridgeInternal.getNonePartitionKey(partitionKeyDefinition); } else { partitionKeyInternal = BridgeInternal.getPartitionKeyInternal(partitionKey); } request.setPartitionKeyInternal(partitionKeyInternal); request.getHeaders().put(HttpConstants.HttpHeaders.PARTITION_KEY, Utils.escapeNonAscii(partitionKeyInternal.toJson())); } else if(serverBatchRequest instanceof PartitionKeyRangeServerBatchRequest) { request.setPartitionKeyRangeIdentity(new PartitionKeyRangeIdentity(((PartitionKeyRangeServerBatchRequest) serverBatchRequest).getPartitionKeyRangeId())); } else { throw new UnsupportedOperationException("Unknown Server 
request."); } request.getHeaders().put(HttpConstants.HttpHeaders.IS_BATCH_REQUEST, Boolean.TRUE.toString()); request.getHeaders().put(HttpConstants.HttpHeaders.IS_BATCH_ATOMIC, String.valueOf(serverBatchRequest.isAtomicBatch())); request.getHeaders().put(HttpConstants.HttpHeaders.SHOULD_BATCH_CONTINUE_ON_ERROR, String.valueOf(serverBatchRequest.isShouldContinueOnError())); request.setNumberOfItemsInBatchRequest(serverBatchRequest.getOperations().size()); return request; } private Mono<RxDocumentServiceRequest> populateHeaders(RxDocumentServiceRequest request, RequestVerb httpMethod) { request.getHeaders().put(HttpConstants.HttpHeaders.X_DATE, Utils.nowAsRFC1123()); if (this.masterKeyOrResourceToken != null || this.resourceTokensMap != null || this.cosmosAuthorizationTokenResolver != null || this.credential != null) { String resourceName = request.getResourceAddress(); String authorization = this.getUserAuthorizationToken( resourceName, request.getResourceType(), httpMethod, request.getHeaders(), AuthorizationTokenType.PrimaryMasterKey, request.properties); try { authorization = URLEncoder.encode(authorization, "UTF-8"); } catch (UnsupportedEncodingException e) { throw new IllegalStateException("Failed to encode authtoken.", e); } request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); } if (this.apiType != null) { request.getHeaders().put(HttpConstants.HttpHeaders.API_TYPE, this.apiType.toString()); } if ((RequestVerb.POST.equals(httpMethod) || RequestVerb.PUT.equals(httpMethod)) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); } if (RequestVerb.PATCH.equals(httpMethod) && !request.getHeaders().containsKey(HttpConstants.HttpHeaders.CONTENT_TYPE)) { request.getHeaders().put(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON_PATCH); } if (!request.getHeaders().containsKey(HttpConstants.HttpHeaders.ACCEPT)) { request.getHeaders().put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); } MetadataDiagnosticsContext metadataDiagnosticsCtx = BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics); if (this.requiresFeedRangeFiltering(request)) { return request.getFeedRange() .populateFeedRangeFilteringHeaders( this.getPartitionKeyRangeCache(), request, this.collectionCache.resolveCollectionAsync(metadataDiagnosticsCtx, request)) .flatMap(this::populateAuthorizationHeader); } return this.populateAuthorizationHeader(request); } private boolean requiresFeedRangeFiltering(RxDocumentServiceRequest request) { if (request.getResourceType() != ResourceType.Document && request.getResourceType() != ResourceType.Conflict) { return false; } switch (request.getOperationType()) { case ReadFeed: case Query: case SqlQuery: return request.getFeedRange() != null; default: return false; } } @Override public Mono<RxDocumentServiceRequest> populateAuthorizationHeader(RxDocumentServiceRequest request) { if (request == null) { throw new IllegalArgumentException("request"); } if (this.authorizationTokenType == AuthorizationTokenType.AadToken) { return AadTokenAuthorizationHelper.getAuthorizationToken(this.tokenCredentialCache) .map(authorization -> { request.getHeaders().put(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); return request; }); } else { return Mono.just(request); } } @Override public Mono<HttpHeaders> populateAuthorizationHeader(HttpHeaders httpHeaders) { if (httpHeaders == 
null) { throw new IllegalArgumentException("httpHeaders"); } if (this.authorizationTokenType == AuthorizationTokenType.AadToken) { return AadTokenAuthorizationHelper.getAuthorizationToken(this.tokenCredentialCache) .map(authorization -> { httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); return httpHeaders; }); } return Mono.just(httpHeaders); } @Override public AuthorizationTokenType getAuthorizationTokenType() { return this.authorizationTokenType; } @Override public String getUserAuthorizationToken(String resourceName, ResourceType resourceType, RequestVerb requestVerb, Map<String, String> headers, AuthorizationTokenType tokenType, Map<String, Object> properties) { if (this.cosmosAuthorizationTokenResolver != null) { return this.cosmosAuthorizationTokenResolver.getAuthorizationToken(requestVerb.toUpperCase(), resourceName, this.resolveCosmosResourceType(resourceType).toString(), properties != null ? Collections.unmodifiableMap(properties) : null); } else if (credential != null) { return this.authorizationTokenProvider.generateKeyAuthorizationSignature(requestVerb, resourceName, resourceType, headers); } else if (masterKeyOrResourceToken != null && hasAuthKeyResourceToken && resourceTokensMap == null) { return masterKeyOrResourceToken; } else { assert resourceTokensMap != null; if(resourceType.equals(ResourceType.DatabaseAccount)) { return this.firstResourceTokenFromPermissionFeed; } return ResourceTokenAuthorizationHelper.getAuthorizationTokenUsingResourceTokens(resourceTokensMap, requestVerb, resourceName, headers); } } private CosmosResourceType resolveCosmosResourceType(ResourceType resourceType) { CosmosResourceType cosmosResourceType = ModelBridgeInternal.fromServiceSerializedFormat(resourceType.toString()); if (cosmosResourceType == null) { return CosmosResourceType.SYSTEM; } return cosmosResourceType; } void captureSessionToken(RxDocumentServiceRequest request, RxDocumentServiceResponse response) { this.sessionContainer.setSessionToken(request, response.getResponseHeaders()); } private Mono<RxDocumentServiceResponse> create(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { RxStoreModel storeProxy = this.getStoreProxy(requestPopulated); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return storeProxy.processMessage(requestPopulated, operationContextAndListenerTuple); }); } private Mono<RxDocumentServiceResponse> upsert(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy, OperationContextAndListenerTuple operationContextAndListenerTuple) { return populateHeaders(request, RequestVerb.POST) .flatMap(requestPopulated -> { Map<String, String> headers = requestPopulated.getHeaders(); assert (headers != null); headers.put(HttpConstants.HttpHeaders.IS_UPSERT, "true"); if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated, operationContextAndListenerTuple) .map(response -> { this.captureSessionToken(requestPopulated, response); return response; } ); }); } private Mono<RxDocumentServiceResponse> 
replace(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.PUT) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } private Mono<RxDocumentServiceResponse> patch(RxDocumentServiceRequest request, DocumentClientRetryPolicy documentClientRetryPolicy) { return populateHeaders(request, RequestVerb.PATCH) .flatMap(requestPopulated -> { if (documentClientRetryPolicy.getRetryContext() != null && documentClientRetryPolicy.getRetryContext().getRetryCount() > 0) { documentClientRetryPolicy.getRetryContext().updateEndTime(); } return getStoreProxy(requestPopulated).processMessage(requestPopulated); }); } @Override public Mono<ResourceResponse<Document>> createDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> createDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), requestRetryPolicy); } private Mono<ResourceResponse<Document>> createDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy requestRetryPolicy) { try { logger.debug("Creating a Document. collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> requestObs = getCreateDocumentRequest(requestRetryPolicy, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Create); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> create(request, requestRetryPolicy, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in creating a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> upsertDocument(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRetryPolicyInstance = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> upsertDocumentInternal(collectionLink, document, options, disableAutomaticIdGeneration, finalRetryPolicyInstance), finalRetryPolicyInstance); } private Mono<ResourceResponse<Document>> upsertDocumentInternal(String collectionLink, Object document, RequestOptions options, boolean disableAutomaticIdGeneration, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Document.
collectionLink: [{}]", collectionLink); Mono<RxDocumentServiceRequest> reqObs = getCreateDocumentRequest(retryPolicyInstance, collectionLink, document, options, disableAutomaticIdGeneration, OperationType.Upsert); Mono<RxDocumentServiceResponse> responseObservable = reqObs.flatMap(request -> upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); } catch (Exception e) { logger.debug("Failure in upserting a document due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(String documentLink, Object document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = Utils.getCollectionName(documentLink); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(documentLink, document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Object document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } if (document == null) { throw new IllegalArgumentException("document"); } Document typedDocument = documentFromObject(document, mapper); return this.replaceDocumentInternal(documentLink, typedDocument, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> replaceDocument(Document document, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); if (options == null || options.getPartitionKey() == null) { String collectionLink = document.getSelfLink(); requestRetryPolicy = new PartitionKeyMismatchRetryPolicy(collectionCache, requestRetryPolicy, collectionLink, options); } DocumentClientRetryPolicy finalRequestRetryPolicy = requestRetryPolicy; return ObservableHelper.inlineIfPossibleAsObs(() -> replaceDocumentInternal(document, options, finalRequestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> replaceDocumentInternal(Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (document == null) { throw new IllegalArgumentException("document"); } return this.replaceDocumentInternal(document.getSelfLink(), document, options, retryPolicyInstance); } catch (Exception e) { logger.debug("Failure in replacing a database due to [{}]", e.getMessage()); return Mono.error(e); } } private Mono<ResourceResponse<Document>> replaceDocumentInternal(String documentLink, Document document, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { if (document == null) { throw new IllegalArgumentException("document"); } logger.debug("Replacing a Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Document, OperationType.Replace); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = serializeJsonToByteBuffer(document); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTime, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, content, document, options, collectionObs); return requestObs.flatMap(req -> replace(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class))); } @Override public Mono<ResourceResponse<Document>> patchDocument(String documentLink, CosmosPatchOperations cosmosPatchOperations, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> patchDocumentInternal(documentLink, cosmosPatchOperations, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Document>> patchDocumentInternal(String documentLink, CosmosPatchOperations cosmosPatchOperations, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { checkArgument(StringUtils.isNotEmpty(documentLink), "expected non empty documentLink"); checkNotNull(cosmosPatchOperations, "expected non null cosmosPatchOperations"); logger.debug("Running patch operations on Document. 
documentLink: [{}]", documentLink); final String path = Utils.joinPath(documentLink, null); final Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Document, OperationType.Patch); Instant serializationStartTimeUTC = Instant.now(); ByteBuffer content = ByteBuffer.wrap(PatchUtil.serializeCosmosPatchToByteArray(cosmosPatchOperations, options)); Instant serializationEndTime = Instant.now(); SerializationDiagnosticsContext.SerializationDiagnostics serializationDiagnostics = new SerializationDiagnosticsContext.SerializationDiagnostics( serializationStartTimeUTC, serializationEndTime, SerializationDiagnosticsContext.SerializationType.ITEM_SERIALIZATION); final RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Patch, ResourceType.Document, path, requestHeaders, options, content); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } SerializationDiagnosticsContext serializationDiagnosticsContext = BridgeInternal.getSerializationDiagnosticsContext(request.requestContext.cosmosDiagnostics); if (serializationDiagnosticsContext != null) { serializationDiagnosticsContext.addSerializationDiagnostics(serializationDiagnostics); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation( request, null, null, options, collectionObs); return requestObs.flatMap(req -> patch(request, retryPolicyInstance) .map(resp -> toResourceResponse(resp, Document.class))); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, null, options, requestRetryPolicy), requestRetryPolicy); } @Override public Mono<ResourceResponse<Document>> deleteDocument(String documentLink, InternalObjectNode internalObjectNode, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteDocumentInternal(documentLink, internalObjectNode, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteDocumentInternal(String documentLink, InternalObjectNode internalObjectNode, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Deleting a Document. 
documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, internalObjectNode, options, collectionObs); return requestObs.flatMap(req -> this .delete(req, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class))); } catch (Exception e) { logger.debug("Failure in deleting a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> deleteAllDocumentsByPartitionKey(String collectionLink, PartitionKey partitionKey, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteAllDocumentsByPartitionKeyInternal(collectionLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<Document>> deleteAllDocumentsByPartitionKeyInternal(String collectionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } logger.debug("Deleting all items by Partition Key. 
collectionLink: [{}]", collectionLink); String path = Utils.joinPath(collectionLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.PartitionKey, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.PartitionKey, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> this .deleteAllItemsByPartitionKey(req, retryPolicyInstance, getOperationContextAndListenerTuple(options)) .map(serviceResponse -> toResourceResponse(serviceResponse, Document.class))); } catch (Exception e) { logger.debug("Failure in deleting documents due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Mono<ResourceResponse<Document>> readDocument(String documentLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readDocumentInternal(documentLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Document>> readDocumentInternal(String documentLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(documentLink)) { throw new IllegalArgumentException("documentLink"); } logger.debug("Reading a Document. documentLink: [{}]", documentLink); String path = Utils.joinPath(documentLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.Document, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Document, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = this.collectionCache.resolveCollectionAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), request); Mono<RxDocumentServiceRequest> requestObs = addPartitionKeyInformation(request, null, null, options, collectionObs); return requestObs.flatMap(req -> { return this.read(request, retryPolicyInstance).map(serviceResponse -> toResourceResponse(serviceResponse, Document.class)); }); } catch (Exception e) { logger.debug("Failure in reading a document due to [{}]", e.getMessage()); return Mono.error(e); } } @Override public Flux<FeedResponse<Document>> readDocuments(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return queryDocuments(collectionLink, "SELECT * FROM r", options); } @Override public <T> Mono<FeedResponse<T>> readMany( List<CosmosItemIdentity> itemIdentityList, String collectionLink, CosmosQueryRequestOptions options, Class<T> klass) { String resourceLink = parentResourceLinkToQueryLink(collectionLink, ResourceType.Document); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Query, ResourceType.Document, collectionLink, null ); Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = 
collectionCache.resolveCollectionAsync(null, request); return collectionObs .flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } final PartitionKeyDefinition pkDefinition = collection.getPartitionKey(); Mono<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = partitionKeyRangeCache .tryLookupAsync(BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), null, null); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { Map<PartitionKeyRange, List<CosmosItemIdentity>> partitionRangeItemKeyMap = new HashMap<>(); CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } itemIdentityList .forEach(itemIdentity -> { String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString( BridgeInternal.getPartitionKeyInternal( itemIdentity.getPartitionKey()), pkDefinition); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); if (partitionRangeItemKeyMap.get(range) == null) { List<CosmosItemIdentity> list = new ArrayList<>(); list.add(itemIdentity); partitionRangeItemKeyMap.put(range, list); } else { List<CosmosItemIdentity> pairs = partitionRangeItemKeyMap.get(range); pairs.add(itemIdentity); partitionRangeItemKeyMap.put(range, pairs); } }); Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap; rangeQueryMap = getRangeQueryMap(partitionRangeItemKeyMap, collection.getPartitionKey()); return createReadManyQuery( resourceLink, new SqlQuerySpec(DUMMY_SQL_QUERY), options, Document.class, ResourceType.Document, collection, Collections.unmodifiableMap(rangeQueryMap)) .collectList() .map(feedList -> { List<T> finalList = new ArrayList<>(); HashMap<String, String> headers = new HashMap<>(); ConcurrentMap<String, QueryMetrics> aggregatedQueryMetrics = new ConcurrentHashMap<>(); double requestCharge = 0; for (FeedResponse<Document> page : feedList) { ConcurrentMap<String, QueryMetrics> pageQueryMetrics = ModelBridgeInternal.queryMetrics(page); if (pageQueryMetrics != null) { pageQueryMetrics.forEach( aggregatedQueryMetrics::putIfAbsent); } requestCharge += page.getRequestCharge(); finalList.addAll(page.getResults().stream().map(document -> ModelBridgeInternal.toObjectFromJsonSerializable(document, klass)).collect(Collectors.toList())); } headers.put(HttpConstants.HttpHeaders.REQUEST_CHARGE, Double .toString(requestCharge)); FeedResponse<T> frp = BridgeInternal .createFeedResponse(finalList, headers); return frp; }); }); } ); } private Map<PartitionKeyRange, SqlQuerySpec> getRangeQueryMap( Map<PartitionKeyRange, List<CosmosItemIdentity>> partitionRangeItemKeyMap, PartitionKeyDefinition partitionKeyDefinition) { Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap = new HashMap<>(); String partitionKeySelector = createPkSelector(partitionKeyDefinition); for(Map.Entry<PartitionKeyRange, List<CosmosItemIdentity>> entry: partitionRangeItemKeyMap.entrySet()) { SqlQuerySpec sqlQuerySpec; if (partitionKeySelector.equals("[\"id\"]")) { sqlQuerySpec = createReadManyQuerySpecPartitionKeyIdSame(entry.getValue(), partitionKeySelector); } else { sqlQuerySpec = createReadManyQuerySpec(entry.getValue(), partitionKeySelector); } rangeQueryMap.put(entry.getKey(), sqlQuerySpec); } return rangeQueryMap; } private 
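// Builds the IN-list variant of the readMany query, used when the collection's partition key path is the document id itself.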
SqlQuerySpec createReadManyQuerySpecPartitionKeyIdSame( List<CosmosItemIdentity> idPartitionKeyPairList, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE c.id IN ( "); for (int i = 0; i < idPartitionKeyPairList.size(); i++) { CosmosItemIdentity itemIdentity = idPartitionKeyPairList.get(i); String idValue = itemIdentity.getId(); String idParamName = "@param" + i; PartitionKey pkValueAsPartitionKey = itemIdentity.getPartitionKey(); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); if (!Objects.equals(idValue, pkValue)) { continue; } parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append(idParamName); if (i < idPartitionKeyPairList.size() - 1) { queryStringBuilder.append(", "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private SqlQuerySpec createReadManyQuerySpec(List<CosmosItemIdentity> itemIdentities, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE ( "); for (int i = 0; i < itemIdentities.size(); i++) { CosmosItemIdentity itemIdentity = itemIdentities.get(i); PartitionKey pkValueAsPartitionKey = itemIdentity.getPartitionKey(); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(pkValueAsPartitionKey); String pkParamName = "@param" + (2 * i); parameters.add(new SqlParameter(pkParamName, pkValue)); String idValue = itemIdentity.getId(); String idParamName = "@param" + (2 * i + 1); parameters.add(new SqlParameter(idParamName, idValue)); queryStringBuilder.append("("); queryStringBuilder.append("c.id = "); queryStringBuilder.append(idParamName); queryStringBuilder.append(" AND "); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); queryStringBuilder.append(" )"); if (i < itemIdentities.size() - 1) { queryStringBuilder.append(" OR "); } } queryStringBuilder.append(" )"); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } private String createPkSelector(PartitionKeyDefinition partitionKeyDefinition) { return partitionKeyDefinition.getPaths() .stream() .map(pathPart -> StringUtils.substring(pathPart, 1)) .map(pathPart -> StringUtils.replace(pathPart, "\"", "\\")) .map(part -> "[\"" + part + "\"]") .collect(Collectors.joining()); } private <T extends Resource> Flux<FeedResponse<T>> createReadManyQuery( String parentResourceLink, SqlQuerySpec sqlQuery, CosmosQueryRequestOptions options, Class<T> klass, ResourceType resourceTypeEnum, DocumentCollection collection, Map<PartitionKeyRange, SqlQuerySpec> rangeQueryMap) { UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); Flux<? 
extends IDocumentQueryExecutionContext<T>> executionContext = DocumentQueryExecutionContextFactory.createReadManyQueryAsync(this, queryClient, collection.getResourceId(), sqlQuery, rangeQueryMap, options, collection.getResourceId(), parentResourceLink, activityId, klass, resourceTypeEnum); return executionContext.flatMap(IDocumentQueryExecutionContext<T>::executeAsync); } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryDocuments(collectionLink, new SqlQuerySpec(query), options); } private IDocumentQueryClient documentQueryClientImpl(RxDocumentClientImpl rxDocumentClientImpl, OperationContextAndListenerTuple operationContextAndListenerTuple) { return new IDocumentQueryClient () { @Override public RxCollectionCache getCollectionCache() { return RxDocumentClientImpl.this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return RxDocumentClientImpl.this.partitionKeyRangeCache; } @Override public IRetryPolicyFactory getResetSessionTokenRetryPolicy() { return RxDocumentClientImpl.this.resetSessionTokenRetryPolicy; } @Override public ConsistencyLevel getDefaultConsistencyLevelAsync() { return RxDocumentClientImpl.this.gatewayConfigurationReader.getDefaultConsistencyLevel(); } @Override public ConsistencyLevel getDesiredConsistencyLevelAsync() { return RxDocumentClientImpl.this.consistencyLevel; } @Override public Mono<RxDocumentServiceResponse> executeQueryAsync(RxDocumentServiceRequest request) { if (operationContextAndListenerTuple == null) { return RxDocumentClientImpl.this.query(request).single(); } else { final OperationListener listener = operationContextAndListenerTuple.getOperationListener(); final OperationContext operationContext = operationContextAndListenerTuple.getOperationContext(); request.getHeaders().put(HttpConstants.HttpHeaders.CORRELATED_ACTIVITY_ID, operationContext.getCorrelationActivityId()); listener.requestListener(operationContext, request); return RxDocumentClientImpl.this.query(request).single().doOnNext( response -> listener.responseListener(operationContext, response) ).doOnError( ex -> listener.exceptionListener(operationContext, ex) ); } } @Override public QueryCompatibilityMode getQueryCompatibilityMode() { return QueryCompatibilityMode.Default; } @Override public Mono<RxDocumentServiceResponse> readFeedAsync(RxDocumentServiceRequest request) { return null; } }; } @Override public Flux<FeedResponse<Document>> queryDocuments(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { SqlQuerySpecLogger.getInstance().logQuery(querySpec); return createQuery(collectionLink, querySpec, options, Document.class, ResourceType.Document); } @Override public Flux<FeedResponse<Document>> queryDocumentChangeFeed( final DocumentCollection collection, final CosmosChangeFeedRequestOptions changeFeedOptions) { checkNotNull(collection, "Argument 'collection' must not be null."); ChangeFeedQueryImpl<Document> changeFeedQueryImpl = new ChangeFeedQueryImpl<>( this, ResourceType.Document, Document.class, collection.getAltLink(), collection.getResourceId(), changeFeedOptions); return changeFeedQueryImpl.executeAsync(); } @Override public Flux<FeedResponse<Document>> readAllDocuments( String collectionLink, PartitionKey partitionKey, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (partitionKey == null) { throw new 
IllegalArgumentException("partitionKey"); } RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Query, ResourceType.Document, collectionLink, null ); Flux<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request).flux(); return collectionObs.flatMap(documentCollectionResourceResponse -> { DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } PartitionKeyDefinition pkDefinition = collection.getPartitionKey(); String pkSelector = createPkSelector(pkDefinition); SqlQuerySpec querySpec = createLogicalPartitionScanQuerySpec(partitionKey, pkSelector); String resourceLink = parentResourceLinkToQueryLink(collectionLink, ResourceType.Document); UUID activityId = Utils.randomUUID(); IDocumentQueryClient queryClient = documentQueryClientImpl(RxDocumentClientImpl.this, getOperationContextAndListenerTuple(options)); final CosmosQueryRequestOptions effectiveOptions = ModelBridgeInternal.createQueryRequestOptions(options); InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, resourceLink, ModelBridgeInternal.getPropertiesFromQueryRequestOptions(effectiveOptions)); return ObservableHelper.fluxInlineIfPossibleAsObs( () -> { Flux<Utils.ValueHolder<CollectionRoutingMap>> valueHolderMono = this.partitionKeyRangeCache .tryLookupAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), null, null).flux(); return valueHolderMono.flatMap(collectionRoutingMapValueHolder -> { CollectionRoutingMap routingMap = collectionRoutingMapValueHolder.v; if (routingMap == null) { throw new IllegalStateException("Failed to get routing map."); } String effectivePartitionKeyString = PartitionKeyInternalHelper .getEffectivePartitionKeyString( BridgeInternal.getPartitionKeyInternal(partitionKey), pkDefinition); PartitionKeyRange range = routingMap.getRangeByEffectivePartitionKey(effectivePartitionKeyString); return createQueryInternal( resourceLink, querySpec, ModelBridgeInternal.setPartitionKeyRangeIdInternal(effectiveOptions, range.getId()), Document.class, ResourceType.Document, queryClient, activityId); }); }, invalidPartitionExceptionRetryPolicy); }); } @Override public Map<String, PartitionedQueryExecutionInfo> getQueryPlanCache() { return queryPlanCache; } @Override public Flux<FeedResponse<PartitionKeyRange>> readPartitionKeyRanges(final String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.PartitionKeyRange, PartitionKeyRange.class, Utils.joinPath(collectionLink, Paths.PARTITION_KEY_RANGES_PATH_SEGMENT)); } private RxDocumentServiceRequest getStoredProcedureRequest(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } validateResource(storedProcedure); String path = Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, operationType); RxDocumentServiceRequest request = 
RxDocumentServiceRequest.create(this, operationType, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); return request; } private RxDocumentServiceRequest getUserDefinedFunctionRequest(String collectionLink, UserDefinedFunction udf, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (udf == null) { throw new IllegalArgumentException("udf"); } validateResource(udf); String path = Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<StoredProcedure>> createStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> createStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a StoredProcedure. collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Create); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in creating a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedure(String collectionLink, StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertStoredProcedureInternal(collectionLink, storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> upsertStoredProcedureInternal(String collectionLink, StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a StoredProcedure. 
collectionLink: [{}], storedProcedure id [{}]", collectionLink, storedProcedure.getId()); RxDocumentServiceRequest request = getStoredProcedureRequest(collectionLink, storedProcedure, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in upserting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedure(StoredProcedure storedProcedure, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceStoredProcedureInternal(storedProcedure, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> replaceStoredProcedureInternal(StoredProcedure storedProcedure, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (storedProcedure == null) { throw new IllegalArgumentException("storedProcedure"); } logger.debug("Replacing a StoredProcedure. storedProcedure id [{}]", storedProcedure.getId()); RxDocumentClientImpl.validateResource(storedProcedure); String path = Utils.joinPath(storedProcedure.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.StoredProcedure, path, storedProcedure, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in replacing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy requestRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteStoredProcedureInternal(storedProcedureLink, options, requestRetryPolicy), requestRetryPolicy); } private Mono<ResourceResponse<StoredProcedure>> deleteStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Deleting a StoredProcedure. 
storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in deleting a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<StoredProcedure>> readStoredProcedure(String storedProcedureLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readStoredProcedureInternal(storedProcedureLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<StoredProcedure>> readStoredProcedureInternal(String storedProcedureLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(storedProcedureLink)) { throw new IllegalArgumentException("storedProcedureLink"); } logger.debug("Reading a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.StoredProcedure, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, StoredProcedure.class)); } catch (Exception e) { logger.debug("Failure in reading a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<StoredProcedure>> readStoredProcedures(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.StoredProcedure, StoredProcedure.class, Utils.joinPath(collectionLink, Paths.STORED_PROCEDURES_PATH_SEGMENT)); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryStoredProcedures(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<StoredProcedure>> queryStoredProcedures(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, StoredProcedure.class, ResourceType.StoredProcedure); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, List<Object> procedureParams) { return this.executeStoredProcedure(storedProcedureLink, null, procedureParams); } @Override public Mono<StoredProcedureResponse> executeStoredProcedure(String storedProcedureLink, RequestOptions options, List<Object> procedureParams) { DocumentClientRetryPolicy documentClientRetryPolicy = 
this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> executeStoredProcedureInternal(storedProcedureLink, options, procedureParams, documentClientRetryPolicy), documentClientRetryPolicy); } @Override public Mono<CosmosBatchResponse> executeBatchRequest(String collectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, boolean disableAutomaticIdGeneration) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> executeBatchRequestInternal(collectionLink, serverBatchRequest, options, documentClientRetryPolicy, disableAutomaticIdGeneration), documentClientRetryPolicy); } private Mono<StoredProcedureResponse> executeStoredProcedureInternal(String storedProcedureLink, RequestOptions options, List<Object> procedureParams, DocumentClientRetryPolicy retryPolicy) { try { logger.debug("Executing a StoredProcedure. storedProcedureLink [{}]", storedProcedureLink); String path = Utils.joinPath(storedProcedureLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.StoredProcedure, OperationType.ExecuteJavaScript); requestHeaders.put(HttpConstants.HttpHeaders.ACCEPT, RuntimeConstants.MediaTypes.JSON); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.ExecuteJavaScript, ResourceType.StoredProcedure, path, procedureParams != null && !procedureParams.isEmpty() ? RxDocumentClientImpl.serializeProcedureParams(procedureParams) : "", requestHeaders, options); if (retryPolicy != null) { retryPolicy.onBeforeSendRequest(request); } Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> create(request, retryPolicy, getOperationContextAndListenerTuple(options)) .map(response -> { this.captureSessionToken(request, response); return toStoredProcedureResponse(response); })); } catch (Exception e) { logger.debug("Failure in executing a StoredProcedure due to [{}]", e.getMessage(), e); return Mono.error(e); } } private Mono<CosmosBatchResponse> executeBatchRequestInternal(String collectionLink, ServerBatchRequest serverBatchRequest, RequestOptions options, DocumentClientRetryPolicy requestRetryPolicy, boolean disableAutomaticIdGeneration) { try { logger.debug("Executing a Batch request with number of operations {}", serverBatchRequest.getOperations().size()); Mono<RxDocumentServiceRequest> requestObs = getBatchDocumentRequest(requestRetryPolicy, collectionLink, serverBatchRequest, options, disableAutomaticIdGeneration); Mono<RxDocumentServiceResponse> responseObservable = requestObs.flatMap(request -> create(request, requestRetryPolicy, getOperationContextAndListenerTuple(options))); return responseObservable .map(serviceResponse -> BatchResponseParser.fromDocumentServiceResponse(serviceResponse, serverBatchRequest, true)); } catch (Exception ex) { logger.debug("Failure in executing a batch due to [{}]", ex.getMessage(), ex); return Mono.error(ex); } } @Override public Mono<ResourceResponse<Trigger>> createTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> createTriggerInternal(String 
collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a Trigger. collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in creating a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> upsertTrigger(String collectionLink, Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertTriggerInternal(collectionLink, trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> upsertTriggerInternal(String collectionLink, Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Trigger. collectionLink [{}], trigger id [{}]", collectionLink, trigger.getId()); RxDocumentServiceRequest request = getTriggerRequest(collectionLink, trigger, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in upserting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getTriggerRequest(String collectionLink, Trigger trigger, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } if (trigger == null) { throw new IllegalArgumentException("trigger"); } RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Trigger, path, trigger, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Trigger>> replaceTrigger(Trigger trigger, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceTriggerInternal(trigger, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> replaceTriggerInternal(Trigger trigger, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (trigger == null) { throw new IllegalArgumentException("trigger"); } logger.debug("Replacing a Trigger. 
trigger id [{}]", trigger.getId()); RxDocumentClientImpl.validateResource(trigger); String path = Utils.joinPath(trigger.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Trigger, path, trigger, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in replacing a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> deleteTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> deleteTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Deleting a Trigger. triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in deleting a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Trigger>> readTrigger(String triggerLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readTriggerInternal(triggerLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Trigger>> readTriggerInternal(String triggerLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(triggerLink)) { throw new IllegalArgumentException("triggerLink"); } logger.debug("Reading a Trigger. 
triggerLink [{}]", triggerLink); String path = Utils.joinPath(triggerLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Trigger, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Trigger, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Trigger.class)); } catch (Exception e) { logger.debug("Failure in reading a Trigger due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Trigger>> readTriggers(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Trigger, Trigger.class, Utils.joinPath(collectionLink, Paths.TRIGGERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryTriggers(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Trigger>> queryTriggers(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, Trigger.class, ResourceType.Trigger); } @Override public Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> createUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Creating a UserDefinedFunction. collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Create); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.create(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in creating a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunction(String collectionLink, UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserDefinedFunctionInternal(collectionLink, udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> upsertUserDefinedFunctionInternal(String collectionLink, UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a UserDefinedFunction. 
collectionLink [{}], udf id [{}]", collectionLink, udf.getId()); RxDocumentServiceRequest request = getUserDefinedFunctionRequest(collectionLink, udf, options, OperationType.Upsert); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in upserting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunction(UserDefinedFunction udf, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserDefinedFunctionInternal(udf, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> replaceUserDefinedFunctionInternal(UserDefinedFunction udf, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (udf == null) { throw new IllegalArgumentException("udf"); } logger.debug("Replacing a UserDefinedFunction. udf id [{}]", udf.getId()); validateResource(udf); String path = Utils.joinPath(udf.getSelfLink(), null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.UserDefinedFunction, path, udf, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in replacing a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> deleteUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Deleting a UserDefinedFunction. 
udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null){ retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in deleting a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunction(String udfLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserDefinedFunctionInternal(udfLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<UserDefinedFunction>> readUserDefinedFunctionInternal(String udfLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(udfLink)) { throw new IllegalArgumentException("udfLink"); } logger.debug("Reading a UserDefinedFunction. udfLink [{}]", udfLink); String path = Utils.joinPath(udfLink, null); Map<String, String> requestHeaders = this.getRequestHeaders(options, ResourceType.UserDefinedFunction, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.UserDefinedFunction, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, UserDefinedFunction.class)); } catch (Exception e) { logger.debug("Failure in reading a UserDefinedFunction due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<UserDefinedFunction>> readUserDefinedFunctions(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.UserDefinedFunction, UserDefinedFunction.class, Utils.joinPath(collectionLink, Paths.USER_DEFINED_FUNCTIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryUserDefinedFunctions(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<UserDefinedFunction>> queryUserDefinedFunctions(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, UserDefinedFunction.class, ResourceType.UserDefinedFunction); } @Override public Mono<ResourceResponse<Conflict>> readConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> readConflictInternal(String conflictLink, RequestOptions options, 
DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Reading a Conflict. conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Conflict, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in reading a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Conflict>> readConflicts(String collectionLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } return readFeed(options, ResourceType.Conflict, Conflict.class, Utils.joinPath(collectionLink, Paths.CONFLICTS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, String query, CosmosQueryRequestOptions options) { return queryConflicts(collectionLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Conflict>> queryConflicts(String collectionLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(collectionLink, querySpec, options, Conflict.class, ResourceType.Conflict); } @Override public Mono<ResourceResponse<Conflict>> deleteConflict(String conflictLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteConflictInternal(conflictLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Conflict>> deleteConflictInternal(String conflictLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(conflictLink)) { throw new IllegalArgumentException("conflictLink"); } logger.debug("Deleting a Conflict. 
conflictLink [{}]", conflictLink); String path = Utils.joinPath(conflictLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Conflict, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Conflict, path, requestHeaders, options); Mono<RxDocumentServiceRequest> reqObs = addPartitionKeyInformation(request, null, null, options); return reqObs.flatMap(req -> { if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Conflict.class)); }); } catch (Exception e) { logger.debug("Failure in deleting a Conflict due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> createUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createUserInternal(databaseLink, user, options, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<User>> createUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a User. databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in creating a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> upsertUser(String databaseLink, User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertUserInternal(databaseLink, user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> upsertUserInternal(String databaseLink, User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a User. 
databaseLink [{}], user id [{}]", databaseLink, user.getId()); RxDocumentServiceRequest request = getUserRequest(databaseLink, user, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in upserting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getUserRequest(String databaseLink, User user, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (user == null) { throw new IllegalArgumentException("user"); } RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.User, path, user, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<User>> replaceUser(User user, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceUserInternal(user, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> replaceUserInternal(User user, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (user == null) { throw new IllegalArgumentException("user"); } logger.debug("Replacing a User. user id [{}]", user.getId()); RxDocumentClientImpl.validateResource(user); String path = Utils.joinPath(user.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.User, path, user, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in replacing a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } public Mono<ResourceResponse<User>> deleteUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deleteUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> deleteUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Deleting a User. 
userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in deleting a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<User>> readUser(String userLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readUserInternal(userLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<User>> readUserInternal(String userLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } logger.debug("Reading a User. userLink [{}]", userLink); String path = Utils.joinPath(userLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.User, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.User, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, User.class)); } catch (Exception e) { logger.debug("Failure in reading a User due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<User>> readUsers(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.User, User.class, Utils.joinPath(databaseLink, Paths.USERS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, String query, CosmosQueryRequestOptions options) { return queryUsers(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<User>> queryUsers(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, User.class, ResourceType.User); } @Override public Mono<ResourceResponse<ClientEncryptionKey>> readClientEncryptionKey(String clientEncryptionKeyLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readClientEncryptionKeyInternal(clientEncryptionKeyLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> readClientEncryptionKeyInternal(String clientEncryptionKeyLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(clientEncryptionKeyLink)) { throw new IllegalArgumentException("clientEncryptionKeyLink"); } logger.debug("Reading a client encryption key. 
clientEncryptionKeyLink [{}]", clientEncryptionKeyLink); String path = Utils.joinPath(clientEncryptionKeyLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.ClientEncryptionKey, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in reading a client encryption key due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<ClientEncryptionKey>> createClientEncryptionKey(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createClientEncryptionKeyInternal(databaseLink, clientEncryptionKey, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> createClientEncryptionKeyInternal(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a client encryption key. databaseLink [{}], clientEncryptionKey id [{}]", databaseLink, clientEncryptionKey.getId()); RxDocumentServiceRequest request = getClientEncryptionKeyRequest(databaseLink, clientEncryptionKey, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in creating a client encryption key due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getClientEncryptionKeyRequest(String databaseLink, ClientEncryptionKey clientEncryptionKey, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } if (clientEncryptionKey == null) { throw new IllegalArgumentException("clientEncryptionKey"); } RxDocumentClientImpl.validateResource(clientEncryptionKey); String path = Utils.joinPath(databaseLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.ClientEncryptionKey, path, clientEncryptionKey, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<ClientEncryptionKey>> replaceClientEncryptionKey(ClientEncryptionKey clientEncryptionKey, String nameBasedLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceClientEncryptionKeyInternal(clientEncryptionKey, nameBasedLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<ClientEncryptionKey>> replaceClientEncryptionKeyInternal(ClientEncryptionKey clientEncryptionKey, String nameBasedLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (clientEncryptionKey 
== null) { throw new IllegalArgumentException("clientEncryptionKey"); } logger.debug("Replacing a clientEncryptionKey. clientEncryptionKey id [{}]", clientEncryptionKey.getId()); RxDocumentClientImpl.validateResource(clientEncryptionKey); String path = Utils.joinPath(nameBasedLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.ClientEncryptionKey, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.ClientEncryptionKey, path, clientEncryptionKey, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, ClientEncryptionKey.class)); } catch (Exception e) { logger.debug("Failure in replacing a clientEncryptionKey due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<ClientEncryptionKey>> readClientEncryptionKeys(String databaseLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(databaseLink)) { throw new IllegalArgumentException("databaseLink"); } return readFeed(options, ResourceType.ClientEncryptionKey, ClientEncryptionKey.class, Utils.joinPath(databaseLink, Paths.CLIENT_ENCRYPTION_KEY_PATH_SEGMENT)); } @Override public Flux<FeedResponse<ClientEncryptionKey>> queryClientEncryptionKeys(String databaseLink, String query, CosmosQueryRequestOptions options) { return queryClientEncryptionKeys(databaseLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<ClientEncryptionKey>> queryClientEncryptionKeys(String databaseLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(databaseLink, querySpec, options, ClientEncryptionKey.class, ResourceType.ClientEncryptionKey); } @Override public Mono<ResourceResponse<Permission>> createPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> createPermissionInternal(userLink, permission, options, documentClientRetryPolicy), this.resetSessionTokenRetryPolicy.getRequestPolicy()); } private Mono<ResourceResponse<Permission>> createPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Creating a Permission. 
userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Create); return this.create(request, documentClientRetryPolicy, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in creating a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> upsertPermission(String userLink, Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> upsertPermissionInternal(userLink, permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> upsertPermissionInternal(String userLink, Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { logger.debug("Upserting a Permission. userLink [{}], permission id [{}]", userLink, permission.getId()); RxDocumentServiceRequest request = getPermissionRequest(userLink, permission, options, OperationType.Upsert); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.upsert(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in upserting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } private RxDocumentServiceRequest getPermissionRequest(String userLink, Permission permission, RequestOptions options, OperationType operationType) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } if (permission == null) { throw new IllegalArgumentException("permission"); } RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, operationType); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, operationType, ResourceType.Permission, path, permission, requestHeaders, options); return request; } @Override public Mono<ResourceResponse<Permission>> replacePermission(Permission permission, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replacePermissionInternal(permission, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> replacePermissionInternal(Permission permission, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (permission == null) { throw new IllegalArgumentException("permission"); } logger.debug("Replacing a Permission. 
permission id [{}]", permission.getId()); RxDocumentClientImpl.validateResource(permission); String path = Utils.joinPath(permission.getSelfLink(), null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Replace); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Permission, path, permission, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.replace(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in replacing a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> deletePermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> deletePermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> deletePermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Deleting a Permission. permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Delete); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Delete, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.delete(request, retryPolicyInstance, getOperationContextAndListenerTuple(options)).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in deleting a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Permission>> readPermission(String permissionLink, RequestOptions options) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readPermissionInternal(permissionLink, options, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Permission>> readPermissionInternal(String permissionLink, RequestOptions options, DocumentClientRetryPolicy retryPolicyInstance ) { try { if (StringUtils.isEmpty(permissionLink)) { throw new IllegalArgumentException("permissionLink"); } logger.debug("Reading a Permission. 
permissionLink [{}]", permissionLink); String path = Utils.joinPath(permissionLink, null); Map<String, String> requestHeaders = getRequestHeaders(options, ResourceType.Permission, OperationType.Read); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Permission, path, requestHeaders, options); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Permission.class)); } catch (Exception e) { logger.debug("Failure in reading a Permission due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Permission>> readPermissions(String userLink, CosmosQueryRequestOptions options) { if (StringUtils.isEmpty(userLink)) { throw new IllegalArgumentException("userLink"); } return readFeed(options, ResourceType.Permission, Permission.class, Utils.joinPath(userLink, Paths.PERMISSIONS_PATH_SEGMENT)); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, String query, CosmosQueryRequestOptions options) { return queryPermissions(userLink, new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Permission>> queryPermissions(String userLink, SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(userLink, querySpec, options, Permission.class, ResourceType.Permission); } @Override public Mono<ResourceResponse<Offer>> replaceOffer(Offer offer) { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> replaceOfferInternal(offer, documentClientRetryPolicy), documentClientRetryPolicy); } private Mono<ResourceResponse<Offer>> replaceOfferInternal(Offer offer, DocumentClientRetryPolicy documentClientRetryPolicy) { try { if (offer == null) { throw new IllegalArgumentException("offer"); } logger.debug("Replacing an Offer. offer id [{}]", offer.getId()); RxDocumentClientImpl.validateResource(offer); String path = Utils.joinPath(offer.getSelfLink(), null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Replace, ResourceType.Offer, path, offer, null, null); return this.replace(request, documentClientRetryPolicy).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in replacing an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Mono<ResourceResponse<Offer>> readOffer(String offerLink) { DocumentClientRetryPolicy retryPolicyInstance = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> readOfferInternal(offerLink, retryPolicyInstance), retryPolicyInstance); } private Mono<ResourceResponse<Offer>> readOfferInternal(String offerLink, DocumentClientRetryPolicy retryPolicyInstance) { try { if (StringUtils.isEmpty(offerLink)) { throw new IllegalArgumentException("offerLink"); } logger.debug("Reading an Offer. 
offerLink [{}]", offerLink); String path = Utils.joinPath(offerLink, null); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.Offer, path, (HashMap<String, String>)null, null); if (retryPolicyInstance != null) { retryPolicyInstance.onBeforeSendRequest(request); } return this.read(request, retryPolicyInstance).map(response -> toResourceResponse(response, Offer.class)); } catch (Exception e) { logger.debug("Failure in reading an Offer due to [{}]", e.getMessage(), e); return Mono.error(e); } } @Override public Flux<FeedResponse<Offer>> readOffers(CosmosQueryRequestOptions options) { return readFeed(options, ResourceType.Offer, Offer.class, Utils.joinPath(Paths.OFFERS_PATH_SEGMENT, null)); } private <T extends Resource> Flux<FeedResponse<T>> readFeed(CosmosQueryRequestOptions options, ResourceType resourceType, Class<T> klass, String resourceLink) { if (options == null) { options = new CosmosQueryRequestOptions(); } Integer maxItemCount = ModelBridgeInternal.getMaxItemCountFromQueryRequestOptions(options); int maxPageSize = maxItemCount != null ? maxItemCount : -1; final CosmosQueryRequestOptions finalCosmosQueryRequestOptions = options; DocumentClientRetryPolicy retryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); BiFunction<String, Integer, RxDocumentServiceRequest> createRequestFunc = (continuationToken, pageSize) -> { Map<String, String> requestHeaders = new HashMap<>(); if (continuationToken != null) { requestHeaders.put(HttpConstants.HttpHeaders.CONTINUATION, continuationToken); } requestHeaders.put(HttpConstants.HttpHeaders.PAGE_SIZE, Integer.toString(pageSize)); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.ReadFeed, resourceType, resourceLink, requestHeaders, finalCosmosQueryRequestOptions); retryPolicy.onBeforeSendRequest(request); return request; }; Function<RxDocumentServiceRequest, Mono<FeedResponse<T>>> executeFunc = request -> ObservableHelper .inlineIfPossibleAsObs(() -> readFeed(request).map(response -> toFeedResponsePage(response, klass)), retryPolicy); return Paginator.getPaginatedQueryResultAsObservable(options, createRequestFunc, executeFunc, klass, maxPageSize); } @Override public Flux<FeedResponse<Offer>> queryOffers(String query, CosmosQueryRequestOptions options) { return queryOffers(new SqlQuerySpec(query), options); } @Override public Flux<FeedResponse<Offer>> queryOffers(SqlQuerySpec querySpec, CosmosQueryRequestOptions options) { return createQuery(null, querySpec, options, Offer.class, ResourceType.Offer); } @Override public Mono<DatabaseAccount> getDatabaseAccount() { DocumentClientRetryPolicy documentClientRetryPolicy = this.resetSessionTokenRetryPolicy.getRequestPolicy(); return ObservableHelper.inlineIfPossibleAsObs(() -> getDatabaseAccountInternal(documentClientRetryPolicy), documentClientRetryPolicy); } @Override public DatabaseAccount getLatestDatabaseAccount() { return this.globalEndpointManager.getLatestDatabaseAccount(); } private Mono<DatabaseAccount> getDatabaseAccountInternal(DocumentClientRetryPolicy documentClientRetryPolicy) { try { logger.debug("Getting Database Account"); RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DatabaseAccount, "", (HashMap<String, String>) null, null); return this.read(request, documentClientRetryPolicy).map(ModelBridgeInternal::toDatabaseAccount); } catch (Exception e) { logger.debug("Failure in getting Database Account due to [{}]", e.getMessage(), 
e); return Mono.error(e); } } public Object getSession() { return this.sessionContainer; } public void setSession(Object sessionContainer) { this.sessionContainer = (SessionContainer) sessionContainer; } @Override public RxClientCollectionCache getCollectionCache() { return this.collectionCache; } @Override public RxPartitionKeyRangeCache getPartitionKeyRangeCache() { return partitionKeyRangeCache; } public Flux<DatabaseAccount> getDatabaseAccountFromEndpoint(URI endpoint) { return Flux.defer(() -> { RxDocumentServiceRequest request = RxDocumentServiceRequest.create(this, OperationType.Read, ResourceType.DatabaseAccount, "", null, (Object) null); return this.populateHeaders(request, RequestVerb.GET) .flatMap(requestPopulated -> { requestPopulated.setEndpointOverride(endpoint); return this.gatewayProxy.processMessage(requestPopulated).doOnError(e -> { String message = String.format("Failed to retrieve database account information. %s", e.getCause() != null ? e.getCause().toString() : e.toString()); logger.warn(message); }).map(rsp -> rsp.getResource(DatabaseAccount.class)) .doOnNext(databaseAccount -> this.useMultipleWriteLocations = this.connectionPolicy.isMultipleWriteRegionsEnabled() && BridgeInternal.isEnableMultipleWriteLocations(databaseAccount)); }); }); } /** * Certain requests must be routed through gateway even when the client connectivity mode is direct. * * @param request * @return RxStoreModel */ private RxStoreModel getStoreProxy(RxDocumentServiceRequest request) { if (request.UseGatewayMode) { return this.gatewayProxy; } ResourceType resourceType = request.getResourceType(); OperationType operationType = request.getOperationType(); if (resourceType == ResourceType.Offer || resourceType == ResourceType.ClientEncryptionKey || resourceType.isScript() && operationType != OperationType.ExecuteJavaScript || resourceType == ResourceType.PartitionKeyRange || resourceType == ResourceType.PartitionKey && operationType == OperationType.Delete) { return this.gatewayProxy; } if (operationType == OperationType.Create || operationType == OperationType.Upsert) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection || resourceType == ResourceType.Permission) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Delete) { if (resourceType == ResourceType.Database || resourceType == ResourceType.User || resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Replace) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else if (operationType == OperationType.Read) { if (resourceType == ResourceType.DocumentCollection) { return this.gatewayProxy; } else { return this.storeModel; } } else { if ((operationType == OperationType.Query || operationType == OperationType.SqlQuery || operationType == OperationType.ReadFeed) && Utils.isCollectionChild(request.getResourceType())) { if (request.getPartitionKeyRangeIdentity() == null && request.getHeaders().get(HttpConstants.HttpHeaders.PARTITION_KEY) == null) { return this.gatewayProxy; } } return this.storeModel; } } @Override public void close() { logger.info("Attempting to close client {}", this.clientId); if (!closed.getAndSet(true)) { activeClientsCnt.decrementAndGet(); logger.info("Shutting down ..."); logger.info("Closing Global Endpoint Manager ..."); 
LifeCycleUtils.closeQuietly(this.globalEndpointManager); logger.info("Closing StoreClientFactory ..."); LifeCycleUtils.closeQuietly(this.storeClientFactory); logger.info("Shutting down reactorHttpClient ..."); LifeCycleUtils.closeQuietly(this.reactorHttpClient); logger.info("Shutting down CpuMonitor ..."); CpuMemoryMonitor.unregister(this); if (this.throughputControlEnabled.get()) { logger.info("Closing ThroughputControlStore ..."); this.throughputControlStore.close(); } logger.info("Shutting down completed."); } else { logger.warn("Already shutdown!"); } } @Override public ItemDeserializer getItemDeserializer() { return this.itemDeserializer; } @Override public synchronized void enableThroughputControlGroup(ThroughputControlGroupInternal group) { checkNotNull(group, "Throughput control group can not be null"); if (this.throughputControlEnabled.compareAndSet(false, true)) { this.throughputControlStore = new ThroughputControlStore( this.collectionCache, this.connectionPolicy.getConnectionMode(), this.partitionKeyRangeCache); this.storeModel.enableThroughputControl(throughputControlStore); } this.throughputControlStore.enableThroughputControlGroup(group); } private static SqlQuerySpec createLogicalPartitionScanQuerySpec( PartitionKey partitionKey, String partitionKeySelector) { StringBuilder queryStringBuilder = new StringBuilder(); List<SqlParameter> parameters = new ArrayList<>(); queryStringBuilder.append("SELECT * FROM c WHERE"); Object pkValue = ModelBridgeInternal.getPartitionKeyObject(partitionKey); String pkParamName = "@pkValue"; parameters.add(new SqlParameter(pkParamName, pkValue)); queryStringBuilder.append(" c"); queryStringBuilder.append(partitionKeySelector); queryStringBuilder.append((" = ")); queryStringBuilder.append(pkParamName); return new SqlQuerySpec(queryStringBuilder.toString(), parameters); } @Override public Mono<List<FeedRange>> getFeedRanges(String collectionLink) { InvalidPartitionExceptionRetryPolicy invalidPartitionExceptionRetryPolicy = new InvalidPartitionExceptionRetryPolicy( this.collectionCache, null, collectionLink, new HashMap<>()); RxDocumentServiceRequest request = RxDocumentServiceRequest.create( this, OperationType.Query, ResourceType.Document, collectionLink, null); invalidPartitionExceptionRetryPolicy.onBeforeSendRequest(request); return ObservableHelper.inlineIfPossibleAsObs( () -> getFeedRangesInternal(request, collectionLink), invalidPartitionExceptionRetryPolicy); } private Mono<List<FeedRange>> getFeedRangesInternal(RxDocumentServiceRequest request, String collectionLink) { logger.debug("getFeedRange collectionLink=[{}]", collectionLink); if (StringUtils.isEmpty(collectionLink)) { throw new IllegalArgumentException("collectionLink"); } Mono<Utils.ValueHolder<DocumentCollection>> collectionObs = collectionCache.resolveCollectionAsync(null, request); return collectionObs.flatMap(documentCollectionResourceResponse -> { final DocumentCollection collection = documentCollectionResourceResponse.v; if (collection == null) { throw new IllegalStateException("Collection cannot be null"); } Mono<Utils.ValueHolder<List<PartitionKeyRange>>> valueHolderMono = partitionKeyRangeCache .tryGetOverlappingRangesAsync( BridgeInternal.getMetaDataDiagnosticContext(request.requestContext.cosmosDiagnostics), collection.getResourceId(), RANGE_INCLUDING_ALL_PARTITION_KEY_RANGES, true, null); return valueHolderMono.map(partitionKeyRangeList -> toFeedRanges(partitionKeyRangeList, request)); }); } private static List<FeedRange> toFeedRanges( 
Utils.ValueHolder<List<PartitionKeyRange>> partitionKeyRangeListValueHolder, RxDocumentServiceRequest request) { final List<PartitionKeyRange> partitionKeyRangeList = partitionKeyRangeListValueHolder.v; if (partitionKeyRangeList == null) { request.forceNameCacheRefresh = true; throw new InvalidPartitionException(); } List<FeedRange> feedRanges = new ArrayList<>(); partitionKeyRangeList.forEach(pkRange -> feedRanges.add(toFeedRange(pkRange))); return feedRanges; } private static FeedRange toFeedRange(PartitionKeyRange pkRange) { return new FeedRangeEpkImpl(pkRange.toRange()); } }
loadAzureVmMetaData is part of init and is not meant to be called multiple times, so why did we use an AtomicReference here? (A sketch of the caching pattern follows the updated method below.)
private void loadAzureVmMetaData() { AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get(); if (metadataSnapshot != null) { this.populateAzureVmMetaData(metadataSnapshot); return; } URI targetEndpoint = null; try { targetEndpoint = new URI(AZURE_VM_METADATA); } catch (URISyntaxException ex) { logger.info("Unable to parse azure vm metadata url"); return; } HashMap<String, String> headers = new HashMap<>(); headers.put("Metadata", "true"); HttpHeaders httpHeaders = new HttpHeaders(headers); HttpRequest httpRequest = new HttpRequest(HttpMethod.GET, targetEndpoint, targetEndpoint.getPort(), httpHeaders); Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest); httpResponseMono .flatMap(response -> response.bodyAsString()).map(metadataJson -> parse(metadataJson, AzureVMMetadata.class)).doOnSuccess(metadata -> { azureVmMetaDataSingleton.compareAndSet(null, metadata); this.populateAzureVmMetaData(metadata); }).onErrorResume(throwable -> { logger.info("Client is not on azure vm"); logger.debug("Unable to get azure vm metadata", throwable); return Mono.empty(); }).subscribe(); }
AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get();
private void loadAzureVmMetaData() { AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get(); if (metadataSnapshot != null) { this.populateAzureVmMetaData(metadataSnapshot); return; } URI targetEndpoint = null; try { targetEndpoint = new URI(AZURE_VM_METADATA); } catch (URISyntaxException ex) { logger.info("Unable to parse azure vm metadata url"); return; } HashMap<String, String> headers = new HashMap<>(); headers.put("Metadata", "true"); HttpHeaders httpHeaders = new HttpHeaders(headers); HttpRequest httpRequest = new HttpRequest(HttpMethod.GET, targetEndpoint, targetEndpoint.getPort(), httpHeaders); Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest); httpResponseMono .flatMap(response -> response.bodyAsString()).map(metadataJson -> parse(metadataJson, AzureVMMetadata.class)).doOnSuccess(metadata -> { azureVmMetaDataSingleton.compareAndSet(null, metadata); this.populateAzureVmMetaData(metadata); }).onErrorResume(throwable -> { logger.info("Client is not on azure vm"); logger.debug("Unable to get azure vm metadata", throwable); return Mono.empty(); }).subscribe(); }
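A possible answer to the question above: the VM metadata is process-wide state shared by every client instance, so a static `AtomicReference` plus `compareAndSet` gives an idempotent, thread-safe cache even if several clients race through `init()` at once. The sketch below shows only that pattern; `MetadataCache`, `Metadata`, and `fetchFromImds` are illustrative placeholder names, not types from the SDK.

```java
// Minimal sketch of the idempotent-cache pattern the AtomicReference appears to serve.
// MetadataCache, Metadata and fetchFromImds are placeholders, not SDK types.
import java.util.concurrent.atomic.AtomicReference;

final class MetadataCache {
    // Shared across every client instance in the process, hence static.
    private static final AtomicReference<Metadata> CACHE = new AtomicReference<>(null);

    Metadata load() {
        Metadata snapshot = CACHE.get();
        if (snapshot != null) {
            return snapshot; // another client already fetched and cached it
        }
        Metadata fetched = fetchFromImds(); // simulated network call; may run concurrently
        // Only the first successful fetch wins; later results are discarded,
        // so all callers observe a single consistent cached value.
        CACHE.compareAndSet(null, fetched);
        return CACHE.get();
    }

    private Metadata fetchFromImds() {
        return new Metadata();
    }

    static final class Metadata { }
}
```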
class ClientTelemetry { public final static int ONE_KB_TO_BYTES = 1024; public final static int REQUEST_LATENCY_MAX_MILLI_SEC = 300000; public final static int REQUEST_LATENCY_SUCCESS_PRECISION = 4; public final static int REQUEST_LATENCY_FAILURE_PRECISION = 2; public final static String REQUEST_LATENCY_NAME = "RequestLatency"; public final static String REQUEST_LATENCY_UNIT = "MilliSecond"; public final static int REQUEST_CHARGE_MAX = 10000; public final static int REQUEST_CHARGE_PRECISION = 2; public final static String REQUEST_CHARGE_NAME = "RequestCharge"; public final static String REQUEST_CHARGE_UNIT = "RU"; public final static String TCP_NEW_CHANNEL_LATENCY_NAME = "TcpNewChannelOpenLatency"; public final static String TCP_NEW_CHANNEL_LATENCY_UNIT = "MilliSecond"; public final static int TCP_NEW_CHANNEL_LATENCY_MAX_MILLI_SEC = 300000; public final static int TCP_NEW_CHANNEL_LATENCY_PRECISION = 2; public final static int CPU_MAX = 100; public final static int CPU_PRECISION = 2; private final static String CPU_NAME = "CPU"; private final static String CPU_UNIT = "Percentage"; public final static int MEMORY_MAX_IN_MB = 102400; public final static int MEMORY_PRECISION = 2; private final static String MEMORY_NAME = "MemoryRemaining"; private final static String MEMORY_UNIT = "MB"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final static AtomicLong instanceCount = new AtomicLong(0); private final static AtomicReference<AzureVMMetadata> azureVmMetaDataSingleton = new AtomicReference<>(null); private ClientTelemetryInfo clientTelemetryInfo; private final HttpClient httpClient; private final ScheduledThreadPoolExecutor scheduledExecutorService = new ScheduledThreadPoolExecutor(1, new CosmosDaemonThreadFactory("ClientTelemetry-" + instanceCount.incrementAndGet())); private final Scheduler scheduler = Schedulers.fromExecutor(scheduledExecutorService); private static final Logger logger = LoggerFactory.getLogger(ClientTelemetry.class); private volatile boolean isClosed; private volatile boolean isClientTelemetryEnabled; private static String AZURE_VM_METADATA = "http: private static final double PERCENTILE_50 = 50.0; private static final double PERCENTILE_90 = 90.0; private static final double PERCENTILE_95 = 95.0; private static final double PERCENTILE_99 = 99.0; private static final double PERCENTILE_999 = 99.9; private final int clientTelemetrySchedulingSec; private final IAuthorizationTokenProvider tokenProvider; private final String globalDatabaseAccountName; public ClientTelemetry(DiagnosticsClientContext diagnosticsClientContext, Boolean acceleratedNetworking, String clientId, String processId, String userAgent, ConnectionMode connectionMode, String globalDatabaseAccountName, String applicationRegion, String hostEnvInfo, HttpClient httpClient, boolean isClientTelemetryEnabled, IAuthorizationTokenProvider tokenProvider, List<String> preferredRegions ) { clientTelemetryInfo = new ClientTelemetryInfo( getMachineId(diagnosticsClientContext), clientId, processId, userAgent, connectionMode, globalDatabaseAccountName, applicationRegion, hostEnvInfo, acceleratedNetworking, preferredRegions); this.isClosed = false; this.httpClient = httpClient; this.isClientTelemetryEnabled = isClientTelemetryEnabled; this.clientTelemetrySchedulingSec = Configs.getClientTelemetrySchedulingInSec(); this.tokenProvider = tokenProvider; this.globalDatabaseAccountName = globalDatabaseAccountName; } public ClientTelemetryInfo getClientTelemetryInfo() { return clientTelemetryInfo; } 
public static String getMachineId(DiagnosticsClientContext diagnosticsClientContext) { AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get(); if (metadataSnapshot != null && metadataSnapshot.getVmId() != null) { String machineId = "vmId:" + metadataSnapshot.getVmId(); if (diagnosticsClientContext != null) { diagnosticsClientContext.getConfig().withMachineId(machineId); } return machineId; } if (diagnosticsClientContext == null) { return ""; } return diagnosticsClientContext.getConfig().getMachineId(); } public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, long value) { try { doubleHistogram.recordValue(value); } catch (Exception ex) { logger.warn("Error while recording value for client telemetry. ", ex); } } public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, double value) { try { doubleHistogram.recordValue(value); } catch (Exception ex) { logger.warn("Error while recording value for client telemetry. ", ex); } } public boolean isClientTelemetryEnabled() { return isClientTelemetryEnabled; } public void init() { loadAzureVmMetaData(); sendClientTelemetry().subscribe(); } public void close() { this.isClosed = true; this.scheduledExecutorService.shutdown(); logger.debug("GlobalEndpointManager closed."); } private Mono<Void> sendClientTelemetry() { return Mono.delay(Duration.ofSeconds(clientTelemetrySchedulingSec), CosmosSchedulers.COSMOS_PARALLEL) .flatMap(t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } if (!Configs.isClientTelemetryEnabled(this.isClientTelemetryEnabled)) { logger.trace("client telemetry not enabled"); return Mono.empty(); } readHistogram(); try { String endpoint = Configs.getClientTelemetryEndpoint(); if (StringUtils.isEmpty(endpoint)) { logger.info("ClientTelemetry {}", OBJECT_MAPPER.writeValueAsString(this.clientTelemetryInfo)); clearDataForNextRun(); return this.sendClientTelemetry(); } else { URI targetEndpoint = new URI(endpoint); ByteBuffer byteBuffer = BridgeInternal.serializeJsonToByteBuffer(this.clientTelemetryInfo, ClientTelemetry.OBJECT_MAPPER); Flux<byte[]> fluxBytes = Flux.just(RxDocumentServiceRequest.toByteArray(byteBuffer)); Map<String, String> headers = new HashMap<>(); String date = Utils.nowAsRFC1123(); headers.put(HttpConstants.HttpHeaders.X_DATE, date); String authorization = this.tokenProvider.getUserAuthorizationToken( "", ResourceType.ClientTelemetry, RequestVerb.POST, headers, AuthorizationTokenType.PrimaryMasterKey, null); try { authorization = URLEncoder.encode(authorization, Constants.UrlEncodingInfo.UTF_8); } catch (UnsupportedEncodingException e) { logger.error("Failed to encode authToken. 
Exception: ", e); this.clearDataForNextRun(); return this.sendClientTelemetry(); } HttpHeaders httpHeaders = new HttpHeaders(); httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_ENCODING, RuntimeConstants.Encoding.GZIP); httpHeaders.set(HttpConstants.HttpHeaders.X_DATE, date); httpHeaders.set(HttpConstants.HttpHeaders.DATABASE_ACCOUNT_NAME, this.globalDatabaseAccountName); httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); String envName = Configs.getEnvironmentName(); if (StringUtils.isNotEmpty(envName)) { httpHeaders.set(HttpConstants.HttpHeaders.ENVIRONMENT_NAME, envName); } HttpRequest httpRequest = new HttpRequest(HttpMethod.POST, targetEndpoint, targetEndpoint.getPort(), httpHeaders, fluxBytes); Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds())); return httpResponseMono.flatMap(response -> { if (response.statusCode() != HttpConstants.StatusCodes.OK) { logger.error("Client telemetry request did not succeeded, status code {}", response.statusCode()); } this.clearDataForNextRun(); return this.sendClientTelemetry(); }).onErrorResume(throwable -> { logger.error("Error while sending client telemetry request Exception: ", throwable); this.clearDataForNextRun(); return this.sendClientTelemetry(); }); } } catch (JsonProcessingException | URISyntaxException ex) { logger.error("Error while preparing client telemetry. Exception: ", ex); this.clearDataForNextRun(); return this.sendClientTelemetry(); } }).onErrorResume(ex -> { logger.error("sendClientTelemetry() - Unable to send client telemetry" + ". Exception: ", ex); clearDataForNextRun(); return this.sendClientTelemetry(); }).subscribeOn(scheduler); } private void populateAzureVmMetaData(AzureVMMetadata azureVMMetadata) { this.clientTelemetryInfo.setApplicationRegion(azureVMMetadata.getLocation()); this.clientTelemetryInfo.setVmId(azureVMMetadata.getVmId()); this.clientTelemetryInfo.setHostEnvInfo(azureVMMetadata.getOsType() + "|" + azureVMMetadata.getSku() + "|" + azureVMMetadata.getVmSize() + "|" + azureVMMetadata.getAzEnvironment()); } private static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) { try { return OBJECT_MAPPER.readValue(itemResponseBodyAsString, itemClassType); } catch (IOException e) { throw new IllegalStateException( "Failed to parse string [" + itemResponseBodyAsString + "] to POJO.", e); } } private void clearDataForNextRun() { this.clientTelemetryInfo.getSystemInfoMap().clear(); this.clientTelemetryInfo.getOperationInfoMap().clear(); this.clientTelemetryInfo.getCacheRefreshInfoMap().clear(); for (ConcurrentDoubleHistogram histogram : this.clientTelemetryInfo.getSystemInfoMap().values()) { histogram.reset(); } } private void readHistogram() { ConcurrentDoubleHistogram cpuHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.CPU_MAX, ClientTelemetry.CPU_PRECISION); cpuHistogram.setAutoResize(true); for (double val : CpuMemoryMonitor.getClientTelemetryCpuLatestList()) { recordValue(cpuHistogram, val); } ReportPayload cpuReportPayload = new ReportPayload(CPU_NAME, CPU_UNIT); clientTelemetryInfo.getSystemInfoMap().put(cpuReportPayload, cpuHistogram); ConcurrentDoubleHistogram memoryHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.MEMORY_MAX_IN_MB, ClientTelemetry.MEMORY_PRECISION); memoryHistogram.setAutoResize(true); for (double val : CpuMemoryMonitor.getClientTelemetryMemoryLatestList()) { 
recordValue(memoryHistogram, val); } ReportPayload memoryReportPayload = new ReportPayload(MEMORY_NAME, MEMORY_UNIT); clientTelemetryInfo.getSystemInfoMap().put(memoryReportPayload, memoryHistogram); this.clientTelemetryInfo.setTimeStamp(Instant.now().toString()); for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getSystemInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getCacheRefreshInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getOperationInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } } private void fillMetricsInfo(ReportPayload payload, ConcurrentDoubleHistogram histogram) { DoubleHistogram copyHistogram = histogram.copy(); payload.getMetricInfo().setCount(copyHistogram.getTotalCount()); payload.getMetricInfo().setMax(copyHistogram.getMaxValue()); payload.getMetricInfo().setMin(copyHistogram.getMinValue()); payload.getMetricInfo().setMean(copyHistogram.getMean()); Map<Double, Double> percentile = new HashMap<>(); percentile.put(PERCENTILE_50, copyHistogram.getValueAtPercentile(PERCENTILE_50)); percentile.put(PERCENTILE_90, copyHistogram.getValueAtPercentile(PERCENTILE_90)); percentile.put(PERCENTILE_95, copyHistogram.getValueAtPercentile(PERCENTILE_95)); percentile.put(PERCENTILE_99, copyHistogram.getValueAtPercentile(PERCENTILE_99)); percentile.put(PERCENTILE_999, copyHistogram.getValueAtPercentile(PERCENTILE_999)); payload.getMetricInfo().setPercentiles(percentile); } }
class ClientTelemetry { public final static int ONE_KB_TO_BYTES = 1024; public final static int REQUEST_LATENCY_MAX_MILLI_SEC = 300000; public final static int REQUEST_LATENCY_SUCCESS_PRECISION = 4; public final static int REQUEST_LATENCY_FAILURE_PRECISION = 2; public final static String REQUEST_LATENCY_NAME = "RequestLatency"; public final static String REQUEST_LATENCY_UNIT = "MilliSecond"; public final static int REQUEST_CHARGE_MAX = 10000; public final static int REQUEST_CHARGE_PRECISION = 2; public final static String REQUEST_CHARGE_NAME = "RequestCharge"; public final static String REQUEST_CHARGE_UNIT = "RU"; public final static String TCP_NEW_CHANNEL_LATENCY_NAME = "TcpNewChannelOpenLatency"; public final static String TCP_NEW_CHANNEL_LATENCY_UNIT = "MilliSecond"; public final static int TCP_NEW_CHANNEL_LATENCY_MAX_MILLI_SEC = 300000; public final static int TCP_NEW_CHANNEL_LATENCY_PRECISION = 2; public final static int CPU_MAX = 100; public final static int CPU_PRECISION = 2; private final static String CPU_NAME = "CPU"; private final static String CPU_UNIT = "Percentage"; public final static int MEMORY_MAX_IN_MB = 102400; public final static int MEMORY_PRECISION = 2; private final static String MEMORY_NAME = "MemoryRemaining"; private final static String MEMORY_UNIT = "MB"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final static AtomicLong instanceCount = new AtomicLong(0); private final static AtomicReference<AzureVMMetadata> azureVmMetaDataSingleton = new AtomicReference<>(null); private ClientTelemetryInfo clientTelemetryInfo; private final HttpClient httpClient; private final ScheduledThreadPoolExecutor scheduledExecutorService = new ScheduledThreadPoolExecutor(1, new CosmosDaemonThreadFactory("ClientTelemetry-" + instanceCount.incrementAndGet())); private final Scheduler scheduler = Schedulers.fromExecutor(scheduledExecutorService); private static final Logger logger = LoggerFactory.getLogger(ClientTelemetry.class); private volatile boolean isClosed; private volatile boolean isClientTelemetryEnabled; private static String AZURE_VM_METADATA = "http: private static final double PERCENTILE_50 = 50.0; private static final double PERCENTILE_90 = 90.0; private static final double PERCENTILE_95 = 95.0; private static final double PERCENTILE_99 = 99.0; private static final double PERCENTILE_999 = 99.9; private final int clientTelemetrySchedulingSec; private final IAuthorizationTokenProvider tokenProvider; private final String globalDatabaseAccountName; public ClientTelemetry(DiagnosticsClientContext diagnosticsClientContext, Boolean acceleratedNetworking, String clientId, String processId, String userAgent, ConnectionMode connectionMode, String globalDatabaseAccountName, String applicationRegion, String hostEnvInfo, HttpClient httpClient, boolean isClientTelemetryEnabled, IAuthorizationTokenProvider tokenProvider, List<String> preferredRegions ) { clientTelemetryInfo = new ClientTelemetryInfo( getMachineId(diagnosticsClientContext), clientId, processId, userAgent, connectionMode, globalDatabaseAccountName, applicationRegion, hostEnvInfo, acceleratedNetworking, preferredRegions); this.isClosed = false; this.httpClient = httpClient; this.isClientTelemetryEnabled = isClientTelemetryEnabled; this.clientTelemetrySchedulingSec = Configs.getClientTelemetrySchedulingInSec(); this.tokenProvider = tokenProvider; this.globalDatabaseAccountName = globalDatabaseAccountName; } public ClientTelemetryInfo getClientTelemetryInfo() { return clientTelemetryInfo; } 
public static String getMachineId(DiagnosticsClientContext diagnosticsClientContext) { AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get(); if (metadataSnapshot != null && metadataSnapshot.getVmId() != null) { String machineId = "vmId:" + metadataSnapshot.getVmId(); if (diagnosticsClientContext != null) { diagnosticsClientContext.getConfig().withMachineId(machineId); } return machineId; } if (diagnosticsClientContext == null) { return ""; } return diagnosticsClientContext.getConfig().getMachineId(); } public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, long value) { try { doubleHistogram.recordValue(value); } catch (Exception ex) { logger.warn("Error while recording value for client telemetry. ", ex); } } public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, double value) { try { doubleHistogram.recordValue(value); } catch (Exception ex) { logger.warn("Error while recording value for client telemetry. ", ex); } } public boolean isClientTelemetryEnabled() { return isClientTelemetryEnabled; } public void init() { loadAzureVmMetaData(); sendClientTelemetry().subscribe(); } public void close() { this.isClosed = true; this.scheduledExecutorService.shutdown(); logger.debug("GlobalEndpointManager closed."); } private Mono<Void> sendClientTelemetry() { return Mono.delay(Duration.ofSeconds(clientTelemetrySchedulingSec), CosmosSchedulers.COSMOS_PARALLEL) .flatMap(t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } if (!Configs.isClientTelemetryEnabled(this.isClientTelemetryEnabled)) { logger.trace("client telemetry not enabled"); return Mono.empty(); } readHistogram(); try { String endpoint = Configs.getClientTelemetryEndpoint(); if (StringUtils.isEmpty(endpoint)) { logger.info("ClientTelemetry {}", OBJECT_MAPPER.writeValueAsString(this.clientTelemetryInfo)); clearDataForNextRun(); return this.sendClientTelemetry(); } else { URI targetEndpoint = new URI(endpoint); ByteBuffer byteBuffer = BridgeInternal.serializeJsonToByteBuffer(this.clientTelemetryInfo, ClientTelemetry.OBJECT_MAPPER); Flux<byte[]> fluxBytes = Flux.just(RxDocumentServiceRequest.toByteArray(byteBuffer)); Map<String, String> headers = new HashMap<>(); String date = Utils.nowAsRFC1123(); headers.put(HttpConstants.HttpHeaders.X_DATE, date); String authorization = this.tokenProvider.getUserAuthorizationToken( "", ResourceType.ClientTelemetry, RequestVerb.POST, headers, AuthorizationTokenType.PrimaryMasterKey, null); try { authorization = URLEncoder.encode(authorization, Constants.UrlEncodingInfo.UTF_8); } catch (UnsupportedEncodingException e) { logger.error("Failed to encode authToken. 
Exception: ", e); this.clearDataForNextRun(); return this.sendClientTelemetry(); } HttpHeaders httpHeaders = new HttpHeaders(); httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_ENCODING, RuntimeConstants.Encoding.GZIP); httpHeaders.set(HttpConstants.HttpHeaders.X_DATE, date); httpHeaders.set(HttpConstants.HttpHeaders.DATABASE_ACCOUNT_NAME, this.globalDatabaseAccountName); httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); String envName = Configs.getEnvironmentName(); if (StringUtils.isNotEmpty(envName)) { httpHeaders.set(HttpConstants.HttpHeaders.ENVIRONMENT_NAME, envName); } HttpRequest httpRequest = new HttpRequest(HttpMethod.POST, targetEndpoint, targetEndpoint.getPort(), httpHeaders, fluxBytes); Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds())); return httpResponseMono.flatMap(response -> { if (response.statusCode() != HttpConstants.StatusCodes.OK) { logger.error("Client telemetry request did not succeeded, status code {}", response.statusCode()); } this.clearDataForNextRun(); return this.sendClientTelemetry(); }).onErrorResume(throwable -> { logger.error("Error while sending client telemetry request Exception: ", throwable); this.clearDataForNextRun(); return this.sendClientTelemetry(); }); } } catch (JsonProcessingException | URISyntaxException ex) { logger.error("Error while preparing client telemetry. Exception: ", ex); this.clearDataForNextRun(); return this.sendClientTelemetry(); } }).onErrorResume(ex -> { logger.error("sendClientTelemetry() - Unable to send client telemetry" + ". Exception: ", ex); clearDataForNextRun(); return this.sendClientTelemetry(); }).subscribeOn(scheduler); } private void populateAzureVmMetaData(AzureVMMetadata azureVMMetadata) { this.clientTelemetryInfo.setApplicationRegion(azureVMMetadata.getLocation()); this.clientTelemetryInfo.setMachineId("vmId:" + azureVMMetadata.getVmId()); this.clientTelemetryInfo.setHostEnvInfo(azureVMMetadata.getOsType() + "|" + azureVMMetadata.getSku() + "|" + azureVMMetadata.getVmSize() + "|" + azureVMMetadata.getAzEnvironment()); } private static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) { try { return OBJECT_MAPPER.readValue(itemResponseBodyAsString, itemClassType); } catch (IOException e) { throw new IllegalStateException( "Failed to parse string [" + itemResponseBodyAsString + "] to POJO.", e); } } private void clearDataForNextRun() { this.clientTelemetryInfo.getSystemInfoMap().clear(); this.clientTelemetryInfo.getOperationInfoMap().clear(); this.clientTelemetryInfo.getCacheRefreshInfoMap().clear(); for (ConcurrentDoubleHistogram histogram : this.clientTelemetryInfo.getSystemInfoMap().values()) { histogram.reset(); } } private void readHistogram() { ConcurrentDoubleHistogram cpuHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.CPU_MAX, ClientTelemetry.CPU_PRECISION); cpuHistogram.setAutoResize(true); for (double val : CpuMemoryMonitor.getClientTelemetryCpuLatestList()) { recordValue(cpuHistogram, val); } ReportPayload cpuReportPayload = new ReportPayload(CPU_NAME, CPU_UNIT); clientTelemetryInfo.getSystemInfoMap().put(cpuReportPayload, cpuHistogram); ConcurrentDoubleHistogram memoryHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.MEMORY_MAX_IN_MB, ClientTelemetry.MEMORY_PRECISION); memoryHistogram.setAutoResize(true); for (double val : 
CpuMemoryMonitor.getClientTelemetryMemoryLatestList()) { recordValue(memoryHistogram, val); } ReportPayload memoryReportPayload = new ReportPayload(MEMORY_NAME, MEMORY_UNIT); clientTelemetryInfo.getSystemInfoMap().put(memoryReportPayload, memoryHistogram); this.clientTelemetryInfo.setTimeStamp(Instant.now().toString()); for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getSystemInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getCacheRefreshInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getOperationInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } } private void fillMetricsInfo(ReportPayload payload, ConcurrentDoubleHistogram histogram) { DoubleHistogram copyHistogram = histogram.copy(); payload.getMetricInfo().setCount(copyHistogram.getTotalCount()); payload.getMetricInfo().setMax(copyHistogram.getMaxValue()); payload.getMetricInfo().setMin(copyHistogram.getMinValue()); payload.getMetricInfo().setMean(copyHistogram.getMean()); Map<Double, Double> percentile = new HashMap<>(); percentile.put(PERCENTILE_50, copyHistogram.getValueAtPercentile(PERCENTILE_50)); percentile.put(PERCENTILE_90, copyHistogram.getValueAtPercentile(PERCENTILE_90)); percentile.put(PERCENTILE_95, copyHistogram.getValueAtPercentile(PERCENTILE_95)); percentile.put(PERCENTILE_99, copyHistogram.getValueAtPercentile(PERCENTILE_99)); percentile.put(PERCENTILE_999, copyHistogram.getValueAtPercentile(PERCENTILE_999)); payload.getMetricInfo().setPercentiles(percentile); } }
That's no different from the async version. Should `createHttpRequestBase` become a `protected createHttpRequest` in the base class?
public HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { return createHttpRequestBase(methodParser, serializer, false, args); }
}
public HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { return createHttpRequest(methodParser, serializer, false, args); }
class SyncRestProxy extends RestProxyBase { /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that */ public SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { super(httpPipeline, serializer, interfaceParser); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. * * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSync(request, contextData); } @Override public Object invoke(Object proxy, Method method, RequestOptions options, EnumSet<ErrorOptions> errorOptions, Consumer<HttpRequest> requestCallback, SwaggerMethodParser methodParser, HttpRequest request, Context context) { HttpResponseDecoder.HttpDecodedResponse decodedResponse = null; Throwable throwable = null; try { context = startTracingSpan(method, context); if (options != null && requestCallback != null) { requestCallback.accept(request); } if (request.getBodyAsBinaryData() != null) { request.setBody(RestProxyUtils.validateLengthSync(request)); } final HttpResponse response = send(request, context); decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options, errorOptions); } catch (RuntimeException e) { throwable = e; throw LOGGER.logExceptionAsError(e); } finally { if (decodedResponse != null || throwable != null) { endTracingSpan(decodedResponse, throwable, context); } } } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpResponseDecoder.HttpDecodedResponse ensureExpectedStatus(final HttpResponseDecoder.HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options, EnumSet<ErrorOptions> errorOptions) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && errorOptions.contains(ErrorOptions.NO_THROW))) { return decodedResponse; } Exception e; BinaryData responseData = decodedResponse.getSourceResponse().getBodyAsBinaryData(); byte[] responseBytes = responseData == null ? null : responseData.toBytes(); if (responseBytes == null || responseBytes.length == 0) { e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody); } if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } private Object handleRestResponseReturnType(final HttpResponseDecoder.HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { if (entityType.equals(StreamResponse.class)) { return createResponse(response, entityType, null); } final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().close(); return createResponse(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnType(response, methodParser, bodyType); Response<?> httpResponse = createResponse(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponse(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnType(response, methodParser, entityType); } } private Object handleBodyReturnType(final HttpResponseDecoder.HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getBodyAsBinaryData().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getBodyAsBinaryData(); } else { result = response.getDecodedBodySync((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. 
* * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. * @return the deserialized result */ private Object handleRestReturnType(final HttpResponseDecoder.HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options, EnumSet<ErrorOptions> errorOptions) { final HttpResponseDecoder.HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options, errorOptions); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(HttpResponseDecoder.HttpDecodedResponse httpDecodedResponse, Throwable throwable, Context tracingContext) { if (tracingContext == null) { return; } Object disableTracingValue = (tracingContext.getData(Tracer.DISABLE_TRACING_KEY).isPresent() ? tracingContext.getData(Tracer.DISABLE_TRACING_KEY).get() : null); boolean disableTracing = Boolean.TRUE.equals(disableTracingValue != null ? disableTracingValue : false); if (disableTracing) { return; } int statusCode = 0; if (httpDecodedResponse != null) { statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (throwable != null) { if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext); } public void updateRequest(RequestDataConfiguration requestDataConfiguration, SerializerAdapter serializerAdapter) throws IOException { boolean isJson = requestDataConfiguration.isJson(); HttpRequest request = requestDataConfiguration.getHttpRequest(); Object bodyContentObject = requestDataConfiguration.getBodyContent(); if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializerAdapter.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializerAdapter.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } }
class SyncRestProxy extends RestProxyBase { /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that */ public SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { super(httpPipeline, serializer, interfaceParser); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. * * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSync(request, contextData); } @Override public Object invoke(Object proxy, Method method, RequestOptions options, EnumSet<ErrorOptions> errorOptions, Consumer<HttpRequest> requestCallback, SwaggerMethodParser methodParser, HttpRequest request, Context context) { HttpResponseDecoder.HttpDecodedResponse decodedResponse = null; Throwable throwable = null; try { context = startTracingSpan(method, context); if (options != null && requestCallback != null) { requestCallback.accept(request); } if (request.getBodyAsBinaryData() != null) { request.setBody(RestProxyUtils.validateLengthSync(request)); } final HttpResponse response = send(request, context); decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options, errorOptions); } catch (RuntimeException e) { throwable = e; throw LOGGER.logExceptionAsError(e); } finally { if (decodedResponse != null || throwable != null) { endTracingSpan(decodedResponse, throwable, context); } } } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. */ private HttpResponseDecoder.HttpDecodedResponse ensureExpectedStatus(final HttpResponseDecoder.HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options, EnumSet<ErrorOptions> errorOptions) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && errorOptions.contains(ErrorOptions.NO_THROW))) { return decodedResponse; } Exception e; BinaryData responseData = decodedResponse.getSourceResponse().getBodyAsBinaryData(); byte[] responseBytes = responseData == null ? 
null : responseData.toBytes(); if (responseBytes == null || responseBytes.length == 0) { e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody); } if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } private Object handleRestResponseReturnType(final HttpResponseDecoder.HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { if (entityType.equals(StreamResponse.class)) { return createResponse(response, entityType, null); } final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().close(); return createResponse(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnType(response, methodParser, bodyType); Response<?> httpResponse = createResponse(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponse(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnType(response, methodParser, entityType); } } private Object handleBodyReturnType(final HttpResponseDecoder.HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { BinaryData binaryData = response.getSourceResponse().getBodyAsBinaryData(); byte[] responseBodyBytes = binaryData != null ? binaryData.toBytes() : null; if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes != null ? (responseBodyBytes.length == 0 ? null : responseBodyBytes) : null; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getBodyAsBinaryData(); } else { result = response.getDecodedBodySync((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpResponseDecoder.HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options, EnumSet<ErrorOptions> errorOptions) { final HttpResponseDecoder.HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options, errorOptions); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } public void updateRequest(RequestDataConfiguration requestDataConfiguration, SerializerAdapter serializerAdapter) throws IOException { boolean isJson = requestDataConfiguration.isJson(); HttpRequest request = requestDataConfiguration.getHttpRequest(); Object bodyContentObject = requestDataConfiguration.getBodyContent(); if (isJson) { byte[] serializedBytes = serializerAdapter.serializeToBytes(bodyContentObject, SerializerEncoding.JSON); ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializerAdapter.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(serializedBytes.length)); request.setBody(BinaryData.fromBytes(serializedBytes)); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { if (((ByteBuffer) bodyContentObject).hasArray()) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { byte[] array = new byte[((ByteBuffer) bodyContentObject).remaining()]; ((ByteBuffer) bodyContentObject).get(array); request.setBody(array); } } else { byte[] serializedBytes = serializerAdapter .serializeToBytes(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders())); request.setHeader("Content-Length", String.valueOf(serializedBytes.length)); request.setBody(serializedBytes); } } }
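A minimal sketch of the refactor suggested above (hoisting `createHttpRequestBase` into the base class as a `protected createHttpRequest`), assuming the signatures shown in the surrounding snippets rather than the actual azure-core sources:

```java
// Sketch only: signatures are assumed from the snippets above, and imports for the
// azure-core types (HttpRequest, SwaggerMethodParser, SerializerAdapter) are omitted
// because SwaggerMethodParser lives in an internal package.
abstract class RestProxyBase {

    // Formerly createHttpRequestBase(...): one protected helper shared by the
    // sync and async proxies, so neither subclass needs its own thin wrapper.
    protected HttpRequest createHttpRequest(SwaggerMethodParser methodParser,
                                            SerializerAdapter serializer,
                                            boolean isAsync,
                                            Object[] args) throws java.io.IOException {
        // ... build the URL, headers and body from the parsed Swagger metadata ...
        throw new UnsupportedOperationException("body elided in this sketch");
    }
}
// A subclass then simply calls: createHttpRequest(methodParser, serializer, false, args)
```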
Just a generic question: should we retry a few times here in case of any transient issue?
private void loadAzureVmMetaData() { AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get(); if (metadataSnapshot != null) { this.populateAzureVmMetaData(metadataSnapshot); return; } URI targetEndpoint = null; try { targetEndpoint = new URI(AZURE_VM_METADATA); } catch (URISyntaxException ex) { logger.info("Unable to parse azure vm metadata url"); return; } HashMap<String, String> headers = new HashMap<>(); headers.put("Metadata", "true"); HttpHeaders httpHeaders = new HttpHeaders(headers); HttpRequest httpRequest = new HttpRequest(HttpMethod.GET, targetEndpoint, targetEndpoint.getPort(), httpHeaders); Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest); httpResponseMono .flatMap(response -> response.bodyAsString()).map(metadataJson -> parse(metadataJson, AzureVMMetadata.class)).doOnSuccess(metadata -> { azureVmMetaDataSingleton.compareAndSet(null, metadata); this.populateAzureVmMetaData(metadata); }).onErrorResume(throwable -> { logger.info("Client is not on azure vm"); logger.debug("Unable to get azure vm metadata", throwable); return Mono.empty(); }).subscribe(); }
}).onErrorResume(throwable -> {
private void loadAzureVmMetaData() { AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get(); if (metadataSnapshot != null) { this.populateAzureVmMetaData(metadataSnapshot); return; } URI targetEndpoint = null; try { targetEndpoint = new URI(AZURE_VM_METADATA); } catch (URISyntaxException ex) { logger.info("Unable to parse azure vm metadata url"); return; } HashMap<String, String> headers = new HashMap<>(); headers.put("Metadata", "true"); HttpHeaders httpHeaders = new HttpHeaders(headers); HttpRequest httpRequest = new HttpRequest(HttpMethod.GET, targetEndpoint, targetEndpoint.getPort(), httpHeaders); Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest); httpResponseMono .flatMap(response -> response.bodyAsString()).map(metadataJson -> parse(metadataJson, AzureVMMetadata.class)).doOnSuccess(metadata -> { azureVmMetaDataSingleton.compareAndSet(null, metadata); this.populateAzureVmMetaData(metadata); }).onErrorResume(throwable -> { logger.info("Client is not on azure vm"); logger.debug("Unable to get azure vm metadata", throwable); return Mono.empty(); }).subscribe(); }
class ClientTelemetry { public final static int ONE_KB_TO_BYTES = 1024; public final static int REQUEST_LATENCY_MAX_MILLI_SEC = 300000; public final static int REQUEST_LATENCY_SUCCESS_PRECISION = 4; public final static int REQUEST_LATENCY_FAILURE_PRECISION = 2; public final static String REQUEST_LATENCY_NAME = "RequestLatency"; public final static String REQUEST_LATENCY_UNIT = "MilliSecond"; public final static int REQUEST_CHARGE_MAX = 10000; public final static int REQUEST_CHARGE_PRECISION = 2; public final static String REQUEST_CHARGE_NAME = "RequestCharge"; public final static String REQUEST_CHARGE_UNIT = "RU"; public final static String TCP_NEW_CHANNEL_LATENCY_NAME = "TcpNewChannelOpenLatency"; public final static String TCP_NEW_CHANNEL_LATENCY_UNIT = "MilliSecond"; public final static int TCP_NEW_CHANNEL_LATENCY_MAX_MILLI_SEC = 300000; public final static int TCP_NEW_CHANNEL_LATENCY_PRECISION = 2; public final static int CPU_MAX = 100; public final static int CPU_PRECISION = 2; private final static String CPU_NAME = "CPU"; private final static String CPU_UNIT = "Percentage"; public final static int MEMORY_MAX_IN_MB = 102400; public final static int MEMORY_PRECISION = 2; private final static String MEMORY_NAME = "MemoryRemaining"; private final static String MEMORY_UNIT = "MB"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final static AtomicLong instanceCount = new AtomicLong(0); private final static AtomicReference<AzureVMMetadata> azureVmMetaDataSingleton = new AtomicReference<>(null); private ClientTelemetryInfo clientTelemetryInfo; private final HttpClient httpClient; private final ScheduledThreadPoolExecutor scheduledExecutorService = new ScheduledThreadPoolExecutor(1, new CosmosDaemonThreadFactory("ClientTelemetry-" + instanceCount.incrementAndGet())); private final Scheduler scheduler = Schedulers.fromExecutor(scheduledExecutorService); private static final Logger logger = LoggerFactory.getLogger(ClientTelemetry.class); private volatile boolean isClosed; private volatile boolean isClientTelemetryEnabled; private static String AZURE_VM_METADATA = "http: private static final double PERCENTILE_50 = 50.0; private static final double PERCENTILE_90 = 90.0; private static final double PERCENTILE_95 = 95.0; private static final double PERCENTILE_99 = 99.0; private static final double PERCENTILE_999 = 99.9; private final int clientTelemetrySchedulingSec; private final IAuthorizationTokenProvider tokenProvider; private final String globalDatabaseAccountName; public ClientTelemetry(DiagnosticsClientContext diagnosticsClientContext, Boolean acceleratedNetworking, String clientId, String processId, String userAgent, ConnectionMode connectionMode, String globalDatabaseAccountName, String applicationRegion, String hostEnvInfo, HttpClient httpClient, boolean isClientTelemetryEnabled, IAuthorizationTokenProvider tokenProvider, List<String> preferredRegions ) { clientTelemetryInfo = new ClientTelemetryInfo( getMachineId(diagnosticsClientContext), clientId, processId, userAgent, connectionMode, globalDatabaseAccountName, applicationRegion, hostEnvInfo, acceleratedNetworking, preferredRegions); this.isClosed = false; this.httpClient = httpClient; this.isClientTelemetryEnabled = isClientTelemetryEnabled; this.clientTelemetrySchedulingSec = Configs.getClientTelemetrySchedulingInSec(); this.tokenProvider = tokenProvider; this.globalDatabaseAccountName = globalDatabaseAccountName; } public ClientTelemetryInfo getClientTelemetryInfo() { return clientTelemetryInfo; } 
public static String getMachineId(DiagnosticsClientContext diagnosticsClientContext) { AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get(); if (metadataSnapshot != null && metadataSnapshot.getVmId() != null) { String machineId = "vmId:" + metadataSnapshot.getVmId(); if (diagnosticsClientContext != null) { diagnosticsClientContext.getConfig().withMachineId(machineId); } return machineId; } if (diagnosticsClientContext == null) { return ""; } return diagnosticsClientContext.getConfig().getMachineId(); } public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, long value) { try { doubleHistogram.recordValue(value); } catch (Exception ex) { logger.warn("Error while recording value for client telemetry. ", ex); } } public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, double value) { try { doubleHistogram.recordValue(value); } catch (Exception ex) { logger.warn("Error while recording value for client telemetry. ", ex); } } public boolean isClientTelemetryEnabled() { return isClientTelemetryEnabled; } public void init() { loadAzureVmMetaData(); sendClientTelemetry().subscribe(); } public void close() { this.isClosed = true; this.scheduledExecutorService.shutdown(); logger.debug("GlobalEndpointManager closed."); } private Mono<Void> sendClientTelemetry() { return Mono.delay(Duration.ofSeconds(clientTelemetrySchedulingSec), CosmosSchedulers.COSMOS_PARALLEL) .flatMap(t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } if (!Configs.isClientTelemetryEnabled(this.isClientTelemetryEnabled)) { logger.trace("client telemetry not enabled"); return Mono.empty(); } readHistogram(); try { String endpoint = Configs.getClientTelemetryEndpoint(); if (StringUtils.isEmpty(endpoint)) { logger.info("ClientTelemetry {}", OBJECT_MAPPER.writeValueAsString(this.clientTelemetryInfo)); clearDataForNextRun(); return this.sendClientTelemetry(); } else { URI targetEndpoint = new URI(endpoint); ByteBuffer byteBuffer = BridgeInternal.serializeJsonToByteBuffer(this.clientTelemetryInfo, ClientTelemetry.OBJECT_MAPPER); Flux<byte[]> fluxBytes = Flux.just(RxDocumentServiceRequest.toByteArray(byteBuffer)); Map<String, String> headers = new HashMap<>(); String date = Utils.nowAsRFC1123(); headers.put(HttpConstants.HttpHeaders.X_DATE, date); String authorization = this.tokenProvider.getUserAuthorizationToken( "", ResourceType.ClientTelemetry, RequestVerb.POST, headers, AuthorizationTokenType.PrimaryMasterKey, null); try { authorization = URLEncoder.encode(authorization, Constants.UrlEncodingInfo.UTF_8); } catch (UnsupportedEncodingException e) { logger.error("Failed to encode authToken. 
Exception: ", e); this.clearDataForNextRun(); return this.sendClientTelemetry(); } HttpHeaders httpHeaders = new HttpHeaders(); httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_ENCODING, RuntimeConstants.Encoding.GZIP); httpHeaders.set(HttpConstants.HttpHeaders.X_DATE, date); httpHeaders.set(HttpConstants.HttpHeaders.DATABASE_ACCOUNT_NAME, this.globalDatabaseAccountName); httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); String envName = Configs.getEnvironmentName(); if (StringUtils.isNotEmpty(envName)) { httpHeaders.set(HttpConstants.HttpHeaders.ENVIRONMENT_NAME, envName); } HttpRequest httpRequest = new HttpRequest(HttpMethod.POST, targetEndpoint, targetEndpoint.getPort(), httpHeaders, fluxBytes); Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds())); return httpResponseMono.flatMap(response -> { if (response.statusCode() != HttpConstants.StatusCodes.OK) { logger.error("Client telemetry request did not succeeded, status code {}", response.statusCode()); } this.clearDataForNextRun(); return this.sendClientTelemetry(); }).onErrorResume(throwable -> { logger.error("Error while sending client telemetry request Exception: ", throwable); this.clearDataForNextRun(); return this.sendClientTelemetry(); }); } } catch (JsonProcessingException | URISyntaxException ex) { logger.error("Error while preparing client telemetry. Exception: ", ex); this.clearDataForNextRun(); return this.sendClientTelemetry(); } }).onErrorResume(ex -> { logger.error("sendClientTelemetry() - Unable to send client telemetry" + ". Exception: ", ex); clearDataForNextRun(); return this.sendClientTelemetry(); }).subscribeOn(scheduler); } private void populateAzureVmMetaData(AzureVMMetadata azureVMMetadata) { this.clientTelemetryInfo.setApplicationRegion(azureVMMetadata.getLocation()); this.clientTelemetryInfo.setMachineId("vmId:" + azureVMMetadata.getVmId()); this.clientTelemetryInfo.setHostEnvInfo(azureVMMetadata.getOsType() + "|" + azureVMMetadata.getSku() + "|" + azureVMMetadata.getVmSize() + "|" + azureVMMetadata.getAzEnvironment()); } private static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) { try { return OBJECT_MAPPER.readValue(itemResponseBodyAsString, itemClassType); } catch (IOException e) { throw new IllegalStateException( "Failed to parse string [" + itemResponseBodyAsString + "] to POJO.", e); } } private void clearDataForNextRun() { this.clientTelemetryInfo.getSystemInfoMap().clear(); this.clientTelemetryInfo.getOperationInfoMap().clear(); this.clientTelemetryInfo.getCacheRefreshInfoMap().clear(); for (ConcurrentDoubleHistogram histogram : this.clientTelemetryInfo.getSystemInfoMap().values()) { histogram.reset(); } } private void readHistogram() { ConcurrentDoubleHistogram cpuHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.CPU_MAX, ClientTelemetry.CPU_PRECISION); cpuHistogram.setAutoResize(true); for (double val : CpuMemoryMonitor.getClientTelemetryCpuLatestList()) { recordValue(cpuHistogram, val); } ReportPayload cpuReportPayload = new ReportPayload(CPU_NAME, CPU_UNIT); clientTelemetryInfo.getSystemInfoMap().put(cpuReportPayload, cpuHistogram); ConcurrentDoubleHistogram memoryHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.MEMORY_MAX_IN_MB, ClientTelemetry.MEMORY_PRECISION); memoryHistogram.setAutoResize(true); for (double val : 
CpuMemoryMonitor.getClientTelemetryMemoryLatestList()) { recordValue(memoryHistogram, val); } ReportPayload memoryReportPayload = new ReportPayload(MEMORY_NAME, MEMORY_UNIT); clientTelemetryInfo.getSystemInfoMap().put(memoryReportPayload, memoryHistogram); this.clientTelemetryInfo.setTimeStamp(Instant.now().toString()); for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getSystemInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getCacheRefreshInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getOperationInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } } private void fillMetricsInfo(ReportPayload payload, ConcurrentDoubleHistogram histogram) { DoubleHistogram copyHistogram = histogram.copy(); payload.getMetricInfo().setCount(copyHistogram.getTotalCount()); payload.getMetricInfo().setMax(copyHistogram.getMaxValue()); payload.getMetricInfo().setMin(copyHistogram.getMinValue()); payload.getMetricInfo().setMean(copyHistogram.getMean()); Map<Double, Double> percentile = new HashMap<>(); percentile.put(PERCENTILE_50, copyHistogram.getValueAtPercentile(PERCENTILE_50)); percentile.put(PERCENTILE_90, copyHistogram.getValueAtPercentile(PERCENTILE_90)); percentile.put(PERCENTILE_95, copyHistogram.getValueAtPercentile(PERCENTILE_95)); percentile.put(PERCENTILE_99, copyHistogram.getValueAtPercentile(PERCENTILE_99)); percentile.put(PERCENTILE_999, copyHistogram.getValueAtPercentile(PERCENTILE_999)); payload.getMetricInfo().setPercentiles(percentile); } }
class ClientTelemetry { public final static int ONE_KB_TO_BYTES = 1024; public final static int REQUEST_LATENCY_MAX_MILLI_SEC = 300000; public final static int REQUEST_LATENCY_SUCCESS_PRECISION = 4; public final static int REQUEST_LATENCY_FAILURE_PRECISION = 2; public final static String REQUEST_LATENCY_NAME = "RequestLatency"; public final static String REQUEST_LATENCY_UNIT = "MilliSecond"; public final static int REQUEST_CHARGE_MAX = 10000; public final static int REQUEST_CHARGE_PRECISION = 2; public final static String REQUEST_CHARGE_NAME = "RequestCharge"; public final static String REQUEST_CHARGE_UNIT = "RU"; public final static String TCP_NEW_CHANNEL_LATENCY_NAME = "TcpNewChannelOpenLatency"; public final static String TCP_NEW_CHANNEL_LATENCY_UNIT = "MilliSecond"; public final static int TCP_NEW_CHANNEL_LATENCY_MAX_MILLI_SEC = 300000; public final static int TCP_NEW_CHANNEL_LATENCY_PRECISION = 2; public final static int CPU_MAX = 100; public final static int CPU_PRECISION = 2; private final static String CPU_NAME = "CPU"; private final static String CPU_UNIT = "Percentage"; public final static int MEMORY_MAX_IN_MB = 102400; public final static int MEMORY_PRECISION = 2; private final static String MEMORY_NAME = "MemoryRemaining"; private final static String MEMORY_UNIT = "MB"; private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); private final static AtomicLong instanceCount = new AtomicLong(0); private final static AtomicReference<AzureVMMetadata> azureVmMetaDataSingleton = new AtomicReference<>(null); private ClientTelemetryInfo clientTelemetryInfo; private final HttpClient httpClient; private final ScheduledThreadPoolExecutor scheduledExecutorService = new ScheduledThreadPoolExecutor(1, new CosmosDaemonThreadFactory("ClientTelemetry-" + instanceCount.incrementAndGet())); private final Scheduler scheduler = Schedulers.fromExecutor(scheduledExecutorService); private static final Logger logger = LoggerFactory.getLogger(ClientTelemetry.class); private volatile boolean isClosed; private volatile boolean isClientTelemetryEnabled; private static String AZURE_VM_METADATA = "http: private static final double PERCENTILE_50 = 50.0; private static final double PERCENTILE_90 = 90.0; private static final double PERCENTILE_95 = 95.0; private static final double PERCENTILE_99 = 99.0; private static final double PERCENTILE_999 = 99.9; private final int clientTelemetrySchedulingSec; private final IAuthorizationTokenProvider tokenProvider; private final String globalDatabaseAccountName; public ClientTelemetry(DiagnosticsClientContext diagnosticsClientContext, Boolean acceleratedNetworking, String clientId, String processId, String userAgent, ConnectionMode connectionMode, String globalDatabaseAccountName, String applicationRegion, String hostEnvInfo, HttpClient httpClient, boolean isClientTelemetryEnabled, IAuthorizationTokenProvider tokenProvider, List<String> preferredRegions ) { clientTelemetryInfo = new ClientTelemetryInfo( getMachineId(diagnosticsClientContext), clientId, processId, userAgent, connectionMode, globalDatabaseAccountName, applicationRegion, hostEnvInfo, acceleratedNetworking, preferredRegions); this.isClosed = false; this.httpClient = httpClient; this.isClientTelemetryEnabled = isClientTelemetryEnabled; this.clientTelemetrySchedulingSec = Configs.getClientTelemetrySchedulingInSec(); this.tokenProvider = tokenProvider; this.globalDatabaseAccountName = globalDatabaseAccountName; } public ClientTelemetryInfo getClientTelemetryInfo() { return clientTelemetryInfo; } 
public static String getMachineId(DiagnosticsClientContext diagnosticsClientContext) { AzureVMMetadata metadataSnapshot = azureVmMetaDataSingleton.get(); if (metadataSnapshot != null && metadataSnapshot.getVmId() != null) { String machineId = "vmId:" + metadataSnapshot.getVmId(); if (diagnosticsClientContext != null) { diagnosticsClientContext.getConfig().withMachineId(machineId); } return machineId; } if (diagnosticsClientContext == null) { return ""; } return diagnosticsClientContext.getConfig().getMachineId(); } public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, long value) { try { doubleHistogram.recordValue(value); } catch (Exception ex) { logger.warn("Error while recording value for client telemetry. ", ex); } } public static void recordValue(ConcurrentDoubleHistogram doubleHistogram, double value) { try { doubleHistogram.recordValue(value); } catch (Exception ex) { logger.warn("Error while recording value for client telemetry. ", ex); } } public boolean isClientTelemetryEnabled() { return isClientTelemetryEnabled; } public void init() { loadAzureVmMetaData(); sendClientTelemetry().subscribe(); } public void close() { this.isClosed = true; this.scheduledExecutorService.shutdown(); logger.debug("GlobalEndpointManager closed."); } private Mono<Void> sendClientTelemetry() { return Mono.delay(Duration.ofSeconds(clientTelemetrySchedulingSec), CosmosSchedulers.COSMOS_PARALLEL) .flatMap(t -> { if (this.isClosed) { logger.warn("client already closed"); return Mono.empty(); } if (!Configs.isClientTelemetryEnabled(this.isClientTelemetryEnabled)) { logger.trace("client telemetry not enabled"); return Mono.empty(); } readHistogram(); try { String endpoint = Configs.getClientTelemetryEndpoint(); if (StringUtils.isEmpty(endpoint)) { logger.info("ClientTelemetry {}", OBJECT_MAPPER.writeValueAsString(this.clientTelemetryInfo)); clearDataForNextRun(); return this.sendClientTelemetry(); } else { URI targetEndpoint = new URI(endpoint); ByteBuffer byteBuffer = BridgeInternal.serializeJsonToByteBuffer(this.clientTelemetryInfo, ClientTelemetry.OBJECT_MAPPER); Flux<byte[]> fluxBytes = Flux.just(RxDocumentServiceRequest.toByteArray(byteBuffer)); Map<String, String> headers = new HashMap<>(); String date = Utils.nowAsRFC1123(); headers.put(HttpConstants.HttpHeaders.X_DATE, date); String authorization = this.tokenProvider.getUserAuthorizationToken( "", ResourceType.ClientTelemetry, RequestVerb.POST, headers, AuthorizationTokenType.PrimaryMasterKey, null); try { authorization = URLEncoder.encode(authorization, Constants.UrlEncodingInfo.UTF_8); } catch (UnsupportedEncodingException e) { logger.error("Failed to encode authToken. 
Exception: ", e); this.clearDataForNextRun(); return this.sendClientTelemetry(); } HttpHeaders httpHeaders = new HttpHeaders(); httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_TYPE, RuntimeConstants.MediaTypes.JSON); httpHeaders.set(HttpConstants.HttpHeaders.CONTENT_ENCODING, RuntimeConstants.Encoding.GZIP); httpHeaders.set(HttpConstants.HttpHeaders.X_DATE, date); httpHeaders.set(HttpConstants.HttpHeaders.DATABASE_ACCOUNT_NAME, this.globalDatabaseAccountName); httpHeaders.set(HttpConstants.HttpHeaders.AUTHORIZATION, authorization); String envName = Configs.getEnvironmentName(); if (StringUtils.isNotEmpty(envName)) { httpHeaders.set(HttpConstants.HttpHeaders.ENVIRONMENT_NAME, envName); } HttpRequest httpRequest = new HttpRequest(HttpMethod.POST, targetEndpoint, targetEndpoint.getPort(), httpHeaders, fluxBytes); Mono<HttpResponse> httpResponseMono = this.httpClient.send(httpRequest, Duration.ofSeconds(Configs.getHttpResponseTimeoutInSeconds())); return httpResponseMono.flatMap(response -> { if (response.statusCode() != HttpConstants.StatusCodes.OK) { logger.error("Client telemetry request did not succeeded, status code {}", response.statusCode()); } this.clearDataForNextRun(); return this.sendClientTelemetry(); }).onErrorResume(throwable -> { logger.error("Error while sending client telemetry request Exception: ", throwable); this.clearDataForNextRun(); return this.sendClientTelemetry(); }); } } catch (JsonProcessingException | URISyntaxException ex) { logger.error("Error while preparing client telemetry. Exception: ", ex); this.clearDataForNextRun(); return this.sendClientTelemetry(); } }).onErrorResume(ex -> { logger.error("sendClientTelemetry() - Unable to send client telemetry" + ". Exception: ", ex); clearDataForNextRun(); return this.sendClientTelemetry(); }).subscribeOn(scheduler); } private void populateAzureVmMetaData(AzureVMMetadata azureVMMetadata) { this.clientTelemetryInfo.setApplicationRegion(azureVMMetadata.getLocation()); this.clientTelemetryInfo.setMachineId("vmId:" + azureVMMetadata.getVmId()); this.clientTelemetryInfo.setHostEnvInfo(azureVMMetadata.getOsType() + "|" + azureVMMetadata.getSku() + "|" + azureVMMetadata.getVmSize() + "|" + azureVMMetadata.getAzEnvironment()); } private static <T> T parse(String itemResponseBodyAsString, Class<T> itemClassType) { try { return OBJECT_MAPPER.readValue(itemResponseBodyAsString, itemClassType); } catch (IOException e) { throw new IllegalStateException( "Failed to parse string [" + itemResponseBodyAsString + "] to POJO.", e); } } private void clearDataForNextRun() { this.clientTelemetryInfo.getSystemInfoMap().clear(); this.clientTelemetryInfo.getOperationInfoMap().clear(); this.clientTelemetryInfo.getCacheRefreshInfoMap().clear(); for (ConcurrentDoubleHistogram histogram : this.clientTelemetryInfo.getSystemInfoMap().values()) { histogram.reset(); } } private void readHistogram() { ConcurrentDoubleHistogram cpuHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.CPU_MAX, ClientTelemetry.CPU_PRECISION); cpuHistogram.setAutoResize(true); for (double val : CpuMemoryMonitor.getClientTelemetryCpuLatestList()) { recordValue(cpuHistogram, val); } ReportPayload cpuReportPayload = new ReportPayload(CPU_NAME, CPU_UNIT); clientTelemetryInfo.getSystemInfoMap().put(cpuReportPayload, cpuHistogram); ConcurrentDoubleHistogram memoryHistogram = new ConcurrentDoubleHistogram(ClientTelemetry.MEMORY_MAX_IN_MB, ClientTelemetry.MEMORY_PRECISION); memoryHistogram.setAutoResize(true); for (double val : 
CpuMemoryMonitor.getClientTelemetryMemoryLatestList()) { recordValue(memoryHistogram, val); } ReportPayload memoryReportPayload = new ReportPayload(MEMORY_NAME, MEMORY_UNIT); clientTelemetryInfo.getSystemInfoMap().put(memoryReportPayload, memoryHistogram); this.clientTelemetryInfo.setTimeStamp(Instant.now().toString()); for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getSystemInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getCacheRefreshInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } for (Map.Entry<ReportPayload, ConcurrentDoubleHistogram> entry : this.clientTelemetryInfo.getOperationInfoMap().entrySet()) { fillMetricsInfo(entry.getKey(), entry.getValue()); } } private void fillMetricsInfo(ReportPayload payload, ConcurrentDoubleHistogram histogram) { DoubleHistogram copyHistogram = histogram.copy(); payload.getMetricInfo().setCount(copyHistogram.getTotalCount()); payload.getMetricInfo().setMax(copyHistogram.getMaxValue()); payload.getMetricInfo().setMin(copyHistogram.getMinValue()); payload.getMetricInfo().setMean(copyHistogram.getMean()); Map<Double, Double> percentile = new HashMap<>(); percentile.put(PERCENTILE_50, copyHistogram.getValueAtPercentile(PERCENTILE_50)); percentile.put(PERCENTILE_90, copyHistogram.getValueAtPercentile(PERCENTILE_90)); percentile.put(PERCENTILE_95, copyHistogram.getValueAtPercentile(PERCENTILE_95)); percentile.put(PERCENTILE_99, copyHistogram.getValueAtPercentile(PERCENTILE_99)); percentile.put(PERCENTILE_999, copyHistogram.getValueAtPercentile(PERCENTILE_999)); payload.getMetricInfo().setPercentiles(percentile); } }
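One way to add a few retries around such a metadata call for transient failures, as the question above asks, is Project Reactor's `retryWhen` with an exponential-backoff policy. The sketch below is generic; the attempt count, delays, and the transient-error classification are illustrative assumptions, not values used by the SDK.

```java
import java.time.Duration;

import reactor.core.publisher.Mono;
import reactor.util.retry.Retry;

public final class TransientRetryExample {

    // Wraps any Mono-returning call with a bounded exponential-backoff retry,
    // which is one way to tolerate transient IMDS or network hiccups.
    static <T> Mono<T> withTransientRetry(Mono<T> call) {
        return call.retryWhen(
            Retry.backoff(3, Duration.ofMillis(500))   // up to 3 retries, 0.5s initial delay
                 .maxBackoff(Duration.ofSeconds(5))    // cap the delay between attempts
                 .filter(TransientRetryExample::isTransient)); // retry only transient failures
    }

    private static boolean isTransient(Throwable t) {
        // Placeholder classification; a real implementation would inspect
        // status codes, IOExceptions, timeouts, etc.
        return !(t instanceof IllegalArgumentException);
    }
}
```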
nit: qualify the field access with `this.`
public Set<ThroughputControlGroupInternal> getThroughputControlGroupSet() { return throughputControlGroupSet; }
return throughputControlGroupSet;
public Set<ThroughputControlGroupInternal> getThroughputControlGroupSet() { return this.throughputControlGroupSet; }
class ContainerThroughputControlGroupProperties { private static Logger logger = LoggerFactory.getLogger(ContainerThroughputControlGroupProperties.class); private final AtomicReference<ThroughputControlGroupInternal> defaultGroup; private final Set<ThroughputControlGroupInternal> throughputControlGroupSet; private final Set<String> supressInitErrorGroupSet; public ContainerThroughputControlGroupProperties() { this.defaultGroup = new AtomicReference<>(); this.throughputControlGroupSet = ConcurrentHashMap.newKeySet(); this.supressInitErrorGroupSet = ConcurrentHashMap.newKeySet(); } public int addThroughputControlGroup(ThroughputControlGroupInternal group) { checkNotNull(group, "Throughput control group should not be null"); if (group.isDefault()) { if (!this.defaultGroup.compareAndSet(null, group)) { if (!this.defaultGroup.get().equals(group)) { throw new IllegalStateException("A default group already exists"); } } } if (group.isSuppressInitError()) { this.supressInitErrorGroupSet.add(group.getGroupName()); } if (this.throughputControlGroupSet.stream() .anyMatch(existingGroup -> StringUtils.equals(existingGroup.getId(), group.getId()) && !existingGroup.equals(group))) { throw new IllegalStateException("Throughput control group with id " + group.getId() + " already exists"); } this.throughputControlGroupSet.add(group); return this.throughputControlGroupSet.size(); } public boolean allowRequestContinueOnInitError(RxDocumentServiceRequest request) { checkNotNull(request, "Request should not be null"); String requestGroupName = request.getThroughputControlGroupName(); if (StringUtils.isEmpty(requestGroupName)) { requestGroupName = this.defaultGroup.get().getGroupName(); } return this.supressInitErrorGroupSet.contains(requestGroupName); } }
class ContainerThroughputControlGroupProperties { private static Logger logger = LoggerFactory.getLogger(ContainerThroughputControlGroupProperties.class); private final AtomicReference<ThroughputControlGroupInternal> defaultGroup; private final Set<ThroughputControlGroupInternal> throughputControlGroupSet; private final Set<String> supressInitErrorGroupSet; public ContainerThroughputControlGroupProperties() { this.defaultGroup = new AtomicReference<>(); this.throughputControlGroupSet = ConcurrentHashMap.newKeySet(); this.supressInitErrorGroupSet = ConcurrentHashMap.newKeySet(); } /*** * Enable a throughput control group. * * @param group a {@link ThroughputControlGroupInternal}. * * @return the total size of distinct throughput control groups enabled on the container. */ public int enableThroughputControlGroup(ThroughputControlGroupInternal group) { checkNotNull(group, "Throughput control group should not be null"); if (group.isDefault()) { if (!this.defaultGroup.compareAndSet(null, group)) { if (!this.defaultGroup.get().equals(group)) { throw new IllegalArgumentException("A default group already exists"); } } } if (group.isContinueOnInitError()) { this.supressInitErrorGroupSet.add(group.getGroupName()); } if (this.throughputControlGroupSet.stream() .anyMatch(existingGroup -> Objects.equals(existingGroup.getId(), group.getId()) && !existingGroup.equals(group))) { throw new IllegalArgumentException("Throughput control group with id " + group.getId() + " already exists"); } this.throughputControlGroupSet.add(group); return this.throughputControlGroupSet.size(); } public boolean allowRequestToContinueOnInitError(RxDocumentServiceRequest request) { checkNotNull(request, "Request should not be null"); String requestGroupName = request.getThroughputControlGroupName(); if (StringUtils.isEmpty(requestGroupName)) { requestGroupName = this.defaultGroup.get().getGroupName(); } return this.supressInitErrorGroupSet.contains(requestGroupName); } }
Interesting that you used `ConcurrentHashMap.newKeySet()` instead of `Collections.synchronizedSet(new HashSet<>())`. I'm curious to learn: is there any specific reason for using this?
public ContainerThroughputControlGroupProperties() { this.defaultGroup = new AtomicReference<>(); this.throughputControlGroupSet = ConcurrentHashMap.newKeySet(); this.supressInitErrorGroupSet = ConcurrentHashMap.newKeySet(); }
this.throughputControlGroupSet = ConcurrentHashMap.newKeySet();
public ContainerThroughputControlGroupProperties() { this.defaultGroup = new AtomicReference<>(); this.throughputControlGroupSet = ConcurrentHashMap.newKeySet(); this.supressInitErrorGroupSet = ConcurrentHashMap.newKeySet(); }
class ContainerThroughputControlGroupProperties { private static Logger logger = LoggerFactory.getLogger(ContainerThroughputControlGroupProperties.class); private final AtomicReference<ThroughputControlGroupInternal> defaultGroup; private final Set<ThroughputControlGroupInternal> throughputControlGroupSet; private final Set<String> supressInitErrorGroupSet; public int addThroughputControlGroup(ThroughputControlGroupInternal group) { checkNotNull(group, "Throughput control group should not be null"); if (group.isDefault()) { if (!this.defaultGroup.compareAndSet(null, group)) { if (!this.defaultGroup.get().equals(group)) { throw new IllegalArgumentException("A default group already exists"); } } } if (group.isContinueOnInitError()) { this.supressInitErrorGroupSet.add(group.getGroupName()); } if (this.throughputControlGroupSet.stream() .anyMatch(existingGroup -> StringUtils.equals(existingGroup.getId(), group.getId()) && !existingGroup.equals(group))) { throw new IllegalArgumentException("Throughput control group with id " + group.getId() + " already exists"); } this.throughputControlGroupSet.add(group); return this.throughputControlGroupSet.size(); } public Set<ThroughputControlGroupInternal> getThroughputControlGroupSet() { return this.throughputControlGroupSet; } public boolean allowRequestToContinueOnInitError(RxDocumentServiceRequest request) { checkNotNull(request, "Request should not be null"); String requestGroupName = request.getThroughputControlGroupName(); if (StringUtils.isEmpty(requestGroupName)) { requestGroupName = this.defaultGroup.get().getGroupName(); } return this.supressInitErrorGroupSet.contains(requestGroupName); } }
class ContainerThroughputControlGroupProperties { private static Logger logger = LoggerFactory.getLogger(ContainerThroughputControlGroupProperties.class); private final AtomicReference<ThroughputControlGroupInternal> defaultGroup; private final Set<ThroughputControlGroupInternal> throughputControlGroupSet; private final Set<String> supressInitErrorGroupSet; /*** * Enable a throughput control group. * * @param group a {@link ThroughputControlGroupInternal}. * * @return the total size of distinct throughput control groups enabled on the container. */ public int enableThroughputControlGroup(ThroughputControlGroupInternal group) { checkNotNull(group, "Throughput control group should not be null"); if (group.isDefault()) { if (!this.defaultGroup.compareAndSet(null, group)) { if (!this.defaultGroup.get().equals(group)) { throw new IllegalArgumentException("A default group already exists"); } } } if (group.isContinueOnInitError()) { this.supressInitErrorGroupSet.add(group.getGroupName()); } if (this.throughputControlGroupSet.stream() .anyMatch(existingGroup -> Objects.equals(existingGroup.getId(), group.getId()) && !existingGroup.equals(group))) { throw new IllegalArgumentException("Throughput control group with id " + group.getId() + " already exists"); } this.throughputControlGroupSet.add(group); return this.throughputControlGroupSet.size(); } public Set<ThroughputControlGroupInternal> getThroughputControlGroupSet() { return this.throughputControlGroupSet; } public boolean allowRequestToContinueOnInitError(RxDocumentServiceRequest request) { checkNotNull(request, "Request should not be null"); String requestGroupName = request.getThroughputControlGroupName(); if (StringUtils.isEmpty(requestGroupName)) { requestGroupName = this.defaultGroup.get().getGroupName(); } return this.supressInitErrorGroupSet.contains(requestGroupName); } }
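On the question above: the main practical difference is that `ConcurrentHashMap.newKeySet()` is lock-free for readers and supports weakly consistent iteration while other threads mutate the set, whereas `Collections.synchronizedSet(...)` serializes every call on a single monitor and still requires external locking during iteration. The snippet below is a generic Java illustration of that iteration difference, not code from the SDK.

```java
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

public final class ConcurrentSetExample {
    public static void main(String[] args) {
        // Weakly consistent, lock-free iteration: safe even if another thread adds or
        // removes elements concurrently (no ConcurrentModificationException), though
        // the iterator may not reflect elements added after it was created.
        Set<String> concurrent = ConcurrentHashMap.newKeySet();
        concurrent.add("group-1");
        for (String name : concurrent) {
            System.out.println("concurrent: " + name);
        }

        // Synchronized wrapper: each call locks one shared monitor, and iteration must
        // be guarded manually or it can throw ConcurrentModificationException under contention.
        Set<String> synced = Collections.synchronizedSet(new HashSet<>());
        synced.add("group-1");
        synchronized (synced) {
            for (String name : synced) {
                System.out.println("synchronized: " + name);
            }
        }
    }
}
```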
Please help change this to `spring-cloud-azure-not-existing-xx`.
protected void doHealthCheck(Health.Builder builder) { try { this.secretAsyncClient.getSecretWithResponse("spring-cloud-azure-none-existing-secret", "") .block(timeout); builder.up(); } catch (Exception e) { if (e instanceof ResourceNotFoundException) { builder.up(); } else { throw e; } } }
this.secretAsyncClient.getSecretWithResponse("spring-cloud-azure-none-existing-secret", "")
protected void doHealthCheck(Health.Builder builder) { try { this.secretAsyncClient.getSecretWithResponse("spring-cloud-azure-not-existing-secret", "") .block(timeout); builder.up(); } catch (Exception e) { if (e instanceof ResourceNotFoundException) { builder.up(); } else { throw e; } } }
class KeyVaultSecretHealthIndicator extends AbstractHealthIndicator { private final SecretAsyncClient secretAsyncClient; private Duration timeout = DEFAULT_HEALTH_CHECK_TIMEOUT; /** * Creates a new instance of {@link KeyVaultSecretHealthIndicator}. * @param secretAsyncClient the secret async client */ public KeyVaultSecretHealthIndicator(SecretAsyncClient secretAsyncClient) { this.secretAsyncClient = secretAsyncClient; } @Override /** * Set health check request timeout. * @param timeout the duration value. */ public void setTimeout(Duration timeout) { this.timeout = timeout; } }
class KeyVaultSecretHealthIndicator extends AbstractHealthIndicator { private final SecretAsyncClient secretAsyncClient; private Duration timeout = DEFAULT_HEALTH_CHECK_TIMEOUT; /** * Creates a new instance of {@link KeyVaultSecretHealthIndicator}. * @param secretAsyncClient the secret async client */ public KeyVaultSecretHealthIndicator(SecretAsyncClient secretAsyncClient) { this.secretAsyncClient = secretAsyncClient; } @Override /** * Set health check request timeout. * @param timeout the duration value. */ public void setTimeout(Duration timeout) { this.timeout = timeout; } }
It's weird that this feature is partly contained in the library.
protected void configure(HttpSecurity http) throws Exception { http.oauth2Login() .authorizationEndpoint() .authorizationRequestResolver(requestResolver()) .and() .tokenEndpoint() .accessTokenResponseClient(accessTokenResponseClient()) .and() .userInfoEndpoint() .oidcUserService(oidcUserService) .and() .and() .logout() .logoutSuccessHandler(oidcLogoutSuccessHandler()); Filter handleConditionalAccessFilter = handleConditionalAccessFilter(); if (handleConditionalAccessFilter != null) { http.addFilterAfter(handleConditionalAccessFilter, OAuth2AuthorizationRequestRedirectFilter.class); } }
http.addFilterAfter(handleConditionalAccessFilter, OAuth2AuthorizationRequestRedirectFilter.class);
protected void configure(HttpSecurity http) throws Exception { http.oauth2Login() .authorizationEndpoint() .authorizationRequestResolver(requestResolver()) .and() .tokenEndpoint() .accessTokenResponseClient(accessTokenResponseClient()) .and() .userInfoEndpoint() .oidcUserService(oidcUserService) .and() .and() .logout() .logoutSuccessHandler(oidcLogoutSuccessHandler()); Filter conditionalAccessFilter = conditionalAccessFilter(); if (conditionalAccessFilter != null) { http.addFilterAfter(conditionalAccessFilter, OAuth2AuthorizationRequestRedirectFilter.class); } }
class AadWebSecurityConfigurerAdapter extends WebSecurityConfigurerAdapter { @Autowired private ClientRegistrationRepository repo; @Autowired private OAuth2UserService<OidcUserRequest, OidcUser> oidcUserService; /** * AAD authentication properties */ @Autowired protected AadAuthenticationProperties properties; /** * configure * * @param http the {@link HttpSecurity} to use * @throws Exception Configuration failed * */ @Override /** * Return the filter to handle conditional access exception. * No conditional access filter is provided by default. * @return a filter that handles conditional access exception. */ protected Filter handleConditionalAccessFilter() { return null; } /** * Gets the OIDC logout success handler. * * @return the OIDC logout success handler */ protected LogoutSuccessHandler oidcLogoutSuccessHandler() { OidcClientInitiatedLogoutSuccessHandler oidcLogoutSuccessHandler = new OidcClientInitiatedLogoutSuccessHandler(this.repo); String uri = this.properties.getPostLogoutRedirectUri(); if (StringUtils.hasText(uri)) { oidcLogoutSuccessHandler.setPostLogoutRedirectUri(uri); } return oidcLogoutSuccessHandler; } /** * Gets the access token response client. * * @return the access token response client */ protected OAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> accessTokenResponseClient() { DefaultAuthorizationCodeTokenResponseClient result = new DefaultAuthorizationCodeTokenResponseClient(); if (repo instanceof AadClientRegistrationRepository) { result.setRequestEntityConverter( new AadOAuth2AuthorizationCodeGrantRequestEntityConverter( ((AadClientRegistrationRepository) repo).getAzureClientAccessTokenScopes())); } return result; } /** * Gets the request resolver. * * @return the request resolver */ protected OAuth2AuthorizationRequestResolver requestResolver() { return new AadOAuth2AuthorizationRequestResolver(this.repo, properties); } }
class AadWebSecurityConfigurerAdapter extends WebSecurityConfigurerAdapter { @Autowired private ClientRegistrationRepository repo; @Autowired private OAuth2UserService<OidcUserRequest, OidcUser> oidcUserService; /** * AAD authentication properties */ @Autowired protected AadAuthenticationProperties properties; /** * configure * * @param http the {@link HttpSecurity} to use * @throws Exception Configuration failed * */ @Override /** * Return the filter to handle conditional access exception. * No conditional access filter is provided by default. * @see <a href="https: * @see <a href="https: * @return a filter that handles conditional access exception. */ protected Filter conditionalAccessFilter() { return null; } /** * Gets the OIDC logout success handler. * * @return the OIDC logout success handler */ protected LogoutSuccessHandler oidcLogoutSuccessHandler() { OidcClientInitiatedLogoutSuccessHandler oidcLogoutSuccessHandler = new OidcClientInitiatedLogoutSuccessHandler(this.repo); String uri = this.properties.getPostLogoutRedirectUri(); if (StringUtils.hasText(uri)) { oidcLogoutSuccessHandler.setPostLogoutRedirectUri(uri); } return oidcLogoutSuccessHandler; } /** * Gets the access token response client. * * @return the access token response client */ protected OAuth2AccessTokenResponseClient<OAuth2AuthorizationCodeGrantRequest> accessTokenResponseClient() { DefaultAuthorizationCodeTokenResponseClient result = new DefaultAuthorizationCodeTokenResponseClient(); if (repo instanceof AadClientRegistrationRepository) { result.setRequestEntityConverter( new AadOAuth2AuthorizationCodeGrantRequestEntityConverter( ((AadClientRegistrationRepository) repo).getAzureClientAccessTokenScopes())); } return result; } /** * Gets the request resolver. * * @return the request resolver */ protected OAuth2AuthorizationRequestResolver requestResolver() { return new AadOAuth2AuthorizationRequestResolver(this.repo, properties); } }
This is very nitpicky, but I wonder if this should just say `"none"`. Given the format of the string, we'd end up with something like: ``` [Authenticated account] Client ID: No Application Identifier Available, Tenant ID:... ``` That feels wordy, and because the constants don't match the string, you get weird disconnects.
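A minimal sketch of the suggested wording for the helper shown below (an assumption about how the `"none"` fallback would look; dropping the now-unused identifier-name parameter is also just part of the sketch):

```java
// Hypothetical variant: fall back to a terse "none" so the formatted line
// reads e.g. "Client ID: none" instead of
// "Client ID: No Application Identifier available."
private String getAccountIdentifierMessage(String identifierValue) {
    return identifierValue == null ? "none" : identifierValue;
}
```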
private String getAccountIdentifierMessage(String identifierName, String identifierValue) { if (identifierValue == null) { return "No " + identifierName + " available."; } return identifierValue; }
return "No " + identifierName + " available.";
private String getAccountIdentifierMessage(String identifierName, String identifierValue) { if (identifierValue == null) { return "No " + identifierName + " available."; } return identifierValue; }
class HttpPipelineAdapter implements IHttpClient { private static final ClientLogger CLIENT_LOGGER = new ClientLogger(HttpPipelineAdapter.class); private static final JsonFactory JSON_FACTORY = new JsonFactory(); private static final String ACCOUNT_IDENTIFIER_LOG_MESSAGE = "[Authenticated account] Client ID: {0}, Tenant ID: {1}" + ", User Principal Name: {2}, Object ID (user): {3})"; private static final String APPLICATION_IDENTIFIER = "Application Identifier"; private static final String OBJECT_ID = "Object Id"; private static final String TENANT_ID = "Tenant Id"; private static final String USER_PRINCIPAL_NAME = "User Principal Name"; private static final String ACCESS_TOKEN_JSON_KEY = "access_token"; private static final String APPLICATION_ID_JSON_KEY = "appid"; private static final String OBJECT_ID_JSON_KEY = "oid"; private static final String TENANT_ID_JSON_KEY = "tid"; private static final String USER_PRINCIPAL_NAME_JSON_KEY = "upn"; private final HttpPipeline httpPipeline; private IdentityClientOptions identityClientOptions; HttpPipelineAdapter(HttpPipeline httpPipeline, IdentityClientOptions identityClientOptions) { this.httpPipeline = httpPipeline; this.identityClientOptions = identityClientOptions; } @Override public IHttpResponse send(HttpRequest httpRequest) { com.azure.core.http.HttpRequest request = new com.azure.core.http.HttpRequest( HttpMethod.valueOf(httpRequest.httpMethod().name()), httpRequest.url()); if (httpRequest.headers() != null) { request.setHeaders(new HttpHeaders(httpRequest.headers())); } if (httpRequest.body() != null) { request.setBody(httpRequest.body()); } return httpPipeline.send(request) .flatMap(response -> response.getBodyAsString() .map(body -> { logAccounIdentifiersIfConfigured(body); com.microsoft.aad.msal4j.HttpResponse httpResponse = new com.microsoft.aad.msal4j.HttpResponse() .body(body) .statusCode(response.getStatusCode()); httpResponse.addHeaders(response.getHeaders().stream().collect(Collectors.toMap(HttpHeader::getName, HttpHeader::getValuesList))); return httpResponse; }) .switchIfEmpty(Mono.defer(() -> { com.microsoft.aad.msal4j.HttpResponse httpResponse = new com.microsoft.aad.msal4j.HttpResponse() .statusCode(response.getStatusCode()); httpResponse.addHeaders(response.getHeaders().stream().collect(Collectors.toMap(HttpHeader::getName, HttpHeader::getValuesList))); return Mono.just(httpResponse); }))) .block(); } private void logAccounIdentifiersIfConfigured(String body) { if (identityClientOptions != null && !identityClientOptions.getIdentityLogOptionsImpl().isLoggingAccountIdentifiersAllowed()) { return; } try { JsonParser responseParser = JSON_FACTORY.createParser(body); String accessToken = getTargetFieldValueFromJsonParser(responseParser, ACCESS_TOKEN_JSON_KEY); responseParser.close(); if (accessToken != null) { String[] base64Metadata = accessToken.split("\\."); if (base64Metadata.length > 1) { byte[] decoded = Base64.getDecoder().decode(base64Metadata[1]); String data = new String(decoded, StandardCharsets.UTF_8); JsonParser jsonParser = JSON_FACTORY.createParser(data); HashMap<String, String> jsonMap = parseJsonIntoMap(jsonParser); jsonParser.close(); String appId = jsonMap.containsKey(APPLICATION_ID_JSON_KEY) ? jsonMap.get(APPLICATION_ID_JSON_KEY) : null; String objectId = jsonMap.containsKey(OBJECT_ID_JSON_KEY) ? jsonMap.get(OBJECT_ID_JSON_KEY) : null; String tenantId = jsonMap.containsKey(TENANT_ID_JSON_KEY) ? jsonMap.get(TENANT_ID_JSON_KEY) : null; String userPrincipalName = jsonMap.containsKey(USER_PRINCIPAL_NAME_JSON_KEY) ? 
jsonMap.get(USER_PRINCIPAL_NAME_JSON_KEY) : null; CLIENT_LOGGER.log(LogLevel.INFORMATIONAL, () -> MessageFormat .format(ACCOUNT_IDENTIFIER_LOG_MESSAGE, getAccountIdentifierMessage(APPLICATION_IDENTIFIER, appId), getAccountIdentifierMessage(OBJECT_ID, objectId), getAccountIdentifierMessage(TENANT_ID, tenantId), getAccountIdentifierMessage(USER_PRINCIPAL_NAME, userPrincipalName))); } } } catch (IOException e) { CLIENT_LOGGER.log(LogLevel.WARNING, () -> "allowLoggingAccountIdentifiers Log option was set," + " but the account information could not be logged.", e); } } private String getTargetFieldValueFromJsonParser(JsonParser jsonParser, String targetField) throws IOException { while (jsonParser.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonParser.getCurrentName(); if (targetField.equals(fieldName)) { jsonParser.nextToken(); return jsonParser.getText(); } } return null; } private HashMap<String, String> parseJsonIntoMap(JsonParser jsonParser) throws IOException { HashMap<String, String> output = new HashMap<>(); JsonToken currentToken = jsonParser.nextToken(); if (jsonParser.getCurrentName() == null) { currentToken = jsonParser.nextToken(); } while (currentToken != JsonToken.END_OBJECT) { String fieldName = jsonParser.getCurrentName(); jsonParser.nextToken(); String value = jsonParser.getText(); output.put(fieldName, value); currentToken = jsonParser.nextToken(); } return output; } }
class HttpPipelineAdapter implements IHttpClient { private static final ClientLogger CLIENT_LOGGER = new ClientLogger(HttpPipelineAdapter.class); private static final JsonFactory JSON_FACTORY = new JsonFactory(); private static final String ACCOUNT_IDENTIFIER_LOG_MESSAGE = "[Authenticated account] Client ID: {0}, Tenant ID: {1}" + ", User Principal Name: {2}, Object ID (user): {3})"; private static final String APPLICATION_IDENTIFIER = "Application Identifier"; private static final String OBJECT_ID = "Object Id"; private static final String TENANT_ID = "Tenant Id"; private static final String USER_PRINCIPAL_NAME = "User Principal Name"; private static final String ACCESS_TOKEN_JSON_KEY = "access_token"; private static final String APPLICATION_ID_JSON_KEY = "appid"; private static final String OBJECT_ID_JSON_KEY = "oid"; private static final String TENANT_ID_JSON_KEY = "tid"; private static final String USER_PRINCIPAL_NAME_JSON_KEY = "upn"; private final HttpPipeline httpPipeline; private IdentityClientOptions identityClientOptions; HttpPipelineAdapter(HttpPipeline httpPipeline, IdentityClientOptions identityClientOptions) { this.httpPipeline = httpPipeline; this.identityClientOptions = identityClientOptions; } @Override public IHttpResponse send(HttpRequest httpRequest) { com.azure.core.http.HttpRequest request = new com.azure.core.http.HttpRequest( HttpMethod.valueOf(httpRequest.httpMethod().name()), httpRequest.url()); if (httpRequest.headers() != null) { request.setHeaders(new HttpHeaders(httpRequest.headers())); } if (httpRequest.body() != null) { request.setBody(httpRequest.body()); } return httpPipeline.send(request) .flatMap(response -> response.getBodyAsString() .map(body -> { logAccountIdentifiersIfConfigured(body); com.microsoft.aad.msal4j.HttpResponse httpResponse = new com.microsoft.aad.msal4j.HttpResponse() .body(body) .statusCode(response.getStatusCode()); httpResponse.addHeaders(response.getHeaders().stream().collect(Collectors.toMap(HttpHeader::getName, HttpHeader::getValuesList))); return httpResponse; }) .switchIfEmpty(Mono.defer(() -> { com.microsoft.aad.msal4j.HttpResponse httpResponse = new com.microsoft.aad.msal4j.HttpResponse() .statusCode(response.getStatusCode()); httpResponse.addHeaders(response.getHeaders().stream().collect(Collectors.toMap(HttpHeader::getName, HttpHeader::getValuesList))); return Mono.just(httpResponse); }))) .block(); } private void logAccountIdentifiersIfConfigured(String body) { if (identityClientOptions == null || !identityClientOptions.getIdentityLogOptionsImpl().isLoggingAccountIdentifiersAllowed()) { return; } try { JsonParser responseParser = JSON_FACTORY.createParser(body); String accessToken = getTargetFieldValueFromJsonParser(responseParser, ACCESS_TOKEN_JSON_KEY); responseParser.close(); if (accessToken != null) { String[] base64Metadata = accessToken.split("\\."); if (base64Metadata.length > 1) { byte[] decoded = Base64.getDecoder().decode(base64Metadata[1]); String data = new String(decoded, StandardCharsets.UTF_8); JsonParser jsonParser = JSON_FACTORY.createParser(data); HashMap<String, String> jsonMap = parseJsonIntoMap(jsonParser); jsonParser.close(); String appId = jsonMap.containsKey(APPLICATION_ID_JSON_KEY) ? jsonMap.get(APPLICATION_ID_JSON_KEY) : null; String objectId = jsonMap.containsKey(OBJECT_ID_JSON_KEY) ? jsonMap.get(OBJECT_ID_JSON_KEY) : null; String tenantId = jsonMap.containsKey(TENANT_ID_JSON_KEY) ? 
jsonMap.get(TENANT_ID_JSON_KEY) : null; String userPrincipalName = jsonMap.containsKey(USER_PRINCIPAL_NAME_JSON_KEY) ? jsonMap.get(USER_PRINCIPAL_NAME_JSON_KEY) : null; CLIENT_LOGGER.log(LogLevel.INFORMATIONAL, () -> MessageFormat .format(ACCOUNT_IDENTIFIER_LOG_MESSAGE, getAccountIdentifierMessage(APPLICATION_IDENTIFIER, appId), getAccountIdentifierMessage(TENANT_ID, tenantId), getAccountIdentifierMessage(USER_PRINCIPAL_NAME, userPrincipalName), getAccountIdentifierMessage(OBJECT_ID, objectId))); } } } catch (IOException e) { CLIENT_LOGGER.log(LogLevel.WARNING, () -> "allowLoggingAccountIdentifiers Log option was set," + " but the account information could not be logged.", e); } } private String getTargetFieldValueFromJsonParser(JsonParser jsonParser, String targetField) throws IOException { while (jsonParser.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonParser.getCurrentName(); if (targetField.equals(fieldName)) { jsonParser.nextToken(); return jsonParser.getText(); } } return null; } private HashMap<String, String> parseJsonIntoMap(JsonParser jsonParser) throws IOException { HashMap<String, String> output = new HashMap<>(); JsonToken currentToken = jsonParser.nextToken(); if (jsonParser.getCurrentName() == null) { currentToken = jsonParser.nextToken(); } while (currentToken != JsonToken.END_OBJECT) { String fieldName = jsonParser.getCurrentName(); jsonParser.nextToken(); String value = jsonParser.getText(); output.put(fieldName, value); currentToken = jsonParser.nextToken(); } return output; } }
Could look into generalizing the Flux-backed InputStream implementation that one of the Storage libraries uses.
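For illustration, a minimal sketch of what a generalized Flux-backed InputStream could look like; this is an assumption, not the Storage implementation, and a production version would batch reads and handle cancellation:

```java
// Sketch: expose a Flux<ByteBuffer> as an InputStream by pulling buffers
// lazily through a blocking Iterable, instead of aggregating everything
// into a single byte[] up front.
static InputStream toInputStream(Flux<ByteBuffer> data) {
    Iterator<ByteBuffer> buffers = data.toIterable(1).iterator();
    return new InputStream() {
        private ByteBuffer current;

        @Override
        public int read() {
            while (current == null || !current.hasRemaining()) {
                if (!buffers.hasNext()) {
                    return -1; // Flux completed, stream exhausted
                }
                current = buffers.next();
            }
            return current.get() & 0xFF;
        }
    };
}
```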
private static RequestBody toOkHttpRequestBodySynchronously(BinaryData bodyContent, HttpHeaders headers) { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); if (bodyContent == null) { return RequestBody.create(ByteString.EMPTY, mediaType); } BinaryDataContent content = BinaryDataHelper.getContent(bodyContent); if (content instanceof ByteArrayContent) { return RequestBody.create(content.toBytes(), mediaType); } else if (content instanceof FileContent) { FileContent fileContent = (FileContent) content; return RequestBody.create(fileContent.getFile().toFile(), mediaType); } else if (content instanceof StringContent) { return RequestBody.create(bodyContent.toString(), mediaType); } else if (content instanceof InputStreamContent) { return RequestBody.create(toByteString(content.toStream()), mediaType); } else { return toByteString(bodyContent.toFluxByteBuffer()).map(bs -> RequestBody.create(bs, mediaType)).block(); } }
private static RequestBody toOkHttpRequestBodySynchronously(BinaryData bodyContent, HttpHeaders headers) { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); if (bodyContent == null) { return RequestBody.create(ByteString.EMPTY, mediaType); } BinaryDataContent content = BinaryDataHelper.getContent(bodyContent); if (content instanceof ByteArrayContent) { return RequestBody.create(content.toBytes(), mediaType); } else if (content instanceof FileContent) { FileContent fileContent = (FileContent) content; return RequestBody.create(fileContent.getFile().toFile(), mediaType); } else if (content instanceof StringContent) { return RequestBody.create(bodyContent.toString(), mediaType); } else if (content instanceof InputStreamContent) { return RequestBody.create(toByteString(content.toStream()), mediaType); } else { return toByteString(bodyContent.toFluxByteBuffer()).map(bs -> RequestBody.create(bs, mediaType)).block(); } }
class OkHttpAsyncHttpClient implements HttpClient { private static final ClientLogger LOGGER = new ClientLogger(OkHttpAsyncHttpClient.class); final OkHttpClient httpClient; private static final Mono<okio.ByteString> EMPTY_BYTE_STRING_MONO = Mono.just(okio.ByteString.EMPTY); OkHttpAsyncHttpClient(OkHttpClient httpClient) { this.httpClient = httpClient; } @Override public Mono<HttpResponse> send(HttpRequest request) { return send(request, Context.NONE); } @Override public Mono<HttpResponse> send(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); return Mono.create(sink -> sink.onRequest(value -> { toOkHttpRequest(request).subscribe(okHttpRequest -> { try { Call call = httpClient.newCall(okHttpRequest); call.enqueue(new OkHttpCallback(sink, request, eagerlyReadResponse)); sink.onCancel(call::cancel); } catch (Exception ex) { sink.error(ex); } }, sink::error); })); } @Override public HttpResponse sendSynchronously(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); Request okHttpRequest = toOkHttpRequestSynchronously(request); Call call = httpClient.newCall(okHttpRequest); try { Response okHttpResponse = call.execute(); return fromOkHttpResponse(okHttpResponse, request, eagerlyReadResponse); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } } /** * Converts the given azure-core request to okhttp request. * * @param request the azure-core request * @return the Mono emitting okhttp request */ private static Mono<okhttp3.Request> toOkHttpRequest(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return Mono.just(requestBuilder.get().build()); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return Mono.just(requestBuilder.head().build()); } return toOkHttpRequestBody(request.getContent(), request.getHeaders()) .map(okhttpRequestBody -> requestBuilder.method(request.getHttpMethod().toString(), okhttpRequestBody) .build()); } /** * Converts the given azure-core request to okhttp request. * * @param request the azure-core request * @return the Mono emitting okhttp request */ private static okhttp3.Request toOkHttpRequestSynchronously(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return requestBuilder.get().build(); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return requestBuilder.head().build(); } RequestBody requestBody = toOkHttpRequestBodySynchronously(request.getContent(), request.getHeaders()); return requestBuilder.method(request.getHttpMethod().toString(), requestBody) .build(); } /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. * * @param bodyContent The BinaryData request body * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. 
* * @param bodyContent The BinaryData request body * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ private static Mono<RequestBody> toOkHttpRequestBody(BinaryData bodyContent, HttpHeaders headers) { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); if (bodyContent == null) { return Mono.defer(() -> Mono.just(RequestBody.create(ByteString.EMPTY, mediaType))); } BinaryDataContent content = BinaryDataHelper.getContent(bodyContent); if (content instanceof ByteArrayContent) { return Mono.defer(() -> Mono.just(RequestBody.create(content.toBytes(), mediaType))); } else if (content instanceof FileContent) { FileContent fileContent = (FileContent) content; return Mono.defer(() -> Mono.just(RequestBody.create(fileContent.getFile().toFile(), mediaType))); } else if (content instanceof StringContent) { return Mono.defer(() -> Mono.just(RequestBody.create(bodyContent.toString(), mediaType))); } else { return toByteString(bodyContent.toFluxByteBuffer()).map(bs -> RequestBody.create(bs, mediaType)); } } /** * Aggregate Flux of java.nio.ByteBuffer to single okio.ByteString. * * Pooled okio.Buffer type is used to buffer emitted ByteBuffer instances. Content of each ByteBuffer will be * written (i.e copied) to the internal okio.Buffer slots. Once the stream terminates, the contents of all slots get * copied to one single byte array and okio.ByteString will be created referring this byte array. Finally, the * initial okio.Buffer will be returned to the pool. * * @param bbFlux the Flux of ByteBuffer to aggregate * @return a mono emitting aggregated ByteString */ private static Mono<ByteString> toByteString(Flux<ByteBuffer> bbFlux) { Objects.requireNonNull(bbFlux, "'bbFlux' cannot be null."); return Mono.using(okio.Buffer::new, buffer -> bbFlux.reduce(buffer, (b, byteBuffer) -> { try { b.write(byteBuffer); return b; } catch (IOException ioe) { throw Exceptions.propagate(ioe); } }).map(b -> ByteString.of(b.readByteArray())), okio.Buffer::clear) .switchIfEmpty(Mono.defer(() -> EMPTY_BYTE_STRING_MONO)); } /** * Aggregate InputStream to single okio.ByteString. * * @param inputStream the InputStream to aggregate * @return Aggregated ByteString */ private static ByteString toByteString(InputStream inputStream) { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); int nRead; byte[] buffer = new byte[8192]; try (InputStream closeableInputStream = inputStream) { while ((nRead = closeableInputStream.read(buffer, 0, buffer.length)) != -1) { outputStream.write(buffer, 0, nRead); } } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } return ByteString.of(outputStream.toByteArray()); } private static HttpResponse fromOkHttpResponse( okhttp3.Response response, HttpRequest request, boolean eagerlyReadResponse) throws IOException { /* * Use a buffered response when we are eagerly reading the response from the network and the body isn't * empty. 
*/ if (eagerlyReadResponse) { ResponseBody body = response.body(); if (Objects.nonNull(body)) { byte[] bytes = body.bytes(); body.close(); return new OkHttpAsyncBufferedResponse(response, request, bytes); } else { return new OkHttpAsyncResponse(response, request); } } else { return new OkHttpAsyncResponse(response, request); } } private static class OkHttpCallback implements okhttp3.Callback { private final MonoSink<HttpResponse> sink; private final HttpRequest request; private final boolean eagerlyReadResponse; OkHttpCallback(MonoSink<HttpResponse> sink, HttpRequest request, boolean eagerlyReadResponse) { this.sink = sink; this.request = request; this.eagerlyReadResponse = eagerlyReadResponse; } @SuppressWarnings("NullableProblems") @Override public void onFailure(okhttp3.Call call, IOException e) { sink.error(e); } @SuppressWarnings("NullableProblems") @Override public void onResponse(okhttp3.Call call, okhttp3.Response response) { try { HttpResponse httpResponse = fromOkHttpResponse(response, request, eagerlyReadResponse); sink.success(httpResponse); } catch (IOException ex) { sink.error(ex); } } } }
class OkHttpAsyncHttpClient implements HttpClient { private static final ClientLogger LOGGER = new ClientLogger(OkHttpAsyncHttpClient.class); final OkHttpClient httpClient; private static final Mono<okio.ByteString> EMPTY_BYTE_STRING_MONO = Mono.just(okio.ByteString.EMPTY); OkHttpAsyncHttpClient(OkHttpClient httpClient) { this.httpClient = httpClient; } @Override public Mono<HttpResponse> send(HttpRequest request) { return send(request, Context.NONE); } @Override public Mono<HttpResponse> send(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); return Mono.create(sink -> sink.onRequest(value -> { toOkHttpRequest(request).subscribe(okHttpRequest -> { try { Call call = httpClient.newCall(okHttpRequest); call.enqueue(new OkHttpCallback(sink, request, eagerlyReadResponse)); sink.onCancel(call::cancel); } catch (Exception ex) { sink.error(ex); } }, sink::error); })); } @Override public HttpResponse sendSynchronously(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); Request okHttpRequest = toOkHttpRequestSynchronously(request); Call call = httpClient.newCall(okHttpRequest); try { Response okHttpResponse = call.execute(); return fromOkHttpResponse(okHttpResponse, request, eagerlyReadResponse); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } } /** * Converts the given azure-core request to okhttp request. * * @param request the azure-core request * @return the Mono emitting okhttp request */ private static Mono<okhttp3.Request> toOkHttpRequest(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return Mono.just(requestBuilder.get().build()); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return Mono.just(requestBuilder.head().build()); } return toOkHttpRequestBody(request.getContent(), request.getHeaders()) .map(okhttpRequestBody -> requestBuilder.method(request.getHttpMethod().toString(), okhttpRequestBody) .build()); } /** * Converts the given azure-core request to okhttp request. * * @param request the azure-core request * @return the Mono emitting okhttp request */ private static okhttp3.Request toOkHttpRequestSynchronously(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return requestBuilder.get().build(); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return requestBuilder.head().build(); } RequestBody requestBody = toOkHttpRequestBodySynchronously(request.getContent(), request.getHeaders()); return requestBuilder.method(request.getHttpMethod().toString(), requestBody) .build(); } /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. * * @param bodyContent The BinaryData request body * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. 
* * @param bodyContent The BinaryData request body * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ private static Mono<RequestBody> toOkHttpRequestBody(BinaryData bodyContent, HttpHeaders headers) { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); if (bodyContent == null) { return Mono.defer(() -> Mono.just(RequestBody.create(ByteString.EMPTY, mediaType))); } BinaryDataContent content = BinaryDataHelper.getContent(bodyContent); if (content instanceof ByteArrayContent) { return Mono.defer(() -> Mono.just(RequestBody.create(content.toBytes(), mediaType))); } else if (content instanceof FileContent) { FileContent fileContent = (FileContent) content; return Mono.defer(() -> Mono.just(RequestBody.create(fileContent.getFile().toFile(), mediaType))); } else if (content instanceof StringContent) { return Mono.defer(() -> Mono.just(RequestBody.create(bodyContent.toString(), mediaType))); } else { return toByteString(bodyContent.toFluxByteBuffer()).map(bs -> RequestBody.create(bs, mediaType)); } } /** * Aggregate Flux of java.nio.ByteBuffer to single okio.ByteString. * * Pooled okio.Buffer type is used to buffer emitted ByteBuffer instances. Content of each ByteBuffer will be * written (i.e copied) to the internal okio.Buffer slots. Once the stream terminates, the contents of all slots get * copied to one single byte array and okio.ByteString will be created referring this byte array. Finally, the * initial okio.Buffer will be returned to the pool. * * @param bbFlux the Flux of ByteBuffer to aggregate * @return a mono emitting aggregated ByteString */ private static Mono<ByteString> toByteString(Flux<ByteBuffer> bbFlux) { Objects.requireNonNull(bbFlux, "'bbFlux' cannot be null."); return Mono.using(okio.Buffer::new, buffer -> bbFlux.reduce(buffer, (b, byteBuffer) -> { try { b.write(byteBuffer); return b; } catch (IOException ioe) { throw Exceptions.propagate(ioe); } }).map(b -> ByteString.of(b.readByteArray())), okio.Buffer::clear) .switchIfEmpty(Mono.defer(() -> EMPTY_BYTE_STRING_MONO)); } /** * Aggregate InputStream to single okio.ByteString. * * @param inputStream the InputStream to aggregate * @return Aggregated ByteString */ private static ByteString toByteString(InputStream inputStream) { try (InputStream closeableInputStream = inputStream) { byte[] content = StreamUtils.INSTANCE.readAllBytes(closeableInputStream); return ByteString.of(content); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } } private static HttpResponse fromOkHttpResponse( okhttp3.Response response, HttpRequest request, boolean eagerlyReadResponse) throws IOException { /* * Use a buffered response when we are eagerly reading the response from the network and the body isn't * empty. 
*/ if (eagerlyReadResponse) { ResponseBody body = response.body(); if (Objects.nonNull(body)) { byte[] bytes = body.bytes(); body.close(); return new OkHttpAsyncBufferedResponse(response, request, bytes); } else { return new OkHttpAsyncResponse(response, request); } } else { return new OkHttpAsyncResponse(response, request); } } private static class OkHttpCallback implements okhttp3.Callback { private final MonoSink<HttpResponse> sink; private final HttpRequest request; private final boolean eagerlyReadResponse; OkHttpCallback(MonoSink<HttpResponse> sink, HttpRequest request, boolean eagerlyReadResponse) { this.sink = sink; this.request = request; this.eagerlyReadResponse = eagerlyReadResponse; } @SuppressWarnings("NullableProblems") @Override public void onFailure(okhttp3.Call call, IOException e) { sink.error(e); } @SuppressWarnings("NullableProblems") @Override public void onResponse(okhttp3.Call call, okhttp3.Response response) { try { HttpResponse httpResponse = fromOkHttpResponse(response, request, eagerlyReadResponse); sink.success(httpResponse); } catch (IOException ex) { sink.error(ex); } } } }
We should look at creating a utility method in Core that can optimize InputStream to byte[] conversion based on the type of InputStream
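A rough sketch of what such a Core utility might do, in the spirit of the `StreamUtils.INSTANCE.readAllBytes(...)` call used in the updated method below; the method name and the `available()`-based sizing heuristic are assumptions, not an existing azure-core API:

```java
// Hypothetical helper: use available() as a sizing hint (exact for
// ByteArrayInputStream and most in-memory streams) so the backing array
// rarely needs to grow, and copy in 8 KB chunks otherwise.
public static byte[] readAllBytes(InputStream stream) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream(Math.max(stream.available(), 32));
    byte[] buffer = new byte[8192];
    int read;
    while ((read = stream.read(buffer, 0, buffer.length)) != -1) {
        out.write(buffer, 0, read);
    }
    return out.toByteArray();
}
```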
private static ByteString toByteString(InputStream inputStream) { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); int nRead; byte[] buffer = new byte[8192]; try (InputStream closeableInputStream = inputStream) { while ((nRead = closeableInputStream.read(buffer, 0, buffer.length)) != -1) { outputStream.write(buffer, 0, nRead); } } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } return ByteString.of(outputStream.toByteArray()); }
try (InputStream closeableInputStream = inputStream) {
private static ByteString toByteString(InputStream inputStream) { try (InputStream closeableInputStream = inputStream) { byte[] content = StreamUtils.INSTANCE.readAllBytes(closeableInputStream); return ByteString.of(content); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } }
class OkHttpAsyncHttpClient implements HttpClient { private static final ClientLogger LOGGER = new ClientLogger(OkHttpAsyncHttpClient.class); final OkHttpClient httpClient; private static final Mono<okio.ByteString> EMPTY_BYTE_STRING_MONO = Mono.just(okio.ByteString.EMPTY); OkHttpAsyncHttpClient(OkHttpClient httpClient) { this.httpClient = httpClient; } @Override public Mono<HttpResponse> send(HttpRequest request) { return send(request, Context.NONE); } @Override public Mono<HttpResponse> send(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); return Mono.create(sink -> sink.onRequest(value -> { toOkHttpRequest(request).subscribe(okHttpRequest -> { try { Call call = httpClient.newCall(okHttpRequest); call.enqueue(new OkHttpCallback(sink, request, eagerlyReadResponse)); sink.onCancel(call::cancel); } catch (Exception ex) { sink.error(ex); } }, sink::error); })); } @Override public HttpResponse sendSynchronously(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); Request okHttpRequest = toOkHttpRequestSynchronously(request); Call call = httpClient.newCall(okHttpRequest); try { Response okHttpResponse = call.execute(); return fromOkHttpResponse(okHttpResponse, request, eagerlyReadResponse); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } } /** * Converts the given azure-core request to okhttp request. * * @param request the azure-core request * @return the Mono emitting okhttp request */ private static Mono<okhttp3.Request> toOkHttpRequest(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return Mono.just(requestBuilder.get().build()); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return Mono.just(requestBuilder.head().build()); } return toOkHttpRequestBody(request.getContent(), request.getHeaders()) .map(okhttpRequestBody -> requestBuilder.method(request.getHttpMethod().toString(), okhttpRequestBody) .build()); } /** * Converts the given azure-core request to okhttp request. * * @param request the azure-core request * @return the Mono emitting okhttp request */ private static okhttp3.Request toOkHttpRequestSynchronously(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return requestBuilder.get().build(); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return requestBuilder.head().build(); } RequestBody requestBody = toOkHttpRequestBodySynchronously(request.getContent(), request.getHeaders()); return requestBuilder.method(request.getHttpMethod().toString(), requestBody) .build(); } /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. 
* * @param bodyContent The BinaryData request body * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ private static RequestBody toOkHttpRequestBodySynchronously(BinaryData bodyContent, HttpHeaders headers) { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); if (bodyContent == null) { return RequestBody.create(ByteString.EMPTY, mediaType); } BinaryDataContent content = BinaryDataHelper.getContent(bodyContent); if (content instanceof ByteArrayContent) { return RequestBody.create(content.toBytes(), mediaType); } else if (content instanceof FileContent) { FileContent fileContent = (FileContent) content; return RequestBody.create(fileContent.getFile().toFile(), mediaType); } else if (content instanceof StringContent) { return RequestBody.create(bodyContent.toString(), mediaType); } else if (content instanceof InputStreamContent) { return RequestBody.create(toByteString(content.toStream()), mediaType); } else { return toByteString(bodyContent.toFluxByteBuffer()).map(bs -> RequestBody.create(bs, mediaType)).block(); } } /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. * * @param bodyContent The BinaryData request body * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ private static Mono<RequestBody> toOkHttpRequestBody(BinaryData bodyContent, HttpHeaders headers) { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); if (bodyContent == null) { return Mono.defer(() -> Mono.just(RequestBody.create(ByteString.EMPTY, mediaType))); } BinaryDataContent content = BinaryDataHelper.getContent(bodyContent); if (content instanceof ByteArrayContent) { return Mono.defer(() -> Mono.just(RequestBody.create(content.toBytes(), mediaType))); } else if (content instanceof FileContent) { FileContent fileContent = (FileContent) content; return Mono.defer(() -> Mono.just(RequestBody.create(fileContent.getFile().toFile(), mediaType))); } else if (content instanceof StringContent) { return Mono.defer(() -> Mono.just(RequestBody.create(bodyContent.toString(), mediaType))); } else { return toByteString(bodyContent.toFluxByteBuffer()).map(bs -> RequestBody.create(bs, mediaType)); } } /** * Aggregate Flux of java.nio.ByteBuffer to single okio.ByteString. * * Pooled okio.Buffer type is used to buffer emitted ByteBuffer instances. Content of each ByteBuffer will be * written (i.e copied) to the internal okio.Buffer slots. Once the stream terminates, the contents of all slots get * copied to one single byte array and okio.ByteString will be created referring this byte array. Finally, the * initial okio.Buffer will be returned to the pool. * * @param bbFlux the Flux of ByteBuffer to aggregate * @return a mono emitting aggregated ByteString */ private static Mono<ByteString> toByteString(Flux<ByteBuffer> bbFlux) { Objects.requireNonNull(bbFlux, "'bbFlux' cannot be null."); return Mono.using(okio.Buffer::new, buffer -> bbFlux.reduce(buffer, (b, byteBuffer) -> { try { b.write(byteBuffer); return b; } catch (IOException ioe) { throw Exceptions.propagate(ioe); } }).map(b -> ByteString.of(b.readByteArray())), okio.Buffer::clear) .switchIfEmpty(Mono.defer(() -> EMPTY_BYTE_STRING_MONO)); } /** * Aggregate InputStream to single okio.ByteString. 
* * @param inputStream the InputStream to aggregate * @return Aggregated ByteString */ private static HttpResponse fromOkHttpResponse( okhttp3.Response response, HttpRequest request, boolean eagerlyReadResponse) throws IOException { /* * Use a buffered response when we are eagerly reading the response from the network and the body isn't * empty. */ if (eagerlyReadResponse) { ResponseBody body = response.body(); if (Objects.nonNull(body)) { byte[] bytes = body.bytes(); body.close(); return new OkHttpAsyncBufferedResponse(response, request, bytes); } else { return new OkHttpAsyncResponse(response, request); } } else { return new OkHttpAsyncResponse(response, request); } } private static class OkHttpCallback implements okhttp3.Callback { private final MonoSink<HttpResponse> sink; private final HttpRequest request; private final boolean eagerlyReadResponse; OkHttpCallback(MonoSink<HttpResponse> sink, HttpRequest request, boolean eagerlyReadResponse) { this.sink = sink; this.request = request; this.eagerlyReadResponse = eagerlyReadResponse; } @SuppressWarnings("NullableProblems") @Override public void onFailure(okhttp3.Call call, IOException e) { sink.error(e); } @SuppressWarnings("NullableProblems") @Override public void onResponse(okhttp3.Call call, okhttp3.Response response) { try { HttpResponse httpResponse = fromOkHttpResponse(response, request, eagerlyReadResponse); sink.success(httpResponse); } catch (IOException ex) { sink.error(ex); } } } }
class OkHttpAsyncHttpClient implements HttpClient { private static final ClientLogger LOGGER = new ClientLogger(OkHttpAsyncHttpClient.class); final OkHttpClient httpClient; private static final Mono<okio.ByteString> EMPTY_BYTE_STRING_MONO = Mono.just(okio.ByteString.EMPTY); OkHttpAsyncHttpClient(OkHttpClient httpClient) { this.httpClient = httpClient; } @Override public Mono<HttpResponse> send(HttpRequest request) { return send(request, Context.NONE); } @Override public Mono<HttpResponse> send(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); return Mono.create(sink -> sink.onRequest(value -> { toOkHttpRequest(request).subscribe(okHttpRequest -> { try { Call call = httpClient.newCall(okHttpRequest); call.enqueue(new OkHttpCallback(sink, request, eagerlyReadResponse)); sink.onCancel(call::cancel); } catch (Exception ex) { sink.error(ex); } }, sink::error); })); } @Override public HttpResponse sendSynchronously(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); Request okHttpRequest = toOkHttpRequestSynchronously(request); Call call = httpClient.newCall(okHttpRequest); try { Response okHttpResponse = call.execute(); return fromOkHttpResponse(okHttpResponse, request, eagerlyReadResponse); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } } /** * Converts the given azure-core request to okhttp request. * * @param request the azure-core request * @return the Mono emitting okhttp request */ private static Mono<okhttp3.Request> toOkHttpRequest(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return Mono.just(requestBuilder.get().build()); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return Mono.just(requestBuilder.head().build()); } return toOkHttpRequestBody(request.getContent(), request.getHeaders()) .map(okhttpRequestBody -> requestBuilder.method(request.getHttpMethod().toString(), okhttpRequestBody) .build()); } /** * Converts the given azure-core request to okhttp request. * * @param request the azure-core request * @return the Mono emitting okhttp request */ private static okhttp3.Request toOkHttpRequestSynchronously(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return requestBuilder.get().build(); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return requestBuilder.head().build(); } RequestBody requestBody = toOkHttpRequestBodySynchronously(request.getContent(), request.getHeaders()); return requestBuilder.method(request.getHttpMethod().toString(), requestBody) .build(); } /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. 
* * @param bodyContent The BinaryData request body * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ private static RequestBody toOkHttpRequestBodySynchronously(BinaryData bodyContent, HttpHeaders headers) { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); if (bodyContent == null) { return RequestBody.create(ByteString.EMPTY, mediaType); } BinaryDataContent content = BinaryDataHelper.getContent(bodyContent); if (content instanceof ByteArrayContent) { return RequestBody.create(content.toBytes(), mediaType); } else if (content instanceof FileContent) { FileContent fileContent = (FileContent) content; return RequestBody.create(fileContent.getFile().toFile(), mediaType); } else if (content instanceof StringContent) { return RequestBody.create(bodyContent.toString(), mediaType); } else if (content instanceof InputStreamContent) { return RequestBody.create(toByteString(content.toStream()), mediaType); } else { return toByteString(bodyContent.toFluxByteBuffer()).map(bs -> RequestBody.create(bs, mediaType)).block(); } } /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. * * @param bodyContent The BinaryData request body * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ private static Mono<RequestBody> toOkHttpRequestBody(BinaryData bodyContent, HttpHeaders headers) { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); if (bodyContent == null) { return Mono.defer(() -> Mono.just(RequestBody.create(ByteString.EMPTY, mediaType))); } BinaryDataContent content = BinaryDataHelper.getContent(bodyContent); if (content instanceof ByteArrayContent) { return Mono.defer(() -> Mono.just(RequestBody.create(content.toBytes(), mediaType))); } else if (content instanceof FileContent) { FileContent fileContent = (FileContent) content; return Mono.defer(() -> Mono.just(RequestBody.create(fileContent.getFile().toFile(), mediaType))); } else if (content instanceof StringContent) { return Mono.defer(() -> Mono.just(RequestBody.create(bodyContent.toString(), mediaType))); } else { return toByteString(bodyContent.toFluxByteBuffer()).map(bs -> RequestBody.create(bs, mediaType)); } } /** * Aggregate Flux of java.nio.ByteBuffer to single okio.ByteString. * * Pooled okio.Buffer type is used to buffer emitted ByteBuffer instances. Content of each ByteBuffer will be * written (i.e copied) to the internal okio.Buffer slots. Once the stream terminates, the contents of all slots get * copied to one single byte array and okio.ByteString will be created referring this byte array. Finally, the * initial okio.Buffer will be returned to the pool. * * @param bbFlux the Flux of ByteBuffer to aggregate * @return a mono emitting aggregated ByteString */ private static Mono<ByteString> toByteString(Flux<ByteBuffer> bbFlux) { Objects.requireNonNull(bbFlux, "'bbFlux' cannot be null."); return Mono.using(okio.Buffer::new, buffer -> bbFlux.reduce(buffer, (b, byteBuffer) -> { try { b.write(byteBuffer); return b; } catch (IOException ioe) { throw Exceptions.propagate(ioe); } }).map(b -> ByteString.of(b.readByteArray())), okio.Buffer::clear) .switchIfEmpty(Mono.defer(() -> EMPTY_BYTE_STRING_MONO)); } /** * Aggregate InputStream to single okio.ByteString. 
* * @param inputStream the InputStream to aggregate * @return Aggregated ByteString */ private static HttpResponse fromOkHttpResponse( okhttp3.Response response, HttpRequest request, boolean eagerlyReadResponse) throws IOException { /* * Use a buffered response when we are eagerly reading the response from the network and the body isn't * empty. */ if (eagerlyReadResponse) { ResponseBody body = response.body(); if (Objects.nonNull(body)) { byte[] bytes = body.bytes(); body.close(); return new OkHttpAsyncBufferedResponse(response, request, bytes); } else { return new OkHttpAsyncResponse(response, request); } } else { return new OkHttpAsyncResponse(response, request); } } private static class OkHttpCallback implements okhttp3.Callback { private final MonoSink<HttpResponse> sink; private final HttpRequest request; private final boolean eagerlyReadResponse; OkHttpCallback(MonoSink<HttpResponse> sink, HttpRequest request, boolean eagerlyReadResponse) { this.sink = sink; this.request = request; this.eagerlyReadResponse = eagerlyReadResponse; } @SuppressWarnings("NullableProblems") @Override public void onFailure(okhttp3.Call call, IOException e) { sink.error(e); } @SuppressWarnings("NullableProblems") @Override public void onResponse(okhttp3.Call call, okhttp3.Response response) { try { HttpResponse httpResponse = fromOkHttpResponse(response, request, eagerlyReadResponse); sink.success(httpResponse); } catch (IOException ex) { sink.error(ex); } } } }
Just trying to understand, since I see this update all over the PR: was DocumentBuildMode just missing before, or is it now replacing the removed modelId?
public void beginBuildModel() { String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}"; documentModelAdministrationAsyncClient.beginBuildModel(trainingFilesUrl, DocumentBuildMode.TEMPLATE ) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); }
public void beginBuildModel() { String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}"; documentModelAdministrationAsyncClient.beginBuildModel(trainingFilesUrl, DocumentBuildMode.TEMPLATE ) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); }
class DocumentModelAdminAsyncClientJavaDocCodeSnippets { private final DocumentModelAdministrationAsyncClient documentModelAdministrationAsyncClient = new DocumentModelAdministrationClientBuilder().buildAsyncClient(); /** * Code snippet for {@link DocumentModelAdministrationAsyncClient} initialization */ public void formTrainingAsyncClientInInitialization() { DocumentModelAdministrationAsyncClient documentModelAdministrationAsyncClient = new DocumentModelAdministrationClientBuilder().buildAsyncClient(); } /** * Code snippet for creating a {@link DocumentModelAdministrationAsyncClient} with pipeline */ public void createDocumentTrainingAsyncClientWithPipeline() { HttpPipeline pipeline = new HttpPipelineBuilder() .policies(/* add policies */) .build(); DocumentModelAdministrationAsyncClient documentModelAdministrationAsyncClient = new DocumentModelAdministrationClientBuilder() .credential(new AzureKeyCredential("{key}")) .endpoint("{endpoint}") .pipeline(pipeline) .buildAsyncClient(); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ /** * Code snippet for {@link DocumentModelAdministrationAsyncClient * with options */ public void beginBuildModelWithOptions() { String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}"; Map<String, String> attrs = new HashMap<String, String>(); attrs.put("createdBy", "sample"); documentModelAdministrationAsyncClient.beginBuildModel(trainingFilesUrl, DocumentBuildMode.TEMPLATE, new BuildModelOptions() .setDescription("model desc") .setPrefix("Invoice") .setTags(attrs)) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Description: %s%n", documentModel.getDescription()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); System.out.printf("Model assigned tags: %s%n", documentModel.getTags()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void deleteModel() { String modelId = "{model_id}"; documentModelAdministrationAsyncClient.deleteModel(modelId) .subscribe(ignored -> System.out.printf("Model ID: %s is deleted%n", modelId)); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void deleteModelWithResponse() { String modelId = "{model_id}"; documentModelAdministrationAsyncClient.deleteModelWithResponse(modelId) .subscribe(response -> { System.out.printf("Response Status Code: %d.", response.getStatusCode()); System.out.printf("Model ID: %s is deleted.%n", modelId); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getCopyAuthorization() { String modelId = "my-copied-model"; documentModelAdministrationAsyncClient.getCopyAuthorization(modelId) .subscribe(copyAuthorization -> System.out.printf("Copy Authorization for model id: %s, access token: %s, expiration time: %s, " + "target resource ID; %s, target resource region: %s%n", copyAuthorization.getTargetModelId(), copyAuthorization.getAccessToken(), copyAuthorization.getExpiresOn(), copyAuthorization.getTargetResourceId(), copyAuthorization.getTargetResourceRegion() )); } /** * Code snippet for {@link 
DocumentModelAdministrationAsyncClient */ public void getCopyAuthorizationWithResponse() { String modelId = "my-copied-model"; Map<String, String> attrs = new HashMap<String, String>(); attrs.put("createdBy", "sample"); documentModelAdministrationAsyncClient.getCopyAuthorizationWithResponse(modelId, new CopyAuthorizationOptions() .setDescription("model desc") .setTags(attrs)) .subscribe(copyAuthorization -> System.out.printf("Copy Authorization response status: %s, for model id: %s, access token: %s, " + "expiration time: %s, target resource ID; %s, target resource region: %s%n", copyAuthorization.getStatusCode(), copyAuthorization.getValue().getTargetModelId(), copyAuthorization.getValue().getAccessToken(), copyAuthorization.getValue().getExpiresOn(), copyAuthorization.getValue().getTargetResourceId(), copyAuthorization.getValue().getTargetResourceRegion() )); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getAccountProperties() { documentModelAdministrationAsyncClient.getAccountProperties() .subscribe(accountProperties -> { System.out.printf("Max number of models that can be build for this account: %d%n", accountProperties.getDocumentModelLimit()); System.out.printf("Current count of built document analysis models: %d%n", accountProperties.getDocumentModelCount()); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getAccountPropertiesWithResponse() { documentModelAdministrationAsyncClient.getAccountPropertiesWithResponse() .subscribe(response -> { System.out.printf("Response Status Code: %d.", response.getStatusCode()); AccountProperties accountProperties = response.getValue(); System.out.printf("Max number of models that can be build for this account: %d%n", accountProperties.getDocumentModelLimit()); System.out.printf("Current count of built document analysis models: %d%n", accountProperties.getDocumentModelCount()); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void beginCreateComposedModel() { String modelId1 = "{model_Id_1}"; String modelId2 = "{model_Id_2}"; documentModelAdministrationAsyncClient.beginCreateComposedModel(Arrays.asList(modelId1, modelId2) ) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient * with options */ public void beginCreateComposedModelWithOptions() { String modelId1 = "{model_Id_1}"; String modelId2 = "{model_Id_2}"; Map<String, String> attrs = new HashMap<String, String>(); attrs.put("createdBy", "sample"); documentModelAdministrationAsyncClient.beginCreateComposedModel(Arrays.asList(modelId1, modelId2), new CreateComposedModelOptions().setDescription("model-desc").setTags(attrs)) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Description: %s%n", documentModel.getDescription()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); 
System.out.printf("Model assigned tags: %s%n", documentModel.getTags()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void beginCopy() { String copyModelId = "copy-model"; String targetModelId = "my-copied-model-id"; documentModelAdministrationAsyncClient.getCopyAuthorization(targetModelId) .subscribe(copyAuthorization -> documentModelAdministrationAsyncClient.beginCopyModelTo(copyModelId, copyAuthorization) .filter(pollResponse -> pollResponse.getStatus().isComplete()) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> System.out.printf("Copied model has model ID: %s, was created on: %s.%n,", documentModel.getModelId(), documentModel.getCreatedOn()))); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void listModels() { documentModelAdministrationAsyncClient.listModels() .subscribe(documentModelInfo -> System.out.printf("Model ID: %s, Model description: %s, Created on: %s.%n", documentModelInfo.getModelId(), documentModelInfo.getDescription(), documentModelInfo.getCreatedOn())); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getModel() { String modelId = "{model_id}"; documentModelAdministrationAsyncClient.getModel(modelId).subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Description: %s%n", documentModel.getDescription()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getModelWithResponse() { String modelId = "{model_id}"; documentModelAdministrationAsyncClient.getModelWithResponse(modelId).subscribe(response -> { System.out.printf("Response Status Code: %d.", response.getStatusCode()); DocumentModel documentModel = response.getValue(); System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Description: %s%n", documentModel.getDescription()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getOperation() { String operationId = "{operation_Id}"; documentModelAdministrationAsyncClient.getOperation(operationId).subscribe(modelOperation -> { System.out.printf("Operation ID: %s%n", modelOperation.getOperationId()); System.out.printf("Operation Kind: %s%n", modelOperation.getKind()); System.out.printf("Operation Status: %s%n", 
modelOperation.getStatus()); System.out.printf("Model ID created with this operation: %s%n", modelOperation.getModelId()); if (ModelOperationStatus.FAILED.equals(modelOperation.getStatus())) { System.out.printf("Operation fail error: %s%n", modelOperation.getError().getMessage()); } }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getOperationWithResponse() { String operationId = "{operation_Id}"; documentModelAdministrationAsyncClient.getOperationWithResponse(operationId).subscribe(response -> { System.out.printf("Response Status Code: %d.", response.getStatusCode()); ModelOperation modelOperation = response.getValue(); System.out.printf("Operation ID: %s%n", modelOperation.getOperationId()); System.out.printf("Operation Kind: %s%n", modelOperation.getKind()); System.out.printf("Operation Status: %s%n", modelOperation.getStatus()); System.out.printf("Model ID created with this operation: %s%n", modelOperation.getModelId()); if (ModelOperationStatus.FAILED.equals(modelOperation.getStatus())) { System.out.printf("Operation fail error: %s%n", modelOperation.getError().getMessage()); } }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void listOperations() { documentModelAdministrationAsyncClient.listOperations() .subscribe(modelOperation -> { System.out.printf("Operation ID: %s%n", modelOperation.getOperationId()); System.out.printf("Operation Status: %s%n", modelOperation.getStatus()); System.out.printf("Operation Created on: %s%n", modelOperation.getCreatedOn()); System.out.printf("Operation Percent completed: %d%n", modelOperation.getPercentCompleted()); System.out.printf("Operation Kind: %s%n", modelOperation.getKind()); System.out.printf("Operation Last updated on: %s%n", modelOperation.getLastUpdatedOn()); System.out.printf("Operation resource location: %s%n", modelOperation.getResourceLocation()); }); } }
class DocumentModelAdminAsyncClientJavaDocCodeSnippets { private final DocumentModelAdministrationAsyncClient documentModelAdministrationAsyncClient = new DocumentModelAdministrationClientBuilder().buildAsyncClient(); /** * Code snippet for {@link DocumentModelAdministrationAsyncClient} initialization */ public void documentModelAdministrationAsyncClientInitialization() { DocumentModelAdministrationAsyncClient documentModelAdministrationAsyncClient = new DocumentModelAdministrationClientBuilder().buildAsyncClient(); } /** * Code snippet for creating a {@link DocumentModelAdministrationAsyncClient} with pipeline */ public void createDocumentModelAdministrationAsyncClientWithPipeline() { HttpPipeline pipeline = new HttpPipelineBuilder() .policies(/* add policies */) .build(); DocumentModelAdministrationAsyncClient documentModelAdministrationAsyncClient = new DocumentModelAdministrationClientBuilder() .credential(new AzureKeyCredential("{key}")) .endpoint("{endpoint}") .pipeline(pipeline) .buildAsyncClient(); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ /** * Code snippet for {@link DocumentModelAdministrationAsyncClient * with options */ public void beginBuildModelWithOptions() { String trainingFilesUrl = "{SAS-URL-of-your-container-in-blob-storage}"; String modelId = "model-id"; Map<String, String> attrs = new HashMap<String, String>(); attrs.put("createdBy", "sample"); documentModelAdministrationAsyncClient.beginBuildModel(trainingFilesUrl, DocumentBuildMode.TEMPLATE, new BuildModelOptions() .setModelId(modelId) .setDescription("model desc") .setPrefix("Invoice") .setTags(attrs)) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Description: %s%n", documentModel.getDescription()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); System.out.printf("Model assigned tags: %s%n", documentModel.getTags()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void deleteModel() { String modelId = "{model_id}"; documentModelAdministrationAsyncClient.deleteModel(modelId) .subscribe(ignored -> System.out.printf("Model ID: %s is deleted%n", modelId)); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void deleteModelWithResponse() { String modelId = "{model_id}"; documentModelAdministrationAsyncClient.deleteModelWithResponse(modelId) .subscribe(response -> { System.out.printf("Response Status Code: %d.", response.getStatusCode()); System.out.printf("Model ID: %s is deleted.%n", modelId); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getCopyAuthorization() { String modelId = "my-copied-model"; documentModelAdministrationAsyncClient.getCopyAuthorization() .subscribe(copyAuthorization -> System.out.printf("Copy Authorization for model id: %s, access token: %s, expiration time: %s, " + "target resource ID; %s, target resource region: %s%n", copyAuthorization.getTargetModelId(), copyAuthorization.getAccessToken(), copyAuthorization.getExpiresOn(), copyAuthorization.getTargetResourceId(), 
copyAuthorization.getTargetResourceRegion() )); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getCopyAuthorizationWithResponse() { String modelId = "my-copied-model"; Map<String, String> attrs = new HashMap<String, String>(); attrs.put("createdBy", "sample"); documentModelAdministrationAsyncClient.getCopyAuthorizationWithResponse( new CopyAuthorizationOptions() .setModelId(modelId) .setDescription("model desc") .setTags(attrs)) .subscribe(copyAuthorization -> System.out.printf("Copy Authorization response status: %s, for model id: %s, access token: %s, " + "expiration time: %s, target resource ID; %s, target resource region: %s%n", copyAuthorization.getStatusCode(), copyAuthorization.getValue().getTargetModelId(), copyAuthorization.getValue().getAccessToken(), copyAuthorization.getValue().getExpiresOn(), copyAuthorization.getValue().getTargetResourceId(), copyAuthorization.getValue().getTargetResourceRegion() )); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getAccountProperties() { documentModelAdministrationAsyncClient.getAccountProperties() .subscribe(accountProperties -> { System.out.printf("Max number of models that can be build for this account: %d%n", accountProperties.getDocumentModelLimit()); System.out.printf("Current count of built document analysis models: %d%n", accountProperties.getDocumentModelCount()); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getAccountPropertiesWithResponse() { documentModelAdministrationAsyncClient.getAccountPropertiesWithResponse() .subscribe(response -> { System.out.printf("Response Status Code: %d.", response.getStatusCode()); AccountProperties accountProperties = response.getValue(); System.out.printf("Max number of models that can be build for this account: %d%n", accountProperties.getDocumentModelLimit()); System.out.printf("Current count of built document analysis models: %d%n", accountProperties.getDocumentModelCount()); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void beginCreateComposedModel() { String modelId1 = "{model_Id_1}"; String modelId2 = "{model_Id_2}"; documentModelAdministrationAsyncClient.beginCreateComposedModel(Arrays.asList(modelId1, modelId2) ) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient * with options */ public void beginCreateComposedModelWithOptions() { String modelId1 = "{model_Id_1}"; String modelId2 = "{model_Id_2}"; String modelId = "my-composed-model"; Map<String, String> attrs = new HashMap<String, String>(); attrs.put("createdBy", "sample"); documentModelAdministrationAsyncClient.beginCreateComposedModel(Arrays.asList(modelId1, modelId2), new CreateComposedModelOptions() .setModelId(modelId) .setDescription("model-desc") .setTags(attrs)) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); 
System.out.printf("Model Description: %s%n", documentModel.getDescription()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); System.out.printf("Model assigned tags: %s%n", documentModel.getTags()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void beginCopy() { String copyModelId = "copy-model"; documentModelAdministrationAsyncClient.getCopyAuthorization() .subscribe(copyAuthorization -> documentModelAdministrationAsyncClient.beginCopyModelTo(copyModelId, copyAuthorization) .filter(pollResponse -> pollResponse.getStatus().isComplete()) .flatMap(AsyncPollResponse::getFinalResult) .subscribe(documentModel -> System.out.printf("Copied model has model ID: %s, was created on: %s.%n,", documentModel.getModelId(), documentModel.getCreatedOn()))); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void listModels() { documentModelAdministrationAsyncClient.listModels() .subscribe(documentModelInfo -> System.out.printf("Model ID: %s, Model description: %s, Created on: %s.%n", documentModelInfo.getModelId(), documentModelInfo.getDescription(), documentModelInfo.getCreatedOn())); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getModel() { String modelId = "{model_id}"; documentModelAdministrationAsyncClient.getModel(modelId).subscribe(documentModel -> { System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Description: %s%n", documentModel.getDescription()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getModelWithResponse() { String modelId = "{model_id}"; documentModelAdministrationAsyncClient.getModelWithResponse(modelId).subscribe(response -> { System.out.printf("Response Status Code: %d.", response.getStatusCode()); DocumentModel documentModel = response.getValue(); System.out.printf("Model ID: %s%n", documentModel.getModelId()); System.out.printf("Model Description: %s%n", documentModel.getDescription()); System.out.printf("Model Created on: %s%n", documentModel.getCreatedOn()); documentModel.getDocTypes().forEach((key, docTypeInfo) -> { docTypeInfo.getFieldSchema().forEach((field, documentFieldSchema) -> { System.out.printf("Field: %s", field); System.out.printf("Field type: %s", documentFieldSchema.getType()); System.out.printf("Field confidence: %.2f", docTypeInfo.getFieldConfidence().get(field)); }); }); }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getOperation() { String operationId = "{operation_Id}"; documentModelAdministrationAsyncClient.getOperation(operationId).subscribe(modelOperation -> { System.out.printf("Operation ID: %s%n", modelOperation.getOperationId()); System.out.printf("Operation 
Kind: %s%n", modelOperation.getKind()); System.out.printf("Operation Status: %s%n", modelOperation.getStatus()); System.out.printf("Model ID created with this operation: %s%n", modelOperation.getModelId()); if (ModelOperationStatus.FAILED.equals(modelOperation.getStatus())) { System.out.printf("Operation fail error: %s%n", modelOperation.getError().getMessage()); } }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void getOperationWithResponse() { String operationId = "{operation_Id}"; documentModelAdministrationAsyncClient.getOperationWithResponse(operationId).subscribe(response -> { System.out.printf("Response Status Code: %d.", response.getStatusCode()); ModelOperation modelOperation = response.getValue(); System.out.printf("Operation ID: %s%n", modelOperation.getOperationId()); System.out.printf("Operation Kind: %s%n", modelOperation.getKind()); System.out.printf("Operation Status: %s%n", modelOperation.getStatus()); System.out.printf("Model ID created with this operation: %s%n", modelOperation.getModelId()); if (ModelOperationStatus.FAILED.equals(modelOperation.getStatus())) { System.out.printf("Operation fail error: %s%n", modelOperation.getError().getMessage()); } }); } /** * Code snippet for {@link DocumentModelAdministrationAsyncClient */ public void listOperations() { documentModelAdministrationAsyncClient.listOperations() .subscribe(modelOperation -> { System.out.printf("Operation ID: %s%n", modelOperation.getOperationId()); System.out.printf("Operation Status: %s%n", modelOperation.getStatus()); System.out.printf("Operation Created on: %s%n", modelOperation.getCreatedOn()); System.out.printf("Operation Percent completed: %d%n", modelOperation.getPercentCompleted()); System.out.printf("Operation Kind: %s%n", modelOperation.getKind()); System.out.printf("Operation Last updated on: %s%n", modelOperation.getLastUpdatedOn()); System.out.printf("Operation resource location: %s%n", modelOperation.getResourceLocation()); }); } }
Shouldn't this be sent synchronously? (A sketch of the synchronous alternative follows this record.)
public void run() { runAsync().block(); }
runAsync().block();
public void run() { runAsync().block(); }
class PipelineSendTest extends RestProxyTestBase<CorePerfStressOptions> { private final Supplier<BinaryData> binaryDataSupplier; private final URL targetURL; public PipelineSendTest(CorePerfStressOptions options) { super(options); binaryDataSupplier = createBinaryDataSupplier(options); try { targetURL = new URL(new URL(endpoint), "BinaryData"); } catch (MalformedURLException e) { throw new UncheckedIOException(e); } } @Override @Override public Mono<Void> runAsync() { HttpRequest httpRequest = new HttpRequest( HttpMethod.PUT, targetURL, new HttpHeaders(), binaryDataSupplier.get().toFluxByteBuffer()); return httpPipeline.send(httpRequest) .then(); } }
class PipelineSendTest extends RestProxyTestBase<CorePerfStressOptions> { private final Supplier<BinaryData> binaryDataSupplier; private final URL targetURL; public PipelineSendTest(CorePerfStressOptions options) { super(options); binaryDataSupplier = createBinaryDataSupplier(options); try { targetURL = new URL(new URL(endpoint), "BinaryData"); } catch (MalformedURLException e) { throw new UncheckedIOException(e); } } @Override @Override public Mono<Void> runAsync() { HttpRequest httpRequest = new HttpRequest( HttpMethod.PUT, targetURL, new HttpHeaders(), binaryDataSupplier.get().toFluxByteBuffer()); return httpPipeline.send(httpRequest) .then(); } }
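The review question above asks whether the perf test's run() should send synchronously instead of blocking on the async pipeline. Below is a minimal sketch of that alternative, assuming an azure-core version that exposes HttpPipeline.sendSync(HttpRequest, Context); the class name, constructor, and fields are illustrative stand-ins for PipelineSendTest's own, not part of the reviewed code.

import com.azure.core.http.HttpHeaders;
import com.azure.core.http.HttpMethod;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpRequest;
import com.azure.core.http.HttpResponse;
import com.azure.core.util.BinaryData;
import com.azure.core.util.Context;

import java.net.URL;
import java.util.function.Supplier;

// Illustrative stand-in for a synchronous variant of the pipeline send perf test.
final class SyncPipelineSendSketch {
    private final HttpPipeline httpPipeline;
    private final URL targetURL;
    private final Supplier<BinaryData> binaryDataSupplier;

    SyncPipelineSendSketch(HttpPipeline httpPipeline, URL targetURL,
        Supplier<BinaryData> binaryDataSupplier) {
        this.httpPipeline = httpPipeline;
        this.targetURL = targetURL;
        this.binaryDataSupplier = binaryDataSupplier;
    }

    // Sends on the calling thread instead of building a reactive chain and blocking on it.
    public void run() {
        HttpRequest httpRequest = new HttpRequest(HttpMethod.PUT, targetURL)
            .setHeaders(new HttpHeaders())
            .setBody(binaryDataSupplier.get().toBytes());
        // sendSync(HttpRequest, Context) is assumed to be available (newer azure-core releases).
        try (HttpResponse response = httpPipeline.sendSync(httpRequest, Context.NONE)) {
            // Read the status code and close the response so the connection can be reused.
            response.getStatusCode();
        }
    }
}

Whether this is preferable depends on what the benchmark is meant to measure: the truly synchronous path, or the cost of blocking on the async one.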
There is no behavior change: the released management SDKs use the same 7.2 service version, while the latest data-plane code is 7.3-preview. (A sketch contrasting the two choices follows this record.)
private void init() { if (innerModel().properties().vaultUri() != null) { final String vaultUrl = vaultUri(); this.secretClient = new SecretClientBuilder() .vaultUrl(vaultUrl) .pipeline(vaultHttpPipeline) .serviceVersion(SecretServiceVersion.V7_2) .buildAsyncClient(); this.keyClient = new KeyClientBuilder() .vaultUrl(vaultUrl) .pipeline(vaultHttpPipeline) .serviceVersion(KeyServiceVersion.V7_2) .buildAsyncClient(); } }
.serviceVersion(SecretServiceVersion.V7_2)
private void init() { if (innerModel().properties().vaultUri() != null) { final String vaultUrl = vaultUri(); this.secretClient = new SecretClientBuilder() .vaultUrl(vaultUrl) .pipeline(vaultHttpPipeline) .serviceVersion(SecretServiceVersion.V7_2) .buildAsyncClient(); this.keyClient = new KeyClientBuilder() .vaultUrl(vaultUrl) .pipeline(vaultHttpPipeline) .serviceVersion(KeyServiceVersion.V7_2) .buildAsyncClient(); } }
class VaultImpl extends GroupableResourceImpl<Vault, VaultInner, VaultImpl, KeyVaultManager> implements Vault, Vault.Definition, Vault.Update { private final ClientLogger logger = new ClientLogger(this.getClass()); private AuthorizationManager authorizationManager; private List<AccessPolicyImpl> accessPolicies; private SecretAsyncClient secretClient; private KeyAsyncClient keyClient; private HttpPipeline vaultHttpPipeline; private Keys keys; private Secrets secrets; VaultImpl(String key, VaultInner innerObject, KeyVaultManager manager, AuthorizationManager authorizationManager) { super(key, innerObject, manager); this.authorizationManager = authorizationManager; this.accessPolicies = new ArrayList<>(); if (innerObject != null && innerObject.properties() != null && innerObject.properties().accessPolicies() != null) { for (AccessPolicyEntry entry : innerObject.properties().accessPolicies()) { this.accessPolicies.add(new AccessPolicyImpl(entry, this)); } } vaultHttpPipeline = manager().httpPipeline(); init(); } @Override public HttpPipeline vaultHttpPipeline() { return vaultHttpPipeline; } public SecretAsyncClient secretClient() { return secretClient; } @Override public KeyAsyncClient keyClient() { return keyClient; } @Override public Keys keys() { if (keys == null) { keys = new KeysImpl(keyClient, this); } return keys; } @Override public Secrets secrets() { if (secrets == null) { secrets = new SecretsImpl(secretClient, this); } return secrets; } @Override public String vaultUri() { if (innerModel().properties() == null) { return null; } return innerModel().properties().vaultUri(); } @Override public String tenantId() { if (innerModel().properties() == null) { return null; } if (innerModel().properties().tenantId() == null) { return null; } return innerModel().properties().tenantId().toString(); } @Override public Sku sku() { if (innerModel().properties() == null) { return null; } return innerModel().properties().sku(); } @Override public List<AccessPolicy> accessPolicies() { AccessPolicy[] array = new AccessPolicy[accessPolicies.size()]; return Arrays.asList(accessPolicies.toArray(array)); } @Override public boolean roleBasedAccessControlEnabled() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enableRbacAuthorization()); } @Override public boolean enabledForDeployment() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enabledForDeployment()); } @Override public boolean enabledForDiskEncryption() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enabledForDiskEncryption()); } @Override public boolean enabledForTemplateDeployment() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enabledForTemplateDeployment()); } @Override public boolean softDeleteEnabled() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enableSoftDelete()); } @Override public boolean purgeProtectionEnabled() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enablePurgeProtection()); } @Override public VaultImpl withEmptyAccessPolicy() { this.accessPolicies = new ArrayList<>(); return this; } @Override public VaultImpl 
withoutAccessPolicy(String objectId) { for (AccessPolicyImpl entry : this.accessPolicies) { if (entry.objectId().equals(objectId)) { accessPolicies.remove(entry); break; } } return this; } @Override public VaultImpl withAccessPolicy(AccessPolicy accessPolicy) { accessPolicies.add((AccessPolicyImpl) accessPolicy); return this; } @Override public AccessPolicyImpl defineAccessPolicy() { return new AccessPolicyImpl(new AccessPolicyEntry(), this); } @Override public VaultImpl withRoleBasedAccessControl() { innerModel().properties().withEnableRbacAuthorization(true); return this; } @Override public VaultImpl withoutRoleBasedAccessControl() { innerModel().properties().withEnableRbacAuthorization(false); return this; } @Override public AccessPolicyImpl updateAccessPolicy(String objectId) { for (AccessPolicyImpl entry : this.accessPolicies) { if (entry.objectId().equals(objectId)) { return entry; } } throw logger.logExceptionAsError( new NoSuchElementException(String.format("Identity %s not found in the access policies.", objectId))); } @Override public VaultImpl withDeploymentEnabled() { innerModel().properties().withEnabledForDeployment(true); return this; } @Override public VaultImpl withDiskEncryptionEnabled() { innerModel().properties().withEnabledForDiskEncryption(true); return this; } @Override public VaultImpl withTemplateDeploymentEnabled() { innerModel().properties().withEnabledForTemplateDeployment(true); return this; } @Override public VaultImpl withSoftDeleteEnabled() { innerModel().properties().withEnableSoftDelete(true); return this; } @Override public VaultImpl withPurgeProtectionEnabled() { innerModel().properties().withEnablePurgeProtection(true); return this; } @Override public VaultImpl withDeploymentDisabled() { innerModel().properties().withEnabledForDeployment(false); return this; } @Override public VaultImpl withDiskEncryptionDisabled() { innerModel().properties().withEnabledForDiskEncryption(false); return this; } @Override public VaultImpl withTemplateDeploymentDisabled() { innerModel().properties().withEnabledForTemplateDeployment(false); return this; } @Override public VaultImpl withSku(SkuName skuName) { if (innerModel().properties() == null) { innerModel().withProperties(new VaultProperties()); } innerModel().properties().withSku(new Sku().withName(skuName).withFamily(SkuFamily.A)); return this; } private Mono<List<AccessPolicy>> populateAccessPolicies() { List<Mono<?>> observables = new ArrayList<>(); for (final AccessPolicyImpl accessPolicy : accessPolicies) { if (accessPolicy.objectId() == null) { if (accessPolicy.userPrincipalName() != null) { observables .add( authorizationManager .users() .getByNameAsync(accessPolicy.userPrincipalName()) .subscribeOn(ResourceManagerUtils.InternalRuntimeContext.getReactorScheduler()) .doOnNext(user -> accessPolicy.forObjectId(user.id())) .switchIfEmpty( Mono .error( new ManagementException( String .format( "User principal name %s is not found in tenant %s", accessPolicy.userPrincipalName(), authorizationManager.tenantId()), null)))); } else if (accessPolicy.servicePrincipalName() != null) { observables .add( authorizationManager .servicePrincipals() .getByNameAsync(accessPolicy.servicePrincipalName()) .subscribeOn(ResourceManagerUtils.InternalRuntimeContext.getReactorScheduler()) .doOnNext(sp -> accessPolicy.forObjectId(sp.id())) .switchIfEmpty( Mono .error( new ManagementException( String .format( "Service principal name %s is not found in tenant %s", accessPolicy.servicePrincipalName(), authorizationManager.tenantId()), 
null)))); } else { throw logger.logExceptionAsError( new IllegalArgumentException("Access policy must specify object ID.")); } } } if (observables.isEmpty()) { return Mono.just(accessPolicies()); } else { return Mono.zip(observables, args -> accessPolicies()); } } @Override public Mono<Vault> createResourceAsync() { final VaultsClient client = this.manager().serviceClient().getVaults(); return populateAccessPolicies() .then( Mono .defer( () -> { VaultCreateOrUpdateParameters parameters = new VaultCreateOrUpdateParameters(); parameters.withLocation(regionName()); parameters.withProperties(innerModel().properties()); parameters.withTags(innerModel().tags()); parameters.properties().withAccessPolicies(new ArrayList<>()); for (AccessPolicy accessPolicy : accessPolicies) { parameters.properties().accessPolicies().add(accessPolicy.innerModel()); } return client.createOrUpdateAsync(resourceGroupName(), this.name(), parameters); })) .map( inner -> { this.setInner(inner); init(); return this; }); } @Override protected Mono<VaultInner> getInnerAsync() { return this.manager().serviceClient().getVaults().getByResourceGroupAsync(resourceGroupName(), this.name()); } @Override public CreateMode createMode() { return innerModel().properties().createMode(); } @Override public NetworkRuleSet networkRuleSet() { return innerModel().properties().networkAcls(); } @Override public VaultImpl withAccessFromAllNetworks() { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withDefaultAction(NetworkRuleAction.ALLOW); return this; } @Override public VaultImpl withAccessFromSelectedNetworks() { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withDefaultAction(NetworkRuleAction.DENY); return this; } /** * Specifies that access to the storage account should be allowed from the given ip address or ip address range. 
* * @param ipAddressOrRange the ip address or ip address range in cidr format * @return VaultImpl */ private VaultImpl withAccessAllowedFromIpAddressOrRange(String ipAddressOrRange) { NetworkRuleSet networkRuleSet = innerModel().properties().networkAcls(); if (networkRuleSet.ipRules() == null) { networkRuleSet.withIpRules(new ArrayList<>()); } boolean found = false; for (IpRule rule : networkRuleSet.ipRules()) { if (rule.value().equalsIgnoreCase(ipAddressOrRange)) { found = true; break; } } if (!found) { networkRuleSet.ipRules().add(new IpRule().withValue(ipAddressOrRange)); } return this; } @Override public VaultImpl withAccessFromIpAddress(String ipAddress) { return withAccessAllowedFromIpAddressOrRange(ipAddress); } @Override public VaultImpl withAccessFromIpAddressRange(String ipAddressCidr) { return withAccessAllowedFromIpAddressOrRange(ipAddressCidr); } @Override public VaultImpl withAccessFromAzureServices() { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withBypass(NetworkRuleBypassOptions.AZURE_SERVICES); return this; } @Override public VaultImpl withBypass(NetworkRuleBypassOptions bypass) { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withBypass(bypass); return this; } @Override public VaultImpl withDefaultAction(NetworkRuleAction defaultAction) { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withDefaultAction(defaultAction); return this; } @Override public VaultImpl withVirtualNetworkRules(List<VirtualNetworkRule> virtualNetworkRules) { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withVirtualNetworkRules(virtualNetworkRules); return this; } @Override public PagedIterable<PrivateLinkResource> listPrivateLinkResources() { return new PagedIterable<>(listPrivateLinkResourcesAsync()); } @Override public PagedFlux<PrivateLinkResource> listPrivateLinkResourcesAsync() { Mono<Response<List<PrivateLinkResource>>> retList = this.manager().serviceClient().getPrivateLinkResources() .listByVaultWithResponseAsync(this.resourceGroupName(), this.name()) .map(response -> new SimpleResponse<>(response, response.getValue().value().stream() .map(PrivateLinkResourceImpl::new) .collect(Collectors.toList()))); return PagedConverter.convertListToPagedFlux(retList); } @Override public void approvePrivateEndpointConnection(String privateEndpointConnectionName) { approvePrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> approvePrivateEndpointConnectionAsync(String privateEndpointConnectionName) { return manager().serviceClient().getPrivateEndpointConnections().putAsync( this.resourceGroupName(), this.name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionState().withStatus(PrivateEndpointServiceConnectionStatus.APPROVED))) .then(); } @Override public void rejectPrivateEndpointConnection(String privateEndpointConnectionName) { rejectPrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> rejectPrivateEndpointConnectionAsync(String privateEndpointConnectionName) { 
return manager().serviceClient().getPrivateEndpointConnections().putAsync( this.resourceGroupName(), this.name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionState().withStatus(PrivateEndpointServiceConnectionStatus.REJECTED))) .then(); } private static final class PrivateLinkResourceImpl implements PrivateLinkResource { private final com.azure.resourcemanager.keyvault.models.PrivateLinkResource innerModel; private PrivateLinkResourceImpl(com.azure.resourcemanager.keyvault.models.PrivateLinkResource innerModel) { this.innerModel = innerModel; } @Override public String groupId() { return innerModel.groupId(); } @Override public List<String> requiredMemberNames() { return Collections.unmodifiableList(innerModel.requiredMembers()); } @Override public List<String> requiredDnsZoneNames() { return Collections.unmodifiableList(innerModel.requiredZoneNames()); } } }
class VaultImpl extends GroupableResourceImpl<Vault, VaultInner, VaultImpl, KeyVaultManager> implements Vault, Vault.Definition, Vault.Update { private final ClientLogger logger = new ClientLogger(this.getClass()); private AuthorizationManager authorizationManager; private List<AccessPolicyImpl> accessPolicies; private SecretAsyncClient secretClient; private KeyAsyncClient keyClient; private HttpPipeline vaultHttpPipeline; private Keys keys; private Secrets secrets; VaultImpl(String key, VaultInner innerObject, KeyVaultManager manager, AuthorizationManager authorizationManager) { super(key, innerObject, manager); this.authorizationManager = authorizationManager; this.accessPolicies = new ArrayList<>(); if (innerObject != null && innerObject.properties() != null && innerObject.properties().accessPolicies() != null) { for (AccessPolicyEntry entry : innerObject.properties().accessPolicies()) { this.accessPolicies.add(new AccessPolicyImpl(entry, this)); } } vaultHttpPipeline = manager().httpPipeline(); init(); } @Override public HttpPipeline vaultHttpPipeline() { return vaultHttpPipeline; } public SecretAsyncClient secretClient() { return secretClient; } @Override public KeyAsyncClient keyClient() { return keyClient; } @Override public Keys keys() { if (keys == null) { keys = new KeysImpl(keyClient, this); } return keys; } @Override public Secrets secrets() { if (secrets == null) { secrets = new SecretsImpl(secretClient, this); } return secrets; } @Override public String vaultUri() { if (innerModel().properties() == null) { return null; } return innerModel().properties().vaultUri(); } @Override public String tenantId() { if (innerModel().properties() == null) { return null; } if (innerModel().properties().tenantId() == null) { return null; } return innerModel().properties().tenantId().toString(); } @Override public Sku sku() { if (innerModel().properties() == null) { return null; } return innerModel().properties().sku(); } @Override public List<AccessPolicy> accessPolicies() { AccessPolicy[] array = new AccessPolicy[accessPolicies.size()]; return Arrays.asList(accessPolicies.toArray(array)); } @Override public boolean roleBasedAccessControlEnabled() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enableRbacAuthorization()); } @Override public boolean enabledForDeployment() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enabledForDeployment()); } @Override public boolean enabledForDiskEncryption() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enabledForDiskEncryption()); } @Override public boolean enabledForTemplateDeployment() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enabledForTemplateDeployment()); } @Override public boolean softDeleteEnabled() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enableSoftDelete()); } @Override public boolean purgeProtectionEnabled() { if (innerModel().properties() == null) { return false; } return ResourceManagerUtils.toPrimitiveBoolean(innerModel().properties().enablePurgeProtection()); } @Override public VaultImpl withEmptyAccessPolicy() { this.accessPolicies = new ArrayList<>(); return this; } @Override public VaultImpl 
withoutAccessPolicy(String objectId) { for (AccessPolicyImpl entry : this.accessPolicies) { if (entry.objectId().equals(objectId)) { accessPolicies.remove(entry); break; } } return this; } @Override public VaultImpl withAccessPolicy(AccessPolicy accessPolicy) { accessPolicies.add((AccessPolicyImpl) accessPolicy); return this; } @Override public AccessPolicyImpl defineAccessPolicy() { return new AccessPolicyImpl(new AccessPolicyEntry(), this); } @Override public VaultImpl withRoleBasedAccessControl() { innerModel().properties().withEnableRbacAuthorization(true); return this; } @Override public VaultImpl withoutRoleBasedAccessControl() { innerModel().properties().withEnableRbacAuthorization(false); return this; } @Override public AccessPolicyImpl updateAccessPolicy(String objectId) { for (AccessPolicyImpl entry : this.accessPolicies) { if (entry.objectId().equals(objectId)) { return entry; } } throw logger.logExceptionAsError( new NoSuchElementException(String.format("Identity %s not found in the access policies.", objectId))); } @Override public VaultImpl withDeploymentEnabled() { innerModel().properties().withEnabledForDeployment(true); return this; } @Override public VaultImpl withDiskEncryptionEnabled() { innerModel().properties().withEnabledForDiskEncryption(true); return this; } @Override public VaultImpl withTemplateDeploymentEnabled() { innerModel().properties().withEnabledForTemplateDeployment(true); return this; } @Override public VaultImpl withSoftDeleteEnabled() { innerModel().properties().withEnableSoftDelete(true); return this; } @Override public VaultImpl withPurgeProtectionEnabled() { innerModel().properties().withEnablePurgeProtection(true); return this; } @Override public VaultImpl withDeploymentDisabled() { innerModel().properties().withEnabledForDeployment(false); return this; } @Override public VaultImpl withDiskEncryptionDisabled() { innerModel().properties().withEnabledForDiskEncryption(false); return this; } @Override public VaultImpl withTemplateDeploymentDisabled() { innerModel().properties().withEnabledForTemplateDeployment(false); return this; } @Override public VaultImpl withSku(SkuName skuName) { if (innerModel().properties() == null) { innerModel().withProperties(new VaultProperties()); } innerModel().properties().withSku(new Sku().withName(skuName).withFamily(SkuFamily.A)); return this; } private Mono<List<AccessPolicy>> populateAccessPolicies() { List<Mono<?>> observables = new ArrayList<>(); for (final AccessPolicyImpl accessPolicy : accessPolicies) { if (accessPolicy.objectId() == null) { if (accessPolicy.userPrincipalName() != null) { observables .add( authorizationManager .users() .getByNameAsync(accessPolicy.userPrincipalName()) .subscribeOn(ResourceManagerUtils.InternalRuntimeContext.getReactorScheduler()) .doOnNext(user -> accessPolicy.forObjectId(user.id())) .switchIfEmpty( Mono .error( new ManagementException( String .format( "User principal name %s is not found in tenant %s", accessPolicy.userPrincipalName(), authorizationManager.tenantId()), null)))); } else if (accessPolicy.servicePrincipalName() != null) { observables .add( authorizationManager .servicePrincipals() .getByNameAsync(accessPolicy.servicePrincipalName()) .subscribeOn(ResourceManagerUtils.InternalRuntimeContext.getReactorScheduler()) .doOnNext(sp -> accessPolicy.forObjectId(sp.id())) .switchIfEmpty( Mono .error( new ManagementException( String .format( "Service principal name %s is not found in tenant %s", accessPolicy.servicePrincipalName(), authorizationManager.tenantId()), 
null)))); } else { throw logger.logExceptionAsError( new IllegalArgumentException("Access policy must specify object ID.")); } } } if (observables.isEmpty()) { return Mono.just(accessPolicies()); } else { return Mono.zip(observables, args -> accessPolicies()); } } @Override public Mono<Vault> createResourceAsync() { final VaultsClient client = this.manager().serviceClient().getVaults(); return populateAccessPolicies() .then( Mono .defer( () -> { VaultCreateOrUpdateParameters parameters = new VaultCreateOrUpdateParameters(); parameters.withLocation(regionName()); parameters.withProperties(innerModel().properties()); parameters.withTags(innerModel().tags()); parameters.properties().withAccessPolicies(new ArrayList<>()); for (AccessPolicy accessPolicy : accessPolicies) { parameters.properties().accessPolicies().add(accessPolicy.innerModel()); } return client.createOrUpdateAsync(resourceGroupName(), this.name(), parameters); })) .map( inner -> { this.setInner(inner); init(); return this; }); } @Override protected Mono<VaultInner> getInnerAsync() { return this.manager().serviceClient().getVaults().getByResourceGroupAsync(resourceGroupName(), this.name()); } @Override public CreateMode createMode() { return innerModel().properties().createMode(); } @Override public NetworkRuleSet networkRuleSet() { return innerModel().properties().networkAcls(); } @Override public VaultImpl withAccessFromAllNetworks() { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withDefaultAction(NetworkRuleAction.ALLOW); return this; } @Override public VaultImpl withAccessFromSelectedNetworks() { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withDefaultAction(NetworkRuleAction.DENY); return this; } /** * Specifies that access to the storage account should be allowed from the given ip address or ip address range. 
* * @param ipAddressOrRange the ip address or ip address range in cidr format * @return VaultImpl */ private VaultImpl withAccessAllowedFromIpAddressOrRange(String ipAddressOrRange) { NetworkRuleSet networkRuleSet = innerModel().properties().networkAcls(); if (networkRuleSet.ipRules() == null) { networkRuleSet.withIpRules(new ArrayList<>()); } boolean found = false; for (IpRule rule : networkRuleSet.ipRules()) { if (rule.value().equalsIgnoreCase(ipAddressOrRange)) { found = true; break; } } if (!found) { networkRuleSet.ipRules().add(new IpRule().withValue(ipAddressOrRange)); } return this; } @Override public VaultImpl withAccessFromIpAddress(String ipAddress) { return withAccessAllowedFromIpAddressOrRange(ipAddress); } @Override public VaultImpl withAccessFromIpAddressRange(String ipAddressCidr) { return withAccessAllowedFromIpAddressOrRange(ipAddressCidr); } @Override public VaultImpl withAccessFromAzureServices() { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withBypass(NetworkRuleBypassOptions.AZURE_SERVICES); return this; } @Override public VaultImpl withBypass(NetworkRuleBypassOptions bypass) { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withBypass(bypass); return this; } @Override public VaultImpl withDefaultAction(NetworkRuleAction defaultAction) { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withDefaultAction(defaultAction); return this; } @Override public VaultImpl withVirtualNetworkRules(List<VirtualNetworkRule> virtualNetworkRules) { if (innerModel().properties().networkAcls() == null) { innerModel().properties().withNetworkAcls(new NetworkRuleSet()); } innerModel().properties().networkAcls().withVirtualNetworkRules(virtualNetworkRules); return this; } @Override public PagedIterable<PrivateLinkResource> listPrivateLinkResources() { return new PagedIterable<>(listPrivateLinkResourcesAsync()); } @Override public PagedFlux<PrivateLinkResource> listPrivateLinkResourcesAsync() { Mono<Response<List<PrivateLinkResource>>> retList = this.manager().serviceClient().getPrivateLinkResources() .listByVaultWithResponseAsync(this.resourceGroupName(), this.name()) .map(response -> new SimpleResponse<>(response, response.getValue().value().stream() .map(PrivateLinkResourceImpl::new) .collect(Collectors.toList()))); return PagedConverter.convertListToPagedFlux(retList); } @Override public void approvePrivateEndpointConnection(String privateEndpointConnectionName) { approvePrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> approvePrivateEndpointConnectionAsync(String privateEndpointConnectionName) { return manager().serviceClient().getPrivateEndpointConnections().putAsync( this.resourceGroupName(), this.name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionState().withStatus(PrivateEndpointServiceConnectionStatus.APPROVED))) .then(); } @Override public void rejectPrivateEndpointConnection(String privateEndpointConnectionName) { rejectPrivateEndpointConnectionAsync(privateEndpointConnectionName).block(); } @Override public Mono<Void> rejectPrivateEndpointConnectionAsync(String privateEndpointConnectionName) { 
return manager().serviceClient().getPrivateEndpointConnections().putAsync( this.resourceGroupName(), this.name(), privateEndpointConnectionName, new PrivateEndpointConnectionInner().withPrivateLinkServiceConnectionState( new PrivateLinkServiceConnectionState().withStatus(PrivateEndpointServiceConnectionStatus.REJECTED))) .then(); } private static final class PrivateLinkResourceImpl implements PrivateLinkResource { private final com.azure.resourcemanager.keyvault.models.PrivateLinkResource innerModel; private PrivateLinkResourceImpl(com.azure.resourcemanager.keyvault.models.PrivateLinkResource innerModel) { this.innerModel = innerModel; } @Override public String groupId() { return innerModel.groupId(); } @Override public List<String> requiredMemberNames() { return Collections.unmodifiableList(innerModel.requiredMembers()); } @Override public List<String> requiredDnsZoneNames() { return Collections.unmodifiableList(innerModel.requiredZoneNames()); } } }
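The comment above concerns pinning the Key Vault data-plane clients to service version 7.2 while newer library releases target 7.3-preview. The sketch below, which is not part of VaultImpl, contrasts pinning with tracking the library's latest version via SecretServiceVersion.getLatest(); the vaultUrl and pipeline parameters stand in for the values built in init().

import com.azure.core.http.HttpPipeline;
import com.azure.security.keyvault.secrets.SecretAsyncClient;
import com.azure.security.keyvault.secrets.SecretClientBuilder;
import com.azure.security.keyvault.secrets.SecretServiceVersion;

// Illustrative helper showing two ways the secret client could pick its service version.
final class SecretClientVersionSketch {

    // Pinning keeps the wire contract stable regardless of which client library version is on
    // the classpath; this mirrors what init() above does.
    static SecretAsyncClient pinnedToV72(String vaultUrl, HttpPipeline pipeline) {
        return new SecretClientBuilder()
            .vaultUrl(vaultUrl)
            .pipeline(pipeline)
            .serviceVersion(SecretServiceVersion.V7_2)
            .buildAsyncClient();
    }

    // Using getLatest() (or omitting serviceVersion) tracks whatever the library defaults to,
    // for example a 7.3-preview in newer releases.
    static SecretAsyncClient latestVersion(String vaultUrl, HttpPipeline pipeline) {
        return new SecretClientBuilder()
            .vaultUrl(vaultUrl)
            .pipeline(pipeline)
            .serviceVersion(SecretServiceVersion.getLatest())
            .buildAsyncClient();
    }
}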
When will `producerFactoryCustomizer` be null?
public void addProducerFactoryCustomizer(EventHubsProducerFactoryCustomizer producerFactoryCustomizer) { if (producerFactoryCustomizer != null) { this.producerFactoryCustomizers.add(producerFactoryCustomizer); } }
if (producerFactoryCustomizer != null) {
public void addProducerFactoryCustomizer(EventHubsProducerFactoryCustomizer producerFactoryCustomizer) { if (producerFactoryCustomizer != null) { this.producerFactoryCustomizers.add(producerFactoryCustomizer); } }
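The question above is when a null customizer could actually reach this method. If the answer is "never by design", a fail-fast check is an alternative to silently skipping the argument. A minimal, self-contained sketch of that alternative follows; the generic registry class is illustrative (the real binder stores EventHubsProducerFactoryCustomizer instances), and it relies on Spring's Assert utility, which is already on the binder's classpath.

import java.util.ArrayList;
import java.util.List;

import org.springframework.util.Assert;

// Illustrative stand-in for the binder's customizer list.
final class CustomizerRegistrySketch<T> {
    private final List<T> customizers = new ArrayList<>();

    void addCustomizer(T customizer) {
        // Fail fast: if callers should never pass null, an IllegalArgumentException here makes
        // a misconfiguration visible instead of silently dropping the customizer.
        Assert.notNull(customizer, "'customizer' must not be null");
        this.customizers.add(customizer);
    }
}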
class EventHubsMessageChannelBinder extends AbstractMessageChannelBinder<ExtendedConsumerProperties<EventHubsConsumerProperties>, ExtendedProducerProperties<EventHubsProducerProperties>, EventHubsChannelProvisioner> implements ExtendedPropertiesBinder<MessageChannel, EventHubsConsumerProperties, EventHubsProducerProperties> { private static final Logger LOGGER = LoggerFactory.getLogger(EventHubsMessageChannelBinder.class); private static final ExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); private NamespaceProperties namespaceProperties; private EventHubsTemplate eventHubsTemplate; private CheckpointStore checkpointStore; private DefaultEventHubsNamespaceProcessorFactory processorFactory; private final List<EventHubsMessageListenerContainer> eventHubsMessageListenerContainers = new ArrayList<>(); private final InstrumentationManager instrumentationManager = new DefaultInstrumentationManager(); private EventHubsExtendedBindingProperties bindingProperties = new EventHubsExtendedBindingProperties(); private final Map<String, ExtendedProducerProperties<EventHubsProducerProperties>> extendedProducerPropertiesMap = new ConcurrentHashMap<>(); private final List<EventHubsProducerFactoryCustomizer> producerFactoryCustomizers = new ArrayList<>(); private final List<EventHubsProcessorFactoryCustomizer> processorFactoryCustomizers = new ArrayList<>(); /** * Construct a {@link EventHubsMessageChannelBinder} with the specified headers to embed and {@link EventHubsChannelProvisioner}. * * @param headersToEmbed the headers to embed * @param provisioningProvider the provisioning provider */ public EventHubsMessageChannelBinder(String[] headersToEmbed, EventHubsChannelProvisioner provisioningProvider) { super(headersToEmbed, provisioningProvider); } @Override protected MessageHandler createProducerMessageHandler( ProducerDestination destination, ExtendedProducerProperties<EventHubsProducerProperties> producerProperties, MessageChannel errorChannel) { extendedProducerPropertiesMap.put(destination.getName(), producerProperties); Assert.notNull(getEventHubTemplate(), "eventHubsTemplate can't be null when create a producer"); DefaultMessageHandler handler = new DefaultMessageHandler(destination.getName(), this.eventHubsTemplate); handler.setBeanFactory(getBeanFactory()); handler.setSync(producerProperties.getExtension().isSync()); handler.setSendTimeout(producerProperties.getExtension().getSendTimeout().toMillis()); handler.setSendFailureChannel(errorChannel); String instrumentationId = Instrumentation.buildId(PRODUCER, destination.getName()); handler.setSendCallback(new InstrumentationSendCallback(instrumentationId, instrumentationManager)); if (producerProperties.isPartitioned()) { handler.setPartitionIdExpression( EXPRESSION_PARSER.parseExpression("headers['" + BinderHeaders.PARTITION_HEADER + "']")); } else { handler.setPartitionKeyExpression(new FunctionExpression<Message<?>>(m -> m.getPayload().hashCode())); } return handler; } @Override protected MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { Assert.notNull(getProcessorFactory(), "processor factory can't be null when create a consumer"); boolean anonymous = !StringUtils.hasText(group); if (anonymous) { group = "anonymous." 
+ UUID.randomUUID(); } EventHubsContainerProperties containerProperties = createContainerProperties(destination, group, properties); EventHubsMessageListenerContainer listenerContainer = new EventHubsMessageListenerContainer( getProcessorFactory(), containerProperties); this.eventHubsMessageListenerContainers.add(listenerContainer); EventHubsInboundChannelAdapter inboundAdapter; if (properties.isBatchMode()) { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer, ListenerMode.BATCH); } else { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer); } inboundAdapter.setBeanFactory(getBeanFactory()); String instrumentationId = Instrumentation.buildId(CONSUMER, destination.getName() + "/" + group); inboundAdapter.setInstrumentationManager(instrumentationManager); inboundAdapter.setInstrumentationId(instrumentationId); ErrorInfrastructure errorInfrastructure = registerErrorInfrastructure(destination, group, properties); inboundAdapter.setErrorChannel(errorInfrastructure.getErrorChannel()); return inboundAdapter; } /** * Create {@link EventHubsContainerProperties} from the extended {@link EventHubsConsumerProperties}. * @param destination reference to the consumer destination. * @param group the consumer group. * @param properties the consumer properties. * @return the {@link EventHubsContainerProperties}. */ private EventHubsContainerProperties createContainerProperties( ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { EventHubsContainerProperties containerProperties = new EventHubsContainerProperties(); AzurePropertiesUtils.copyAzureCommonProperties(properties.getExtension(), containerProperties); ProcessorPropertiesMerger.copyProcessorPropertiesIfNotNull(properties.getExtension(), containerProperties); containerProperties.setEventHubName(destination.getName()); containerProperties.setConsumerGroup(group); containerProperties.setCheckpointConfig(properties.getExtension().getCheckpoint()); return containerProperties; } @Override public EventHubsConsumerProperties getExtendedConsumerProperties(String destination) { return this.bindingProperties.getExtendedConsumerProperties(destination); } @Override public EventHubsProducerProperties getExtendedProducerProperties(String destination) { return this.bindingProperties.getExtendedProducerProperties(destination); } @Override public String getDefaultsPrefix() { return this.bindingProperties.getDefaultsPrefix(); } @Override public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() { return this.bindingProperties.getExtendedPropertiesEntryClass(); } /** * Set binding properties. 
* * @param bindingProperties the binding properties */ public void setBindingProperties(EventHubsExtendedBindingProperties bindingProperties) { this.bindingProperties = bindingProperties; } private PropertiesSupplier<String, ProducerProperties> getProducerPropertiesSupplier() { return key -> { if (this.extendedProducerPropertiesMap.containsKey(key)) { EventHubsProducerProperties producerProperties = this.extendedProducerPropertiesMap.get(key) .getExtension(); producerProperties.setEventHubName(key); return producerProperties; } else { LOGGER.debug("Can't find extended properties for {}", key); return null; } }; } private EventHubsTemplate getEventHubTemplate() { if (this.eventHubsTemplate == null) { DefaultEventHubsNamespaceProducerFactory factory = new DefaultEventHubsNamespaceProducerFactory( this.namespaceProperties, getProducerPropertiesSupplier()); producerFactoryCustomizers.forEach(customizer -> customizer.customize(factory)); factory.addListener((name, producerAsyncClient) -> { DefaultInstrumentation instrumentation = new DefaultInstrumentation(name, PRODUCER); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); this.eventHubsTemplate = new EventHubsTemplate(factory); } return this.eventHubsTemplate; } private EventHubsProcessorFactory getProcessorFactory() { if (this.processorFactory == null) { this.processorFactory = new DefaultEventHubsNamespaceProcessorFactory( this.checkpointStore, this.namespaceProperties); processorFactoryCustomizers.forEach(customizer -> customizer.customize(processorFactory)); processorFactory.addListener((name, consumerGroup, processorClient) -> { String instrumentationName = name + "/" + consumerGroup; Instrumentation instrumentation = new EventHubsProcessorInstrumentation(instrumentationName, CONSUMER, Duration.ofMinutes(2)); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); } return this.processorFactory; } /** * Set namespace properties. * * @param namespaceProperties the namespace properties */ public void setNamespaceProperties(NamespaceProperties namespaceProperties) { this.namespaceProperties = namespaceProperties; } /** * Set checkpoint store. * * @param checkpointStore the checkpoint store */ public void setCheckpointStore(CheckpointStore checkpointStore) { this.checkpointStore = checkpointStore; } /** * Get instrumentation manager. * * @return instrumentationManager the instrumentation manager * @see InstrumentationManager */ InstrumentationManager getInstrumentationManager() { return instrumentationManager; } /** * Add a producer factory customizer. * * @param producerFactoryCustomizer The producer factory customizer to add. */ /** * Add a processor factory customizer. * * @param processorFactoryCustomizer The processor factory customizer to add. */ public void addProcessorFactoryCustomizer(EventHubsProcessorFactoryCustomizer processorFactoryCustomizer) { if (processorFactoryCustomizer != null) { this.processorFactoryCustomizers.add(processorFactoryCustomizer); } } }
class EventHubsMessageChannelBinder extends AbstractMessageChannelBinder<ExtendedConsumerProperties<EventHubsConsumerProperties>, ExtendedProducerProperties<EventHubsProducerProperties>, EventHubsChannelProvisioner> implements ExtendedPropertiesBinder<MessageChannel, EventHubsConsumerProperties, EventHubsProducerProperties> { private static final Logger LOGGER = LoggerFactory.getLogger(EventHubsMessageChannelBinder.class); private static final ExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); private NamespaceProperties namespaceProperties; private EventHubsTemplate eventHubsTemplate; private CheckpointStore checkpointStore; private DefaultEventHubsNamespaceProcessorFactory processorFactory; private final List<EventHubsMessageListenerContainer> eventHubsMessageListenerContainers = new ArrayList<>(); private final InstrumentationManager instrumentationManager = new DefaultInstrumentationManager(); private EventHubsExtendedBindingProperties bindingProperties = new EventHubsExtendedBindingProperties(); private final Map<String, ExtendedProducerProperties<EventHubsProducerProperties>> extendedProducerPropertiesMap = new ConcurrentHashMap<>(); private final List<EventHubsProducerFactoryCustomizer> producerFactoryCustomizers = new ArrayList<>(); private final List<EventHubsProcessorFactoryCustomizer> processorFactoryCustomizers = new ArrayList<>(); /** * Construct a {@link EventHubsMessageChannelBinder} with the specified headers to embed and {@link EventHubsChannelProvisioner}. * * @param headersToEmbed the headers to embed * @param provisioningProvider the provisioning provider */ public EventHubsMessageChannelBinder(String[] headersToEmbed, EventHubsChannelProvisioner provisioningProvider) { super(headersToEmbed, provisioningProvider); } @Override protected MessageHandler createProducerMessageHandler( ProducerDestination destination, ExtendedProducerProperties<EventHubsProducerProperties> producerProperties, MessageChannel errorChannel) { extendedProducerPropertiesMap.put(destination.getName(), producerProperties); Assert.notNull(getEventHubTemplate(), "eventHubsTemplate can't be null when create a producer"); DefaultMessageHandler handler = new DefaultMessageHandler(destination.getName(), this.eventHubsTemplate); handler.setBeanFactory(getBeanFactory()); handler.setSync(producerProperties.getExtension().isSync()); handler.setSendTimeout(producerProperties.getExtension().getSendTimeout().toMillis()); handler.setSendFailureChannel(errorChannel); String instrumentationId = Instrumentation.buildId(PRODUCER, destination.getName()); handler.setSendCallback(new InstrumentationSendCallback(instrumentationId, instrumentationManager)); if (producerProperties.isPartitioned()) { handler.setPartitionIdExpression( EXPRESSION_PARSER.parseExpression("headers['" + BinderHeaders.PARTITION_HEADER + "']")); } else { handler.setPartitionKeyExpression(new FunctionExpression<Message<?>>(m -> m.getPayload().hashCode())); } return handler; } @Override protected MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { Assert.notNull(getProcessorFactory(), "processor factory can't be null when create a consumer"); boolean anonymous = !StringUtils.hasText(group); if (anonymous) { group = "anonymous." 
+ UUID.randomUUID(); } EventHubsContainerProperties containerProperties = createContainerProperties(destination, group, properties); EventHubsMessageListenerContainer listenerContainer = new EventHubsMessageListenerContainer( getProcessorFactory(), containerProperties); this.eventHubsMessageListenerContainers.add(listenerContainer); EventHubsInboundChannelAdapter inboundAdapter; if (properties.isBatchMode()) { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer, ListenerMode.BATCH); } else { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer); } inboundAdapter.setBeanFactory(getBeanFactory()); String instrumentationId = Instrumentation.buildId(CONSUMER, destination.getName() + "/" + group); inboundAdapter.setInstrumentationManager(instrumentationManager); inboundAdapter.setInstrumentationId(instrumentationId); ErrorInfrastructure errorInfrastructure = registerErrorInfrastructure(destination, group, properties); inboundAdapter.setErrorChannel(errorInfrastructure.getErrorChannel()); return inboundAdapter; } /** * Create {@link EventHubsContainerProperties} from the extended {@link EventHubsConsumerProperties}. * @param destination reference to the consumer destination. * @param group the consumer group. * @param properties the consumer properties. * @return the {@link EventHubsContainerProperties}. */ private EventHubsContainerProperties createContainerProperties( ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { EventHubsContainerProperties containerProperties = new EventHubsContainerProperties(); AzurePropertiesUtils.copyAzureCommonProperties(properties.getExtension(), containerProperties); ProcessorPropertiesMerger.copyProcessorPropertiesIfNotNull(properties.getExtension(), containerProperties); containerProperties.setEventHubName(destination.getName()); containerProperties.setConsumerGroup(group); containerProperties.setCheckpointConfig(properties.getExtension().getCheckpoint()); return containerProperties; } @Override public EventHubsConsumerProperties getExtendedConsumerProperties(String destination) { return this.bindingProperties.getExtendedConsumerProperties(destination); } @Override public EventHubsProducerProperties getExtendedProducerProperties(String destination) { return this.bindingProperties.getExtendedProducerProperties(destination); } @Override public String getDefaultsPrefix() { return this.bindingProperties.getDefaultsPrefix(); } @Override public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() { return this.bindingProperties.getExtendedPropertiesEntryClass(); } /** * Set binding properties. 
* * @param bindingProperties the binding properties */ public void setBindingProperties(EventHubsExtendedBindingProperties bindingProperties) { this.bindingProperties = bindingProperties; } private PropertiesSupplier<String, ProducerProperties> getProducerPropertiesSupplier() { return key -> { if (this.extendedProducerPropertiesMap.containsKey(key)) { EventHubsProducerProperties producerProperties = this.extendedProducerPropertiesMap.get(key) .getExtension(); producerProperties.setEventHubName(key); return producerProperties; } else { LOGGER.debug("Can't find extended properties for {}", key); return null; } }; } private EventHubsTemplate getEventHubTemplate() { if (this.eventHubsTemplate == null) { DefaultEventHubsNamespaceProducerFactory factory = new DefaultEventHubsNamespaceProducerFactory( this.namespaceProperties, getProducerPropertiesSupplier()); producerFactoryCustomizers.forEach(customizer -> customizer.customize(factory)); factory.addListener((name, producerAsyncClient) -> { DefaultInstrumentation instrumentation = new DefaultInstrumentation(name, PRODUCER); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); this.eventHubsTemplate = new EventHubsTemplate(factory); } return this.eventHubsTemplate; } private EventHubsProcessorFactory getProcessorFactory() { if (this.processorFactory == null) { this.processorFactory = new DefaultEventHubsNamespaceProcessorFactory( this.checkpointStore, this.namespaceProperties); processorFactoryCustomizers.forEach(customizer -> customizer.customize(processorFactory)); processorFactory.addListener((name, consumerGroup, processorClient) -> { String instrumentationName = name + "/" + consumerGroup; Instrumentation instrumentation = new EventHubsProcessorInstrumentation(instrumentationName, CONSUMER, Duration.ofMinutes(2)); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); } return this.processorFactory; } /** * Set namespace properties. * * @param namespaceProperties the namespace properties */ public void setNamespaceProperties(NamespaceProperties namespaceProperties) { this.namespaceProperties = namespaceProperties; } /** * Set checkpoint store. * * @param checkpointStore the checkpoint store */ public void setCheckpointStore(CheckpointStore checkpointStore) { this.checkpointStore = checkpointStore; } /** * Get instrumentation manager. * * @return instrumentationManager the instrumentation manager * @see InstrumentationManager */ InstrumentationManager getInstrumentationManager() { return instrumentationManager; } /** * Add a producer factory customizer. * * @param producerFactoryCustomizer The producer factory customizer to add. */ /** * Add a processor factory customizer. * * @param processorFactoryCustomizer The processor factory customizer to add. */ public void addProcessorFactoryCustomizer(EventHubsProcessorFactoryCustomizer processorFactoryCustomizer) { if (processorFactoryCustomizer != null) { this.processorFactoryCustomizers.add(processorFactoryCustomizer); } } }
Do we need to consider thread safety here?
public void addProducerFactoryCustomizer(EventHubsProducerFactoryCustomizer producerFactoryCustomizer) { if (producerFactoryCustomizer != null) { this.producerFactoryCustomizers.add(producerFactoryCustomizer); } }
this.producerFactoryCustomizers.add(producerFactoryCustomizer);
public void addProducerFactoryCustomizer(EventHubsProducerFactoryCustomizer producerFactoryCustomizer) { if (producerFactoryCustomizer != null) { this.producerFactoryCustomizers.add(producerFactoryCustomizer); } }
class EventHubsMessageChannelBinder extends AbstractMessageChannelBinder<ExtendedConsumerProperties<EventHubsConsumerProperties>, ExtendedProducerProperties<EventHubsProducerProperties>, EventHubsChannelProvisioner> implements ExtendedPropertiesBinder<MessageChannel, EventHubsConsumerProperties, EventHubsProducerProperties> { private static final Logger LOGGER = LoggerFactory.getLogger(EventHubsMessageChannelBinder.class); private static final ExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); private NamespaceProperties namespaceProperties; private EventHubsTemplate eventHubsTemplate; private CheckpointStore checkpointStore; private DefaultEventHubsNamespaceProcessorFactory processorFactory; private final List<EventHubsMessageListenerContainer> eventHubsMessageListenerContainers = new ArrayList<>(); private final InstrumentationManager instrumentationManager = new DefaultInstrumentationManager(); private EventHubsExtendedBindingProperties bindingProperties = new EventHubsExtendedBindingProperties(); private final Map<String, ExtendedProducerProperties<EventHubsProducerProperties>> extendedProducerPropertiesMap = new ConcurrentHashMap<>(); private final List<EventHubsProducerFactoryCustomizer> producerFactoryCustomizers = new ArrayList<>(); private final List<EventHubsProcessorFactoryCustomizer> processorFactoryCustomizers = new ArrayList<>(); /** * Construct a {@link EventHubsMessageChannelBinder} with the specified headers to embed and {@link EventHubsChannelProvisioner}. * * @param headersToEmbed the headers to embed * @param provisioningProvider the provisioning provider */ public EventHubsMessageChannelBinder(String[] headersToEmbed, EventHubsChannelProvisioner provisioningProvider) { super(headersToEmbed, provisioningProvider); } @Override protected MessageHandler createProducerMessageHandler( ProducerDestination destination, ExtendedProducerProperties<EventHubsProducerProperties> producerProperties, MessageChannel errorChannel) { extendedProducerPropertiesMap.put(destination.getName(), producerProperties); Assert.notNull(getEventHubTemplate(), "eventHubsTemplate can't be null when create a producer"); DefaultMessageHandler handler = new DefaultMessageHandler(destination.getName(), this.eventHubsTemplate); handler.setBeanFactory(getBeanFactory()); handler.setSync(producerProperties.getExtension().isSync()); handler.setSendTimeout(producerProperties.getExtension().getSendTimeout().toMillis()); handler.setSendFailureChannel(errorChannel); String instrumentationId = Instrumentation.buildId(PRODUCER, destination.getName()); handler.setSendCallback(new InstrumentationSendCallback(instrumentationId, instrumentationManager)); if (producerProperties.isPartitioned()) { handler.setPartitionIdExpression( EXPRESSION_PARSER.parseExpression("headers['" + BinderHeaders.PARTITION_HEADER + "']")); } else { handler.setPartitionKeyExpression(new FunctionExpression<Message<?>>(m -> m.getPayload().hashCode())); } return handler; } @Override protected MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { Assert.notNull(getProcessorFactory(), "processor factory can't be null when create a consumer"); boolean anonymous = !StringUtils.hasText(group); if (anonymous) { group = "anonymous." 
+ UUID.randomUUID(); } EventHubsContainerProperties containerProperties = createContainerProperties(destination, group, properties); EventHubsMessageListenerContainer listenerContainer = new EventHubsMessageListenerContainer( getProcessorFactory(), containerProperties); this.eventHubsMessageListenerContainers.add(listenerContainer); EventHubsInboundChannelAdapter inboundAdapter; if (properties.isBatchMode()) { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer, ListenerMode.BATCH); } else { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer); } inboundAdapter.setBeanFactory(getBeanFactory()); String instrumentationId = Instrumentation.buildId(CONSUMER, destination.getName() + "/" + group); inboundAdapter.setInstrumentationManager(instrumentationManager); inboundAdapter.setInstrumentationId(instrumentationId); ErrorInfrastructure errorInfrastructure = registerErrorInfrastructure(destination, group, properties); inboundAdapter.setErrorChannel(errorInfrastructure.getErrorChannel()); return inboundAdapter; } /** * Create {@link EventHubsContainerProperties} from the extended {@link EventHubsConsumerProperties}. * @param destination reference to the consumer destination. * @param group the consumer group. * @param properties the consumer properties. * @return the {@link EventHubsContainerProperties}. */ private EventHubsContainerProperties createContainerProperties( ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { EventHubsContainerProperties containerProperties = new EventHubsContainerProperties(); AzurePropertiesUtils.copyAzureCommonProperties(properties.getExtension(), containerProperties); ProcessorPropertiesMerger.copyProcessorPropertiesIfNotNull(properties.getExtension(), containerProperties); containerProperties.setEventHubName(destination.getName()); containerProperties.setConsumerGroup(group); containerProperties.setCheckpointConfig(properties.getExtension().getCheckpoint()); return containerProperties; } @Override public EventHubsConsumerProperties getExtendedConsumerProperties(String destination) { return this.bindingProperties.getExtendedConsumerProperties(destination); } @Override public EventHubsProducerProperties getExtendedProducerProperties(String destination) { return this.bindingProperties.getExtendedProducerProperties(destination); } @Override public String getDefaultsPrefix() { return this.bindingProperties.getDefaultsPrefix(); } @Override public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() { return this.bindingProperties.getExtendedPropertiesEntryClass(); } /** * Set binding properties. 
* * @param bindingProperties the binding properties */ public void setBindingProperties(EventHubsExtendedBindingProperties bindingProperties) { this.bindingProperties = bindingProperties; } private PropertiesSupplier<String, ProducerProperties> getProducerPropertiesSupplier() { return key -> { if (this.extendedProducerPropertiesMap.containsKey(key)) { EventHubsProducerProperties producerProperties = this.extendedProducerPropertiesMap.get(key) .getExtension(); producerProperties.setEventHubName(key); return producerProperties; } else { LOGGER.debug("Can't find extended properties for {}", key); return null; } }; } private EventHubsTemplate getEventHubTemplate() { if (this.eventHubsTemplate == null) { DefaultEventHubsNamespaceProducerFactory factory = new DefaultEventHubsNamespaceProducerFactory( this.namespaceProperties, getProducerPropertiesSupplier()); producerFactoryCustomizers.forEach(customizer -> customizer.customize(factory)); factory.addListener((name, producerAsyncClient) -> { DefaultInstrumentation instrumentation = new DefaultInstrumentation(name, PRODUCER); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); this.eventHubsTemplate = new EventHubsTemplate(factory); } return this.eventHubsTemplate; } private EventHubsProcessorFactory getProcessorFactory() { if (this.processorFactory == null) { this.processorFactory = new DefaultEventHubsNamespaceProcessorFactory( this.checkpointStore, this.namespaceProperties); processorFactoryCustomizers.forEach(customizer -> customizer.customize(processorFactory)); processorFactory.addListener((name, consumerGroup, processorClient) -> { String instrumentationName = name + "/" + consumerGroup; Instrumentation instrumentation = new EventHubsProcessorInstrumentation(instrumentationName, CONSUMER, Duration.ofMinutes(2)); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); } return this.processorFactory; } /** * Set namespace properties. * * @param namespaceProperties the namespace properties */ public void setNamespaceProperties(NamespaceProperties namespaceProperties) { this.namespaceProperties = namespaceProperties; } /** * Set checkpoint store. * * @param checkpointStore the checkpoint store */ public void setCheckpointStore(CheckpointStore checkpointStore) { this.checkpointStore = checkpointStore; } /** * Get instrumentation manager. * * @return instrumentationManager the instrumentation manager * @see InstrumentationManager */ InstrumentationManager getInstrumentationManager() { return instrumentationManager; } /** * Add a producer factory customizer. * * @param producerFactoryCustomizer The producer factory customizer to add. */ /** * Add a processor factory customizer. * * @param processorFactoryCustomizer The processor factory customizer to add. */ public void addProcessorFactoryCustomizer(EventHubsProcessorFactoryCustomizer processorFactoryCustomizer) { if (processorFactoryCustomizer != null) { this.processorFactoryCustomizers.add(processorFactoryCustomizer); } } }
class EventHubsMessageChannelBinder extends AbstractMessageChannelBinder<ExtendedConsumerProperties<EventHubsConsumerProperties>, ExtendedProducerProperties<EventHubsProducerProperties>, EventHubsChannelProvisioner> implements ExtendedPropertiesBinder<MessageChannel, EventHubsConsumerProperties, EventHubsProducerProperties> { private static final Logger LOGGER = LoggerFactory.getLogger(EventHubsMessageChannelBinder.class); private static final ExpressionParser EXPRESSION_PARSER = new SpelExpressionParser(); private NamespaceProperties namespaceProperties; private EventHubsTemplate eventHubsTemplate; private CheckpointStore checkpointStore; private DefaultEventHubsNamespaceProcessorFactory processorFactory; private final List<EventHubsMessageListenerContainer> eventHubsMessageListenerContainers = new ArrayList<>(); private final InstrumentationManager instrumentationManager = new DefaultInstrumentationManager(); private EventHubsExtendedBindingProperties bindingProperties = new EventHubsExtendedBindingProperties(); private final Map<String, ExtendedProducerProperties<EventHubsProducerProperties>> extendedProducerPropertiesMap = new ConcurrentHashMap<>(); private final List<EventHubsProducerFactoryCustomizer> producerFactoryCustomizers = new ArrayList<>(); private final List<EventHubsProcessorFactoryCustomizer> processorFactoryCustomizers = new ArrayList<>(); /** * Construct a {@link EventHubsMessageChannelBinder} with the specified headers to embed and {@link EventHubsChannelProvisioner}. * * @param headersToEmbed the headers to embed * @param provisioningProvider the provisioning provider */ public EventHubsMessageChannelBinder(String[] headersToEmbed, EventHubsChannelProvisioner provisioningProvider) { super(headersToEmbed, provisioningProvider); } @Override protected MessageHandler createProducerMessageHandler( ProducerDestination destination, ExtendedProducerProperties<EventHubsProducerProperties> producerProperties, MessageChannel errorChannel) { extendedProducerPropertiesMap.put(destination.getName(), producerProperties); Assert.notNull(getEventHubTemplate(), "eventHubsTemplate can't be null when create a producer"); DefaultMessageHandler handler = new DefaultMessageHandler(destination.getName(), this.eventHubsTemplate); handler.setBeanFactory(getBeanFactory()); handler.setSync(producerProperties.getExtension().isSync()); handler.setSendTimeout(producerProperties.getExtension().getSendTimeout().toMillis()); handler.setSendFailureChannel(errorChannel); String instrumentationId = Instrumentation.buildId(PRODUCER, destination.getName()); handler.setSendCallback(new InstrumentationSendCallback(instrumentationId, instrumentationManager)); if (producerProperties.isPartitioned()) { handler.setPartitionIdExpression( EXPRESSION_PARSER.parseExpression("headers['" + BinderHeaders.PARTITION_HEADER + "']")); } else { handler.setPartitionKeyExpression(new FunctionExpression<Message<?>>(m -> m.getPayload().hashCode())); } return handler; } @Override protected MessageProducer createConsumerEndpoint(ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { Assert.notNull(getProcessorFactory(), "processor factory can't be null when create a consumer"); boolean anonymous = !StringUtils.hasText(group); if (anonymous) { group = "anonymous." 
+ UUID.randomUUID(); } EventHubsContainerProperties containerProperties = createContainerProperties(destination, group, properties); EventHubsMessageListenerContainer listenerContainer = new EventHubsMessageListenerContainer( getProcessorFactory(), containerProperties); this.eventHubsMessageListenerContainers.add(listenerContainer); EventHubsInboundChannelAdapter inboundAdapter; if (properties.isBatchMode()) { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer, ListenerMode.BATCH); } else { inboundAdapter = new EventHubsInboundChannelAdapter(listenerContainer); } inboundAdapter.setBeanFactory(getBeanFactory()); String instrumentationId = Instrumentation.buildId(CONSUMER, destination.getName() + "/" + group); inboundAdapter.setInstrumentationManager(instrumentationManager); inboundAdapter.setInstrumentationId(instrumentationId); ErrorInfrastructure errorInfrastructure = registerErrorInfrastructure(destination, group, properties); inboundAdapter.setErrorChannel(errorInfrastructure.getErrorChannel()); return inboundAdapter; } /** * Create {@link EventHubsContainerProperties} from the extended {@link EventHubsConsumerProperties}. * @param destination reference to the consumer destination. * @param group the consumer group. * @param properties the consumer properties. * @return the {@link EventHubsContainerProperties}. */ private EventHubsContainerProperties createContainerProperties( ConsumerDestination destination, String group, ExtendedConsumerProperties<EventHubsConsumerProperties> properties) { EventHubsContainerProperties containerProperties = new EventHubsContainerProperties(); AzurePropertiesUtils.copyAzureCommonProperties(properties.getExtension(), containerProperties); ProcessorPropertiesMerger.copyProcessorPropertiesIfNotNull(properties.getExtension(), containerProperties); containerProperties.setEventHubName(destination.getName()); containerProperties.setConsumerGroup(group); containerProperties.setCheckpointConfig(properties.getExtension().getCheckpoint()); return containerProperties; } @Override public EventHubsConsumerProperties getExtendedConsumerProperties(String destination) { return this.bindingProperties.getExtendedConsumerProperties(destination); } @Override public EventHubsProducerProperties getExtendedProducerProperties(String destination) { return this.bindingProperties.getExtendedProducerProperties(destination); } @Override public String getDefaultsPrefix() { return this.bindingProperties.getDefaultsPrefix(); } @Override public Class<? extends BinderSpecificPropertiesProvider> getExtendedPropertiesEntryClass() { return this.bindingProperties.getExtendedPropertiesEntryClass(); } /** * Set binding properties. 
* * @param bindingProperties the binding properties */ public void setBindingProperties(EventHubsExtendedBindingProperties bindingProperties) { this.bindingProperties = bindingProperties; } private PropertiesSupplier<String, ProducerProperties> getProducerPropertiesSupplier() { return key -> { if (this.extendedProducerPropertiesMap.containsKey(key)) { EventHubsProducerProperties producerProperties = this.extendedProducerPropertiesMap.get(key) .getExtension(); producerProperties.setEventHubName(key); return producerProperties; } else { LOGGER.debug("Can't find extended properties for {}", key); return null; } }; } private EventHubsTemplate getEventHubTemplate() { if (this.eventHubsTemplate == null) { DefaultEventHubsNamespaceProducerFactory factory = new DefaultEventHubsNamespaceProducerFactory( this.namespaceProperties, getProducerPropertiesSupplier()); producerFactoryCustomizers.forEach(customizer -> customizer.customize(factory)); factory.addListener((name, producerAsyncClient) -> { DefaultInstrumentation instrumentation = new DefaultInstrumentation(name, PRODUCER); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); this.eventHubsTemplate = new EventHubsTemplate(factory); } return this.eventHubsTemplate; } private EventHubsProcessorFactory getProcessorFactory() { if (this.processorFactory == null) { this.processorFactory = new DefaultEventHubsNamespaceProcessorFactory( this.checkpointStore, this.namespaceProperties); processorFactoryCustomizers.forEach(customizer -> customizer.customize(processorFactory)); processorFactory.addListener((name, consumerGroup, processorClient) -> { String instrumentationName = name + "/" + consumerGroup; Instrumentation instrumentation = new EventHubsProcessorInstrumentation(instrumentationName, CONSUMER, Duration.ofMinutes(2)); instrumentation.setStatus(Instrumentation.Status.UP); instrumentationManager.addHealthInstrumentation(instrumentation); }); } return this.processorFactory; } /** * Set namespace properties. * * @param namespaceProperties the namespace properties */ public void setNamespaceProperties(NamespaceProperties namespaceProperties) { this.namespaceProperties = namespaceProperties; } /** * Set checkpoint store. * * @param checkpointStore the checkpoint store */ public void setCheckpointStore(CheckpointStore checkpointStore) { this.checkpointStore = checkpointStore; } /** * Get instrumentation manager. * * @return instrumentationManager the instrumentation manager * @see InstrumentationManager */ InstrumentationManager getInstrumentationManager() { return instrumentationManager; } /** * Add a producer factory customizer. * * @param producerFactoryCustomizer The producer factory customizer to add. */ /** * Add a processor factory customizer. * * @param processorFactoryCustomizer The processor factory customizer to add. */ public void addProcessorFactoryCustomizer(EventHubsProcessorFactoryCustomizer processorFactoryCustomizer) { if (processorFactoryCustomizer != null) { this.processorFactoryCustomizers.add(processorFactoryCustomizer); } } }
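On the thread-safety question above: a minimal, self-contained sketch (not the binder's actual implementation) of how a customizer list could be made safe if registration and iteration ever happen on different threads. CustomizerRegistry, add, and applyTo are hypothetical names; CopyOnWriteArrayList is the real JDK type doing the work.

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.Consumer;

// Hypothetical sketch: a copy-on-write list suits this add-rarely, iterate-often pattern
// and avoids ConcurrentModificationException without explicit synchronization.
class CustomizerRegistry<T> {

    private final List<T> customizers = new CopyOnWriteArrayList<>();

    void add(T customizer) {
        if (customizer != null) {
            this.customizers.add(customizer);
        }
    }

    void applyTo(Consumer<T> action) {
        // forEach iterates over a snapshot of the list, so concurrent add() calls are safe.
        customizers.forEach(action);
    }
}

Whether this is needed depends on whether the binder is ever configured outside the single-threaded application-context refresh; if not, the plain ArrayList is sufficient.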
Is there any reason that we only use `sqlExpression` in hashCode, while equals compares all fields (nullable fields seem acceptable for `Objects.hash`)?
public int hashCode() { return sqlExpression.hashCode(); }
return sqlExpression.hashCode();
public int hashCode() { return sqlExpression.hashCode(); }
class SqlRuleFilter extends RuleFilter { private final Map<String, Object> properties = new HashMap<>(); private final String sqlExpression; private final String compatibilityLevel; private final Boolean requiresPreprocessing; /** * Creates a new instance with the given SQL expression. * * @param sqlExpression SQL expression for the filter. * * @throws NullPointerException if {@code sqlExpression} is null. * @throws IllegalArgumentException if {@code sqlExpression} is an empty string. */ public SqlRuleFilter(String sqlExpression) { final ClientLogger logger = new ClientLogger(SqlRuleFilter.class); if (sqlExpression == null) { throw logger.logExceptionAsError(new NullPointerException("'sqlExpression' cannot be null.")); } else if (sqlExpression.isEmpty()) { throw logger.logExceptionAsError( new IllegalArgumentException("'sqlExpression' cannot be an empty string.")); } this.sqlExpression = sqlExpression; this.compatibilityLevel = null; this.requiresPreprocessing = null; } /** * Package private constructor for creating a model deserialised from the service. * * @param sqlExpression SQL expression for the filter. * @param compatibilityLevel The compatibility level. * @param requiresPreprocessing Whether or not it requires preprocessing */ SqlRuleFilter(String sqlExpression, String compatibilityLevel, Boolean requiresPreprocessing) { this.sqlExpression = sqlExpression; this.compatibilityLevel = compatibilityLevel; this.requiresPreprocessing = requiresPreprocessing; } /** * Gets the compatibility level. * * @return The compatibility level. */ String getCompatibilityLevel() { return compatibilityLevel; } /** * Gets whether or not requires preprocessing. * * @return Whether or not requires preprocessing. */ Boolean isPreprocessingRequired() { return requiresPreprocessing; } /** * Gets the value of a filter expression. Allowed types: string, int, long, bool, double * * @return Gets the value of a filter expression. */ public Map<String, Object> getParameters() { return properties; } /** * Gets the SQL expression. * * @return The SQL expression. */ public String getSqlExpression() { return sqlExpression; } /** * Converts the value of the current instance to its equivalent string representation. * * @return A string representation of the current instance. */ @Override public String toString() { return String.format("SqlRuleFilter: %s", sqlExpression); } /** * Compares this RuleFilter to the specified object. The result is true if and only if the argument is not null * and is a SqlRuleFilter object that with the same parameters as this object. * * @param other - the object to which the current SqlRuleFilter should be compared. * @return True, if the passed object is a SqlRuleFilter with the same parameter values, False otherwise. */ @Override public boolean equals(Object other) { if (this == other) { return true; } if (!(other instanceof SqlRuleFilter)) { return false; } SqlRuleFilter that = (SqlRuleFilter) other; return sqlExpression.equals(that.sqlExpression) && Objects.equals(compatibilityLevel, that.compatibilityLevel) && Objects.equals(requiresPreprocessing, that.requiresPreprocessing) && Objects.equals(properties, that.properties); } /** * Returns a hash code for this SqlRuleFilter, which is the hashcode for the SqlExpression. * * @return a hash code value for this object. */ @Override }
class SqlRuleFilter extends RuleFilter { private final Map<String, Object> properties = new HashMap<>(); private final String sqlExpression; private final String compatibilityLevel; private final Boolean requiresPreprocessing; /** * Creates a new instance with the given SQL expression. * * @param sqlExpression SQL expression for the filter. * * @throws NullPointerException if {@code sqlExpression} is null. * @throws IllegalArgumentException if {@code sqlExpression} is an empty string. */ public SqlRuleFilter(String sqlExpression) { final ClientLogger logger = new ClientLogger(SqlRuleFilter.class); if (sqlExpression == null) { throw logger.logExceptionAsError(new NullPointerException("'sqlExpression' cannot be null.")); } else if (sqlExpression.isEmpty()) { throw logger.logExceptionAsError( new IllegalArgumentException("'sqlExpression' cannot be an empty string.")); } this.sqlExpression = sqlExpression; this.compatibilityLevel = null; this.requiresPreprocessing = null; } /** * Package private constructor for creating a model deserialised from the service. * * @param sqlExpression SQL expression for the filter. * @param compatibilityLevel The compatibility level. * @param requiresPreprocessing Whether or not it requires preprocessing */ SqlRuleFilter(String sqlExpression, String compatibilityLevel, Boolean requiresPreprocessing) { this.sqlExpression = sqlExpression; this.compatibilityLevel = compatibilityLevel; this.requiresPreprocessing = requiresPreprocessing; } /** * Gets the compatibility level. * * @return The compatibility level. */ String getCompatibilityLevel() { return compatibilityLevel; } /** * Gets whether or not requires preprocessing. * * @return Whether or not requires preprocessing. */ Boolean isPreprocessingRequired() { return requiresPreprocessing; } /** * Gets the value of a filter expression. Allowed types: string, int, long, bool, double * * @return Gets the value of a filter expression. */ public Map<String, Object> getParameters() { return properties; } /** * Gets the SQL expression. * * @return The SQL expression. */ public String getSqlExpression() { return sqlExpression; } /** * Converts the value of the current instance to its equivalent string representation. * * @return A string representation of the current instance. */ @Override public String toString() { return String.format("SqlRuleFilter: %s", sqlExpression); } /** * Compares this RuleFilter to the specified object. The result is true if and only if the argument is not null * and is a SqlRuleFilter object that with the same parameters as this object. * * @param other - the object to which the current SqlRuleFilter should be compared. * @return True, if the passed object is a SqlRuleFilter with the same parameter values, False otherwise. */ @Override public boolean equals(Object other) { if (this == other) { return true; } if (!(other instanceof SqlRuleFilter)) { return false; } SqlRuleFilter that = (SqlRuleFilter) other; return sqlExpression.equals(that.sqlExpression) && Objects.equals(compatibilityLevel, that.compatibilityLevel) && Objects.equals(requiresPreprocessing, that.requiresPreprocessing) && Objects.equals(properties, that.properties); } /** * Returns a hash code for this SqlRuleFilter, which is the hashcode for the SqlExpression. * * @return a hash code value for this object. */ @Override }
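On the hashCode/equals question above: hashing only sqlExpression still satisfies the contract (equal objects produce equal hashes), it just allows more collisions. Below is a minimal sketch of the alternative the comment hints at, hashing the same fields that equals compares; FilterHashExample is a hypothetical class used only to illustrate the pattern.

import java.util.Map;
import java.util.Objects;

// Hypothetical sketch: hash the same fields that equals() compares.
// Objects.hash tolerates null elements, so the nullable fields are safe to include.
final class FilterHashExample {
    private final String sqlExpression;
    private final String compatibilityLevel;     // nullable
    private final Boolean requiresPreprocessing; // nullable
    private final Map<String, Object> properties;

    FilterHashExample(String sqlExpression, String compatibilityLevel,
                      Boolean requiresPreprocessing, Map<String, Object> properties) {
        this.sqlExpression = sqlExpression;
        this.compatibilityLevel = compatibilityLevel;
        this.requiresPreprocessing = requiresPreprocessing;
        this.properties = properties;
    }

    @Override
    public int hashCode() {
        return Objects.hash(sqlExpression, compatibilityLevel, requiresPreprocessing, properties);
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (!(other instanceof FilterHashExample)) {
            return false;
        }
        FilterHashExample that = (FilterHashExample) other;
        return Objects.equals(sqlExpression, that.sqlExpression)
            && Objects.equals(compatibilityLevel, that.compatibilityLevel)
            && Objects.equals(requiresPreprocessing, that.requiresPreprocessing)
            && Objects.equals(properties, that.properties);
    }
}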
We could remove the outer "()". @chenrujun, is this the way to fix the bug? Is the return value of `getPropertyNames()` correct now?
public boolean containsProperty(String name) { return (getProperty(name) != null); }
return (getProperty(name) != null);
public boolean containsProperty(String name) { return getProperty(name) != null; }
class KeyVaultPropertySource extends EnumerablePropertySource<KeyVaultOperation> { private final KeyVaultOperation operations; /** * Create a new {@code KeyVaultPropertySource} with the given name and {@link KeyVaultOperation}. * @param name the associated name * @param operation the {@link KeyVaultOperation} */ public KeyVaultPropertySource(String name, KeyVaultOperation operation) { super(name, operation); this.operations = operation; } @Override public String[] getPropertyNames() { return this.operations.getPropertyNames(); } @Override public Object getProperty(String name) { return operations.getProperty(name); } @Override }
class KeyVaultPropertySource extends EnumerablePropertySource<KeyVaultOperation> { private final KeyVaultOperation operations; /** * Create a new {@code KeyVaultPropertySource} with the given name and {@link KeyVaultOperation}. * @param name the associated name * @param operation the {@link KeyVaultOperation} */ public KeyVaultPropertySource(String name, KeyVaultOperation operation) { super(name, operation); this.operations = operation; } @Override public String[] getPropertyNames() { return this.operations.getPropertyNames(); } @Override public Object getProperty(String name) { return operations.getProperty(name); } @Override }
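On the getPropertyNames() question above: one way to check the fix is a consistency test that every advertised property name is actually resolvable. PropertySourceConsistencyCheck and isConsistent are hypothetical names; getPropertyNames() and containsProperty() are real methods of Spring's EnumerablePropertySource/PropertySource.

import org.springframework.core.env.EnumerablePropertySource;

// Hypothetical helper: a property source is self-consistent when every name it
// enumerates can also be resolved, i.e. containsProperty(name) is true for each one.
final class PropertySourceConsistencyCheck {

    static boolean isConsistent(EnumerablePropertySource<?> source) {
        for (String name : source.getPropertyNames()) {
            if (!source.containsProperty(name)) {
                return false; // an enumerated name that cannot be resolved indicates a bug
            }
        }
        return true;
    }

    private PropertySourceConsistencyCheck() {
    }
}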
Should we test it for both 307 and 308, since both are return codes handled by our default redirect strategy?
public void defaultRedirectAuthorizationHeaderCleared() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: headers.put("Authorization", "12345"); HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy()) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(200, response.getStatusCode()); assertNull(response.getHeaders().getValue("Authorization")); }
return Mono.just(new MockHttpResponse(request, 308, httpHeader));
public void defaultRedirectAuthorizationHeaderCleared() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: headers.put("Authorization", "12345"); HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy()) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(200, response.getStatusCode()); assertNull(response.getHeaders().getValue("Authorization")); }
class RedirectPolicyTest { @Test public void noRedirectPolicyTest() throws Exception { final HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(new NoOpHttpClient() { @Override public Mono<HttpResponse> send(HttpRequest request) { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } } }) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(308, response.getStatusCode()); } @Test public void defaultRedirectWhen308() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy()) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(200, response.getStatusCode()); } @Test public void redirectForNAttempts() throws MalformedURLException { final int[] requestCount = {1}; RecordingHttpClient httpClient = new RecordingHttpClient(request -> { Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); requestCount[0]++; return Mono.just(new MockHttpResponse(request, 308, httpHeader)); }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy(5))) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(5, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectNonAllowedMethodTest() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy(5))) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.POST, new URL("http: assertEquals(1, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectAllowedStatusCodesTest() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy())) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: 
assertEquals(2, httpClient.getCount()); assertEquals(200, response.getStatusCode()); } @Test public void alreadyAttemptedUrlsTest() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy())) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(2, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectForProvidedHeader() throws MalformedURLException { final int[] requestCount = {1}; RecordingHttpClient httpClient = new RecordingHttpClient(request -> { Map<String, String> headers = new HashMap<>(); headers.put("Location1", "http: HttpHeaders httpHeader = new HttpHeaders(headers); requestCount[0]++; return Mono.just(new MockHttpResponse(request, 308, httpHeader)); }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy(5, "Location1", null))) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(5, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectForProvidedMethods() throws MalformedURLException { Set<HttpMethod> allowedMethods = new HashSet<HttpMethod>() { { add(HttpMethod.GET); add(HttpMethod.PUT); add(HttpMethod.POST); } }; final int[] requestCount = {1}; RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); request.setHttpMethod(HttpMethod.PUT); requestCount[0]++; return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else if (request.getUrl().toString().equals("http: && requestCount[0] == 2) { Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); request.setHttpMethod(HttpMethod.POST); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy(5, null, allowedMethods))) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(2, httpClient.getCount()); assertEquals(200, response.getStatusCode()); } @Test public void nullRedirectUrlTest() throws MalformedURLException { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new 
MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy())) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(1, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectForMultipleRequests() throws MalformedURLException { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy()) .build(); HttpResponse response1 = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: HttpResponse response2 = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(4, httpClient.getCount()); assertEquals(200, response1.getStatusCode()); assertEquals(200, response2.getStatusCode()); } @Test public void nonRedirectRequest() throws MalformedURLException { final HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(new NoOpHttpClient() { @Override public Mono<HttpResponse> send(HttpRequest request) { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 401, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } } }) .policies(new RedirectPolicy()) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(401, response.getStatusCode()); } @Test static class RecordingHttpClient implements HttpClient { private final AtomicInteger count = new AtomicInteger(); private final Function<HttpRequest, Mono<HttpResponse>> handler; RecordingHttpClient(Function<HttpRequest, Mono<HttpResponse>> handler) { this.handler = handler; } @Override public Mono<HttpResponse> send(HttpRequest httpRequest) { count.getAndIncrement(); return handler.apply(httpRequest); } int getCount() { return count.get(); } void resetCount() { count.set(0); } } }
class RedirectPolicyTest { @Test public void noRedirectPolicyTest() throws Exception { final HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(new NoOpHttpClient() { @Override public Mono<HttpResponse> send(HttpRequest request) { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } } }) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(308, response.getStatusCode()); } @ParameterizedTest @ValueSource(ints = {308, 307, 301, 302}) public void defaultRedirectExpectedStatusCodes(int statusCode) throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: headers.put("Authorization", "12345"); HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, statusCode, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy()) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(200, response.getStatusCode()); assertNull(response.getHeaders().getValue("Authorization")); } @Test public void redirectForNAttempts() throws MalformedURLException { final int[] requestCount = {1}; RecordingHttpClient httpClient = new RecordingHttpClient(request -> { Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); requestCount[0]++; return Mono.just(new MockHttpResponse(request, 308, httpHeader)); }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy(5))) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(5, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectNonAllowedMethodTest() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy(5))) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.POST, new URL("http: assertEquals(1, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectAllowedStatusCodesTest() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() 
.httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy())) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(2, httpClient.getCount()); assertEquals(200, response.getStatusCode()); } @Test public void alreadyAttemptedUrlsTest() throws Exception { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy())) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(2, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectForProvidedHeader() throws MalformedURLException { final int[] requestCount = {1}; RecordingHttpClient httpClient = new RecordingHttpClient(request -> { Map<String, String> headers = new HashMap<>(); headers.put("Location1", "http: HttpHeaders httpHeader = new HttpHeaders(headers); requestCount[0]++; return Mono.just(new MockHttpResponse(request, 308, httpHeader)); }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy(5, "Location1", null))) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(5, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectForProvidedMethods() throws MalformedURLException { Set<HttpMethod> allowedMethods = new HashSet<HttpMethod>() { { add(HttpMethod.GET); add(HttpMethod.PUT); add(HttpMethod.POST); } }; final int[] requestCount = {1}; RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); request.setHttpMethod(HttpMethod.PUT); requestCount[0]++; return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else if (request.getUrl().toString().equals("http: && requestCount[0] == 2) { Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); request.setHttpMethod(HttpMethod.POST); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy(5, null, allowedMethods))) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(2, httpClient.getCount()); assertEquals(200, response.getStatusCode()); } @Test public void nullRedirectUrlTest() throws MalformedURLException { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> 
headers = new HashMap<>(); HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy(new DefaultRedirectStrategy())) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(1, httpClient.getCount()); assertEquals(308, response.getStatusCode()); } @Test public void redirectForMultipleRequests() throws MalformedURLException { RecordingHttpClient httpClient = new RecordingHttpClient(request -> { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); headers.put("Location", "http: HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 308, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } }); HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(httpClient) .policies(new RedirectPolicy()) .build(); HttpResponse response1 = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: HttpResponse response2 = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(4, httpClient.getCount()); assertEquals(200, response1.getStatusCode()); assertEquals(200, response2.getStatusCode()); } @Test public void nonRedirectRequest() throws MalformedURLException { final HttpPipeline pipeline = new HttpPipelineBuilder() .httpClient(new NoOpHttpClient() { @Override public Mono<HttpResponse> send(HttpRequest request) { if (request.getUrl().toString().equals("http: Map<String, String> headers = new HashMap<>(); HttpHeaders httpHeader = new HttpHeaders(headers); return Mono.just(new MockHttpResponse(request, 401, httpHeader)); } else { return Mono.just(new MockHttpResponse(request, 200)); } } }) .policies(new RedirectPolicy()) .build(); HttpResponse response = pipeline.send(new HttpRequest(HttpMethod.GET, new URL("http: assertEquals(401, response.getStatusCode()); } @Test static class RecordingHttpClient implements HttpClient { private final AtomicInteger count = new AtomicInteger(); private final Function<HttpRequest, Mono<HttpResponse>> handler; RecordingHttpClient(Function<HttpRequest, Mono<HttpResponse>> handler) { this.handler = handler; } @Override public Mono<HttpResponse> send(HttpRequest httpRequest) { count.getAndIncrement(); return handler.apply(httpRequest); } int getCount() { return count.get(); } void resetCount() { count.set(0); } } }
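On the 307/308 question above: the revised test already covers several codes with a JUnit 5 parameterized test; the standalone sketch below shows the same pattern in isolation. RedirectStatusCodesTest and isRedirect are hypothetical stand-ins, while @ParameterizedTest and @ValueSource are real JUnit 5 annotations.

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import static org.junit.jupiter.api.Assertions.assertTrue;

// Hypothetical sketch: JUnit 5 runs this method once per supplied status code,
// so 307 and 308 (plus the other default redirect codes) are all exercised.
class RedirectStatusCodesTest {

    @ParameterizedTest
    @ValueSource(ints = {301, 302, 307, 308})
    void statusCodeIsTreatedAsRedirect(int statusCode) {
        assertTrue(isRedirect(statusCode));
    }

    // Stand-in predicate for illustration; a real test would assert against the pipeline response.
    private static boolean isRedirect(int statusCode) {
        return statusCode == 301 || statusCode == 302 || statusCode == 307 || statusCode == 308;
    }
}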
It's possible people hold onto the same builder and build multiple clients from it. Initialising the identifier here means every client built from this builder instance will share the same identifier.
public EventHubClientBuilder() { transport = AmqpTransportType.AMQP; identifier = UUID.randomUUID().toString(); }
identifier = UUID.randomUUID().toString();
public EventHubClientBuilder() { transport = AmqpTransportType.AMQP; }
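The sketch below is a minimal, hypothetical illustration of the approach the revised builder takes (the class and method names here are illustrative, not the SDK's): resolve the identifier when a client is built, falling back to a fresh UUID per build, rather than fixing it once in the builder's constructor. This way, multiple clients built from the same builder do not end up sharing one identifier unless the caller explicitly supplies one.

```java
import java.util.UUID;

// Hypothetical sketch: identifier is resolved per build(), not per builder instance.
final class ClientBuilderSketch {
    private String identifier; // null unless explicitly set by the caller

    ClientBuilderSketch identifier(String identifier) {
        this.identifier = identifier;
        return this;
    }

    ClientSketch build() {
        // Fall back to a new random identifier for each built client.
        String resolved = (identifier == null || identifier.isEmpty())
            ? UUID.randomUUID().toString()
            : identifier;
        return new ClientSketch(resolved);
    }

    static final class ClientSketch {
        private final String identifier;

        ClientSketch(String identifier) {
            this.identifier = identifier;
        }

        String getIdentifier() {
            return identifier;
        }
    }

    public static void main(String[] args) {
        ClientBuilderSketch builder = new ClientBuilderSketch();
        // Two clients from the same builder get distinct identifiers.
        System.out.println(builder.build().getIdentifier());
        System.out.println(builder.build().getIdentifier());
    }
}
```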
class EventHubClientBuilder implements TokenCredentialTrait<EventHubClientBuilder>, AzureNamedKeyCredentialTrait<EventHubClientBuilder>, ConnectionStringTrait<EventHubClientBuilder>, AzureSasCredentialTrait<EventHubClientBuilder>, AmqpTrait<EventHubClientBuilder>, ConfigurationTrait<EventHubClientBuilder>, IdentifierTrait<EventHubClientBuilder> { static final int DEFAULT_PREFETCH_COUNT = 500; static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1; /** * The name of the default consumer group in the Event Hubs service. */ public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default"; /** * The minimum value allowed for the prefetch count of the consumer. */ private static final int MINIMUM_PREFETCH_COUNT = 1; /** * The maximum value allowed for the prefetch count of the consumer. */ private static final int MAXIMUM_PREFETCH_COUNT = 8000; private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties"; private static final String NAME_KEY = "name"; private static final String VERSION_KEY = "version"; private static final String UNKNOWN = "UNKNOWN"; private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING"; private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions() .setTryTimeout(ClientConstants.OPERATION_TIMEOUT); private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+"); private final ClientLogger logger = new ClientLogger(EventHubClientBuilder.class); private final Object connectionLock = new Object(); private final AtomicBoolean isSharedConnection = new AtomicBoolean(); private TokenCredential credentials; private Configuration configuration; private ProxyOptions proxyOptions; private AmqpRetryOptions retryOptions; private Scheduler scheduler; private AmqpTransportType transport; private String fullyQualifiedNamespace; private String eventHubName; private String consumerGroup; private EventHubConnectionProcessor eventHubConnectionProcessor; private Integer prefetchCount; private ClientOptions clientOptions; private SslDomain.VerifyMode verifyMode; private URL customEndpointAddress; private String identifier; /** * Keeps track of the open clients that were created from this builder when there is a shared connection. */ private final AtomicInteger openClients = new AtomicInteger(); /** * Creates a new instance with the default transport {@link AmqpTransportType * non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer * created using the builder. */ /** * Sets the credential information given a connection string to the Event Hub instance. * * <p> * If the connection string is copied from the Event Hubs namespace, it will likely not contain the name to the * desired Event Hub, which is needed. In this case, the name can be added manually by adding {@literal * "EntityPath=EVENT_HUB_NAME"} to the end of the connection string. For example, "EntityPath=telemetry-hub". * </p> * * <p> * If you have defined a shared access policy directly on the Event Hub itself, then copying the connection string * from that Event Hub will result in a connection string that contains the name. * </p> * * @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected * that the Event Hub name and the shared access key properties are contained in this connection string. * * @return The updated {@link EventHubClientBuilder} object. 
* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code * connectionString} does not contain the "EntityPath" key, which is the name of the Event Hub instance. * @throws AzureException If the shared access signature token credential could not be created using the * connection string. */ @Override public EventHubClientBuilder connectionString(String connectionString) { ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); TokenCredential tokenCredential = getTokenCredential(properties); return credential(properties.getEndpoint().getHost(), properties.getEntityPath(), tokenCredential); } private TokenCredential getTokenCredential(ConnectionStringProperties properties) { TokenCredential tokenCredential; if (properties.getSharedAccessSignature() == null) { tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(), properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY); } else { tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessSignature()); } return tokenCredential; } /** * Sets the client options. * * @param clientOptions The client options. * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * {@inheritDoc} */ @Override public EventHubClientBuilder identifier(String identifier) { this.identifier = Objects.requireNonNull(identifier, "'identifier' cannot be null."); return this; } /** * Sets the credential information given a connection string to the Event Hubs namespace and name to a specific * Event Hub instance. * * @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is * expected that the shared access key properties are contained in this connection string, but not the Event Hub * name. * @param eventHubName The name of the Event Hub to connect the client to. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null. * @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string. Or, * if the {@code connectionString} contains the Event Hub name. * @throws AzureException If the shared access signature token credential could not be created using the * connection string. */ public EventHubClientBuilder connectionString(String connectionString, String eventHubName) { Objects.requireNonNull(connectionString, "'connectionString' cannot be null."); Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (connectionString.isEmpty()) { throw logger.logExceptionAsError(new IllegalArgumentException( "'connectionString' cannot be an empty string.")); } else if (eventHubName.isEmpty()) { throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); TokenCredential tokenCredential = getTokenCredential(properties); if (!CoreUtils.isNullOrEmpty(properties.getEntityPath()) && !eventHubName.equals(properties.getEntityPath())) { throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "'connectionString' contains an Event Hub name [%s] and it does not match the given " + "'eventHubName' parameter [%s]. 
Please use the credentials(String connectionString) overload. " + "Or supply a 'connectionString' without 'EntityPath' in it.", properties.getEntityPath(), eventHubName))); } return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential); } /** * Sets the configuration store that is used during construction of the service client. * * If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use * {@link Configuration * * @param configuration The configuration store used to configure the {@link EventHubAsyncClient}. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network * does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through * an intermediary. For example: {@literal https: * <p> * If no port is specified, the default port for the {@link * used. * * @param customEndpointAddress The custom endpoint address. * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}. */ public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) { if (customEndpointAddress == null) { this.customEndpointAddress = null; return this; } try { this.customEndpointAddress = new URL(customEndpointAddress); } catch (MalformedURLException e) { throw logger.logExceptionAsError( new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e)); } return this; } /** * Sets the fully qualified name for the Event Hubs namespace. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} is an empty string. * @throws NullPointerException if {@code fullyQualifiedNamespace} is null. */ public EventHubClientBuilder fullyQualifiedNamespace(String fullyQualifiedNamespace) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw logger.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return this; } private String getAndValidateFullyQualifiedNamespace() { if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw logger.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return fullyQualifiedNamespace; } /** * Sets the name of the Event Hub to connect the client to. * * @param eventHubName The name of the Event Hub to connect the client to. * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code eventHubName} is an empty string. * @throws NullPointerException if {@code eventHubName} is null. 
*/ public EventHubClientBuilder eventHubName(String eventHubName) { this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(eventHubName)) { throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return this; } private String getEventHubName() { if (CoreUtils.isNullOrEmpty(eventHubName)) { throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return eventHubName; } /** * Toggles the builder to use the same connection for producers or consumers that are built from this instance. By * default, a new connection is constructed and used created for each Event Hub consumer or producer created. * * @return The updated {@link EventHubClientBuilder} object. */ public EventHubClientBuilder shareConnection() { this.isSharedConnection.set(true); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The token credential to use for authorization. Access controls may be specified by the * Event Hubs namespace or the requested Event Hub, depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. */ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, TokenCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw logger.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return this; } /** * Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java * <a href="https: * documentation for more details on proper usage of the {@link TokenCredential} type. * * @param credential The token credential to use for authorization. Access controls may be specified by the * Event Hubs namespace or the requested Event Hub, depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(TokenCredential credential) { this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null."); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. 
This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The shared access name and key credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. */ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, AzureNamedKeyCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw logger.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(), credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param credential The shared access name and key credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(AzureNamedKeyCredential credential) { Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(), credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The shared access signature credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. 
*/ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, AzureSasCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw logger.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw logger.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getSignature()); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param credential The shared access signature credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(AzureSasCredential credential) { Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getSignature()); return this; } /** * Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, {@link * AmqpTransportType * * @param proxyOptions The proxy configuration to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) { this.proxyOptions = proxyOptions; return this; } /** * Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is {@link * AmqpTransportType * * @param transport The transport type to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder transportType(AmqpTransportType transport) { this.transport = transport; return this; } /** * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used. * * @param retryOptions The retry policy to use. * * @return The updated {@link EventHubClientBuilder} object. * @deprecated Replaced by {@link */ @Deprecated public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used. * * @param retryOptions The retry policy to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder retryOptions(AmqpRetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the name of the consumer group this consumer is associated with. Events are read in the context of this * group. The name of the consumer group that is created by default is {@link * "$Default"}. * * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the * context of this group. The name of the consumer group that is created by default is {@link * * * @return The updated {@link EventHubClientBuilder} object. 
*/ public EventHubClientBuilder consumerGroup(String consumerGroup) { this.consumerGroup = consumerGroup; return this; } /** * Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive * and queue locally without regard to whether a receive operation is currently active. * * @param prefetchCount The amount of events to queue locally. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code prefetchCount} is less than {@link * greater than {@link */ public EventHubClientBuilder prefetchCount(int prefetchCount) { if (prefetchCount < MINIMUM_PREFETCH_COUNT) { throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT))); } if (prefetchCount > MAXIMUM_PREFETCH_COUNT) { throw logger.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT))); } this.prefetchCount = prefetchCount; return this; } /** * Package-private method that gets the prefetch count. * * @return Gets the prefetch count or {@code null} if it has not been set. * @see */ Integer getPrefetchCount() { return prefetchCount; } /** * Package-private method that sets the scheduler for the created Event Hub client. * * @param scheduler Scheduler to set. * * @return The updated {@link EventHubClientBuilder} object. */ EventHubClientBuilder scheduler(Scheduler scheduler) { this.scheduler = scheduler; return this; } /** * Package-private method that sets the verify mode for this connection. * * @param verifyMode The verification mode. * @return The updated {@link EventHubClientBuilder} object. */ EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) { this.verifyMode = verifyMode; return this; } /** * Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code * buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created. * * @return A new {@link EventHubConsumerAsyncClient} with the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * {@link * {@link AmqpTransportType */ public EventHubConsumerAsyncClient buildAsyncConsumerClient() { if (CoreUtils.isNullOrEmpty(consumerGroup)) { throw logger.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty " + "string. using EventHubClientBuilder.consumerGroup(String)")); } return buildAsyncClient().createConsumer(consumerGroup, prefetchCount); } /** * Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code * buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created. * * @return A new {@link EventHubConsumerClient} with the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * {@link * {@link AmqpTransportType */ public EventHubConsumerClient buildConsumerClient() { return buildClient().createConsumer(consumerGroup, prefetchCount); } /** * Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created. 
* * @return A new {@link EventHubProducerAsyncClient} instance with all the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * proxy is specified but the transport type is not {@link AmqpTransportType */ public EventHubProducerAsyncClient buildAsyncProducerClient() { return buildAsyncClient().createProducer(); } /** * Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created. * * @return A new {@link EventHubProducerClient} instance with all the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * proxy is specified but the transport type is not {@link AmqpTransportType */ public EventHubProducerClient buildProducerClient() { return buildClient().createProducer(); } /** * Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code * buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created. * * <p> * The following options are used if ones are not specified in the builder: * * <ul> * <li>If no configuration is specified, the {@link Configuration * is used to provide any shared configuration values. The configuration values read are the {@link * Configuration * ProxyOptions * <li>If no retry is specified, the default retry options are used.</li> * <li>If no proxy is specified, the builder checks the {@link Configuration * configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li> * <li>If no timeout is specified, a {@link ClientConstants * </ul> * * @return A new {@link EventHubAsyncClient} instance with all the configured options. * @throws IllegalArgumentException if the credentials have not been set using either {@link * * specified but the transport type is not {@link AmqpTransportType */ EventHubAsyncClient buildAsyncClient() { if (retryOptions == null) { retryOptions = DEFAULT_RETRY; } if (scheduler == null) { scheduler = Schedulers.boundedElastic(); } if (prefetchCount == null) { prefetchCount = DEFAULT_PREFETCH_COUNT; } final MessageSerializer messageSerializer = new EventHubMessageSerializer(); final EventHubConnectionProcessor processor; if (isSharedConnection.get()) { synchronized (connectionLock) { if (eventHubConnectionProcessor == null) { eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer); } } processor = eventHubConnectionProcessor; final int numberOfOpenClients = openClients.incrementAndGet(); logger.info(" } else { processor = buildConnectionProcessor(messageSerializer); } final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class)); return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler, isSharedConnection.get(), this::onClientClose, this.identifier); } /** * Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is * invoked, a new instance of {@link EventHubClient} is created. * * <p> * The following options are used if ones are not specified in the builder: * * <ul> * <li>If no configuration is specified, the {@link Configuration * is used to provide any shared configuration values. 
The configuration values read are the {@link * Configuration * ProxyOptions * <li>If no retry is specified, the default retry options are used.</li> * <li>If no proxy is specified, the builder checks the {@link Configuration * configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li> * <li>If no timeout is specified, a {@link ClientConstants * <li>If no scheduler is specified, an {@link Schedulers * </ul> * * @return A new {@link EventHubClient} instance with all the configured options. * @throws IllegalArgumentException if the credentials have not been set using either {@link * * specified but the transport type is not {@link AmqpTransportType */ EventHubClient buildClient() { if (prefetchCount == null) { prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT; } final EventHubAsyncClient client = buildAsyncClient(); return new EventHubClient(client, retryOptions); } void onClientClose() { synchronized (connectionLock) { final int numberOfOpenClients = openClients.decrementAndGet(); logger.info("Closing a dependent client. if (numberOfOpenClients > 0) { return; } if (numberOfOpenClients < 0) { logger.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients); } logger.info("No more open clients, closing shared connection."); if (eventHubConnectionProcessor != null) { eventHubConnectionProcessor.dispose(); eventHubConnectionProcessor = null; } else { logger.warning("Shared EventHubConnectionProcessor was already disposed."); } } } private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) { final ConnectionOptions connectionOptions = getConnectionOptions(); final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> { sink.onRequest(request -> { if (request == 0) { return; } else if (request > 1) { sink.error(logger.logExceptionAsWarning(new IllegalArgumentException( "Requested more than one connection. Only emitting one. Request: " + request))); return; } final String connectionId = StringUtil.getRandomString("MF"); logger.atInfo() .addKeyValue(CONNECTION_ID_KEY, connectionId) .log("Emitting a single connection."); final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider( connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(), connectionOptions.getAuthorizationScope()); final ReactorProvider provider = new ReactorProvider(); final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider); final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId, connectionOptions, getEventHubName(), provider, handlerProvider, tokenManagerProvider, messageSerializer); sink.next(connection); }); }); return connectionFlux.subscribeWith(new EventHubConnectionProcessor( connectionOptions.getFullyQualifiedNamespace(), getEventHubName(), connectionOptions.getRetry())); } private ConnectionOptions getConnectionOptions() { Configuration buildConfiguration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration; if (credentials == null) { final String connectionString = buildConfiguration.get(AZURE_EVENT_HUBS_CONNECTION_STRING); if (CoreUtils.isNullOrEmpty(connectionString)) { throw logger.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. 
" + "They can be set using: connectionString(String), connectionString(String, String), " + "credentials(String, String, TokenCredential), or setting the environment variable '" + AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string")); } connectionString(connectionString); } if (proxyOptions == null) { proxyOptions = getDefaultProxyConfiguration(buildConfiguration); } if (proxyOptions != null && proxyOptions.isProxyAddressConfigured() && transport != AmqpTransportType.AMQP_WEB_SOCKETS) { throw logger.logExceptionAsError(new IllegalArgumentException( "Cannot use a proxy when TransportType is not AMQP Web Sockets.")); } final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential ? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE : CbsAuthorizationType.JSON_WEB_TOKEN; final SslDomain.VerifyMode verificationMode = verifyMode != null ? verifyMode : SslDomain.VerifyMode.VERIFY_PEER_NAME; final ClientOptions options = clientOptions != null ? clientOptions : new ClientOptions(); final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE); final String product = properties.getOrDefault(NAME_KEY, UNKNOWN); final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN); if (customEndpointAddress == null) { return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType, ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler, options, verificationMode, product, clientVersion); } else { return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType, ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler, options, verificationMode, product, clientVersion, customEndpointAddress.getHost(), customEndpointAddress.getPort()); } } private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) { ProxyAuthenticationType authentication = ProxyAuthenticationType.NONE; if (proxyOptions != null) { authentication = proxyOptions.getAuthentication(); } String proxyAddress = configuration.get(Configuration.PROPERTY_HTTP_PROXY); if (CoreUtils.isNullOrEmpty(proxyAddress)) { return ProxyOptions.SYSTEM_DEFAULTS; } return getProxyOptions(authentication, proxyAddress, configuration, Boolean.parseBoolean(configuration.get("java.net.useSystemProxies"))); } private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress, Configuration configuration, boolean useSystemProxies) { String host; int port; if (HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) { final String[] hostPort = proxyAddress.split(":"); host = hostPort[0]; port = Integer.parseInt(hostPort[1]); final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port)); final String username = configuration.get(ProxyOptions.PROXY_USERNAME); final String password = configuration.get(ProxyOptions.PROXY_PASSWORD); return new ProxyOptions(authentication, proxy, username, password); } else if (useSystemProxies) { com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions .fromConfiguration(configuration); Proxy.Type proxyType = coreProxyOptions.getType().toProxyType(); InetSocketAddress coreProxyAddress = coreProxyOptions.getAddress(); String username = coreProxyOptions.getUsername(); String password = coreProxyOptions.getPassword(); return new ProxyOptions(authentication, new Proxy(proxyType, coreProxyAddress), username, password); } else { 
logger.verbose("'HTTP_PROXY' was configured but ignored as 'java.net.useSystemProxies' wasn't " + "set or was false."); return ProxyOptions.SYSTEM_DEFAULTS; } } }
class EventHubClientBuilder implements TokenCredentialTrait<EventHubClientBuilder>, AzureNamedKeyCredentialTrait<EventHubClientBuilder>, ConnectionStringTrait<EventHubClientBuilder>, AzureSasCredentialTrait<EventHubClientBuilder>, AmqpTrait<EventHubClientBuilder>, ConfigurationTrait<EventHubClientBuilder> { static final int DEFAULT_PREFETCH_COUNT = 500; static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1; /** * The name of the default consumer group in the Event Hubs service. */ public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default"; /** * The minimum value allowed for the prefetch count of the consumer. */ private static final int MINIMUM_PREFETCH_COUNT = 1; /** * The maximum value allowed for the prefetch count of the consumer. */ private static final int MAXIMUM_PREFETCH_COUNT = 8000; private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties"; private static final String NAME_KEY = "name"; private static final String VERSION_KEY = "version"; private static final String UNKNOWN = "UNKNOWN"; private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING"; private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions() .setTryTimeout(ClientConstants.OPERATION_TIMEOUT); private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+"); private static final ClientLogger LOGGER = new ClientLogger(EventHubClientBuilder.class); private final Object connectionLock = new Object(); private final AtomicBoolean isSharedConnection = new AtomicBoolean(); private TokenCredential credentials; private Configuration configuration; private ProxyOptions proxyOptions; private AmqpRetryOptions retryOptions; private Scheduler scheduler; private AmqpTransportType transport; private String fullyQualifiedNamespace; private String eventHubName; private String consumerGroup; private EventHubConnectionProcessor eventHubConnectionProcessor; private Integer prefetchCount; private ClientOptions clientOptions; private SslDomain.VerifyMode verifyMode; private URL customEndpointAddress; /** * Keeps track of the open clients that were created from this builder when there is a shared connection. */ private final AtomicInteger openClients = new AtomicInteger(); /** * Creates a new instance with the default transport {@link AmqpTransportType * non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer * created using the builder. */ /** * Sets the credential information given a connection string to the Event Hub instance. * * <p> * If the connection string is copied from the Event Hubs namespace, it will likely not contain the name to the * desired Event Hub, which is needed. In this case, the name can be added manually by adding {@literal * "EntityPath=EVENT_HUB_NAME"} to the end of the connection string. For example, "EntityPath=telemetry-hub". * </p> * * <p> * If you have defined a shared access policy directly on the Event Hub itself, then copying the connection string * from that Event Hub will result in a connection string that contains the name. * </p> * * @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected * that the Event Hub name and the shared access key properties are contained in this connection string. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code connectionString} is null or empty. 
Or, the {@code * connectionString} does not contain the "EntityPath" key, which is the name of the Event Hub instance. * @throws AzureException If the shared access signature token credential could not be created using the * connection string. */ @Override public EventHubClientBuilder connectionString(String connectionString) { ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); TokenCredential tokenCredential = getTokenCredential(properties); return credential(properties.getEndpoint().getHost(), properties.getEntityPath(), tokenCredential); } private TokenCredential getTokenCredential(ConnectionStringProperties properties) { TokenCredential tokenCredential; if (properties.getSharedAccessSignature() == null) { tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(), properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY); } else { tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessSignature()); } return tokenCredential; } /** * Sets the client options. * * @param clientOptions The client options. * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Sets the credential information given a connection string to the Event Hubs namespace and name to a specific * Event Hub instance. * * @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is * expected that the shared access key properties are contained in this connection string, but not the Event Hub * name. * @param eventHubName The name of the Event Hub to connect the client to. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null. * @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string. Or, * if the {@code connectionString} contains the Event Hub name. * @throws AzureException If the shared access signature token credential could not be created using the * connection string. */ public EventHubClientBuilder connectionString(String connectionString, String eventHubName) { Objects.requireNonNull(connectionString, "'connectionString' cannot be null."); Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (connectionString.isEmpty()) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "'connectionString' cannot be an empty string.")); } else if (eventHubName.isEmpty()) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); TokenCredential tokenCredential = getTokenCredential(properties); if (!CoreUtils.isNullOrEmpty(properties.getEntityPath()) && !eventHubName.equals(properties.getEntityPath())) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "'connectionString' contains an Event Hub name [%s] and it does not match the given " + "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. 
" + "Or supply a 'connectionString' without 'EntityPath' in it.", properties.getEntityPath(), eventHubName))); } return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential); } /** * Sets the configuration store that is used during construction of the service client. * * If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use * {@link Configuration * * @param configuration The configuration store used to configure the {@link EventHubAsyncClient}. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network * does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through * an intermediary. For example: {@literal https: * <p> * If no port is specified, the default port for the {@link * used. * * @param customEndpointAddress The custom endpoint address. * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}. */ public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) { if (customEndpointAddress == null) { this.customEndpointAddress = null; return this; } try { this.customEndpointAddress = new URL(customEndpointAddress); } catch (MalformedURLException e) { throw LOGGER.logExceptionAsError( new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e)); } return this; } /** * Sets the fully qualified name for the Event Hubs namespace. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} is an empty string. * @throws NullPointerException if {@code fullyQualifiedNamespace} is null. */ public EventHubClientBuilder fullyQualifiedNamespace(String fullyQualifiedNamespace) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return this; } private String getAndValidateFullyQualifiedNamespace() { if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return fullyQualifiedNamespace; } /** * Sets the name of the Event Hub to connect the client to. * * @param eventHubName The name of the Event Hub to connect the client to. * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code eventHubName} is an empty string. * @throws NullPointerException if {@code eventHubName} is null. 
*/ public EventHubClientBuilder eventHubName(String eventHubName) { this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return this; } private String getEventHubName() { if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return eventHubName; } /** * Toggles the builder to use the same connection for producers or consumers that are built from this instance. By * default, a new connection is constructed and used created for each Event Hub consumer or producer created. * * @return The updated {@link EventHubClientBuilder} object. */ public EventHubClientBuilder shareConnection() { this.isSharedConnection.set(true); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The token credential to use for authorization. Access controls may be specified by the * Event Hubs namespace or the requested Event Hub, depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. */ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, TokenCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return this; } /** * Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java * <a href="https: * documentation for more details on proper usage of the {@link TokenCredential} type. * * @param credential The token credential to use for authorization. Access controls may be specified by the * Event Hubs namespace or the requested Event Hub, depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(TokenCredential credential) { this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null."); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. 
This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The shared access name and key credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. */ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, AzureNamedKeyCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(), credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param credential The shared access name and key credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(AzureNamedKeyCredential credential) { Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(), credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The shared access signature credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. 
*/ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, AzureSasCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getSignature()); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param credential The shared access signature credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(AzureSasCredential credential) { Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getSignature()); return this; } /** * Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, {@link * AmqpTransportType * * @param proxyOptions The proxy configuration to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) { this.proxyOptions = proxyOptions; return this; } /** * Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is {@link * AmqpTransportType * * @param transport The transport type to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder transportType(AmqpTransportType transport) { this.transport = transport; return this; } /** * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used. * * @param retryOptions The retry policy to use. * * @return The updated {@link EventHubClientBuilder} object. * @deprecated Replaced by {@link */ @Deprecated public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used. * * @param retryOptions The retry policy to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder retryOptions(AmqpRetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the name of the consumer group this consumer is associated with. Events are read in the context of this * group. The name of the consumer group that is created by default is {@link * "$Default"}. * * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the * context of this group. The name of the consumer group that is created by default is {@link * * * @return The updated {@link EventHubClientBuilder} object. 
*/ public EventHubClientBuilder consumerGroup(String consumerGroup) { this.consumerGroup = consumerGroup; return this; } /** * Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive * and queue locally without regard to whether a receive operation is currently active. * * @param prefetchCount The amount of events to queue locally. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code prefetchCount} is less than {@link * greater than {@link */ public EventHubClientBuilder prefetchCount(int prefetchCount) { if (prefetchCount < MINIMUM_PREFETCH_COUNT) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT))); } if (prefetchCount > MAXIMUM_PREFETCH_COUNT) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT))); } this.prefetchCount = prefetchCount; return this; } /** * Package-private method that gets the prefetch count. * * @return Gets the prefetch count or {@code null} if it has not been set. * @see */ Integer getPrefetchCount() { return prefetchCount; } /** * Package-private method that sets the scheduler for the created Event Hub client. * * @param scheduler Scheduler to set. * * @return The updated {@link EventHubClientBuilder} object. */ EventHubClientBuilder scheduler(Scheduler scheduler) { this.scheduler = scheduler; return this; } /** * Package-private method that sets the verify mode for this connection. * * @param verifyMode The verification mode. * @return The updated {@link EventHubClientBuilder} object. */ EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) { this.verifyMode = verifyMode; return this; } /** * Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code * buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created. * * @return A new {@link EventHubConsumerAsyncClient} with the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * {@link * {@link AmqpTransportType */ public EventHubConsumerAsyncClient buildAsyncConsumerClient() { if (CoreUtils.isNullOrEmpty(consumerGroup)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty " + "string. using EventHubClientBuilder.consumerGroup(String)")); } return buildAsyncClient().createConsumer(consumerGroup, prefetchCount); } /** * Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code * buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created. * * @return A new {@link EventHubConsumerClient} with the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * {@link * {@link AmqpTransportType */ public EventHubConsumerClient buildConsumerClient() { return buildClient().createConsumer(consumerGroup, prefetchCount); } /** * Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created. 
* * @return A new {@link EventHubProducerAsyncClient} instance with all the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * proxy is specified but the transport type is not {@link AmqpTransportType */ public EventHubProducerAsyncClient buildAsyncProducerClient() { return buildAsyncClient().createProducer(); } /** * Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created. * * @return A new {@link EventHubProducerClient} instance with all the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * proxy is specified but the transport type is not {@link AmqpTransportType */ public EventHubProducerClient buildProducerClient() { return buildClient().createProducer(); } /** * Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code * buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created. * * <p> * The following options are used if ones are not specified in the builder: * * <ul> * <li>If no configuration is specified, the {@link Configuration * is used to provide any shared configuration values. The configuration values read are the {@link * Configuration * ProxyOptions * <li>If no retry is specified, the default retry options are used.</li> * <li>If no proxy is specified, the builder checks the {@link Configuration * configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li> * <li>If no timeout is specified, a {@link ClientConstants * </ul> * * @return A new {@link EventHubAsyncClient} instance with all the configured options. * @throws IllegalArgumentException if the credentials have not been set using either {@link * * specified but the transport type is not {@link AmqpTransportType */ EventHubAsyncClient buildAsyncClient() { if (retryOptions == null) { retryOptions = DEFAULT_RETRY; } if (scheduler == null) { scheduler = Schedulers.boundedElastic(); } if (prefetchCount == null) { prefetchCount = DEFAULT_PREFETCH_COUNT; } final MessageSerializer messageSerializer = new EventHubMessageSerializer(); final EventHubConnectionProcessor processor; if (isSharedConnection.get()) { synchronized (connectionLock) { if (eventHubConnectionProcessor == null) { eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer); } } processor = eventHubConnectionProcessor; final int numberOfOpenClients = openClients.incrementAndGet(); LOGGER.info(" } else { processor = buildConnectionProcessor(messageSerializer); } final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class)); String identifier; if (clientOptions instanceof AmqpClientOptions) { String clientOptionIdentifier = ((AmqpClientOptions) clientOptions).getIdentifier(); identifier = CoreUtils.isNullOrEmpty(clientOptionIdentifier) ? UUID.randomUUID().toString() : clientOptionIdentifier; } else { identifier = UUID.randomUUID().toString(); } return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler, isSharedConnection.get(), this::onClientClose, identifier); } /** * Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is * invoked, a new instance of {@link EventHubClient} is created. 
* * <p> * The following options are used if ones are not specified in the builder: * * <ul> * <li>If no configuration is specified, the {@link Configuration * is used to provide any shared configuration values. The configuration values read are the {@link * Configuration * ProxyOptions * <li>If no retry is specified, the default retry options are used.</li> * <li>If no proxy is specified, the builder checks the {@link Configuration * configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li> * <li>If no timeout is specified, a {@link ClientConstants * <li>If no scheduler is specified, an {@link Schedulers * </ul> * * @return A new {@link EventHubClient} instance with all the configured options. * @throws IllegalArgumentException if the credentials have not been set using either {@link * * specified but the transport type is not {@link AmqpTransportType */ EventHubClient buildClient() { if (prefetchCount == null) { prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT; } final EventHubAsyncClient client = buildAsyncClient(); return new EventHubClient(client, retryOptions); } void onClientClose() { synchronized (connectionLock) { final int numberOfOpenClients = openClients.decrementAndGet(); LOGGER.info("Closing a dependent client. if (numberOfOpenClients > 0) { return; } if (numberOfOpenClients < 0) { LOGGER.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients); } LOGGER.info("No more open clients, closing shared connection."); if (eventHubConnectionProcessor != null) { eventHubConnectionProcessor.dispose(); eventHubConnectionProcessor = null; } else { LOGGER.warning("Shared EventHubConnectionProcessor was already disposed."); } } } private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) { final ConnectionOptions connectionOptions = getConnectionOptions(); final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> { sink.onRequest(request -> { if (request == 0) { return; } else if (request > 1) { sink.error(LOGGER.logExceptionAsWarning(new IllegalArgumentException( "Requested more than one connection. Only emitting one. Request: " + request))); return; } final String connectionId = StringUtil.getRandomString("MF"); LOGGER.atInfo() .addKeyValue(CONNECTION_ID_KEY, connectionId) .log("Emitting a single connection."); final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider( connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(), connectionOptions.getAuthorizationScope()); final ReactorProvider provider = new ReactorProvider(); final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider); final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId, connectionOptions, getEventHubName(), provider, handlerProvider, tokenManagerProvider, messageSerializer); sink.next(connection); }); }); return connectionFlux.subscribeWith(new EventHubConnectionProcessor( connectionOptions.getFullyQualifiedNamespace(), getEventHubName(), connectionOptions.getRetry())); } private ConnectionOptions getConnectionOptions() { Configuration buildConfiguration = configuration == null ? 
Configuration.getGlobalConfiguration().clone() : configuration; if (credentials == null) { final String connectionString = buildConfiguration.get(AZURE_EVENT_HUBS_CONNECTION_STRING); if (CoreUtils.isNullOrEmpty(connectionString)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. " + "They can be set using: connectionString(String), connectionString(String, String), " + "credentials(String, String, TokenCredential), or setting the environment variable '" + AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string")); } connectionString(connectionString); } if (proxyOptions == null) { proxyOptions = getDefaultProxyConfiguration(buildConfiguration); } if (proxyOptions != null && proxyOptions.isProxyAddressConfigured() && transport != AmqpTransportType.AMQP_WEB_SOCKETS) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "Cannot use a proxy when TransportType is not AMQP Web Sockets.")); } final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential ? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE : CbsAuthorizationType.JSON_WEB_TOKEN; final SslDomain.VerifyMode verificationMode = verifyMode != null ? verifyMode : SslDomain.VerifyMode.VERIFY_PEER_NAME; final ClientOptions options = clientOptions != null ? clientOptions : new ClientOptions(); final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE); final String product = properties.getOrDefault(NAME_KEY, UNKNOWN); final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN); if (customEndpointAddress == null) { return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType, ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler, options, verificationMode, product, clientVersion); } else { return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType, ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler, options, verificationMode, product, clientVersion, customEndpointAddress.getHost(), customEndpointAddress.getPort()); } } private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) { ProxyAuthenticationType authentication = ProxyAuthenticationType.NONE; if (proxyOptions != null) { authentication = proxyOptions.getAuthentication(); } String proxyAddress = configuration.get(Configuration.PROPERTY_HTTP_PROXY); if (CoreUtils.isNullOrEmpty(proxyAddress)) { return ProxyOptions.SYSTEM_DEFAULTS; } return getProxyOptions(authentication, proxyAddress, configuration, Boolean.parseBoolean(configuration.get("java.net.useSystemProxies"))); } private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress, Configuration configuration, boolean useSystemProxies) { String host; int port; if (HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) { final String[] hostPort = proxyAddress.split(":"); host = hostPort[0]; port = Integer.parseInt(hostPort[1]); final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port)); final String username = configuration.get(ProxyOptions.PROXY_USERNAME); final String password = configuration.get(ProxyOptions.PROXY_PASSWORD); return new ProxyOptions(authentication, proxy, username, password); } else if (useSystemProxies) { com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions .fromConfiguration(configuration); Proxy.Type 
proxyType = coreProxyOptions.getType().toProxyType(); InetSocketAddress coreProxyAddress = coreProxyOptions.getAddress(); String username = coreProxyOptions.getUsername(); String password = coreProxyOptions.getPassword(); return new ProxyOptions(authentication, new Proxy(proxyType, coreProxyAddress), username, password); } else { LOGGER.verbose("'HTTP_PROXY' was configured but ignored as 'java.net.useSystemProxies' wasn't " + "set or was false."); return ProxyOptions.SYSTEM_DEFAULTS; } } }
We should allow setting `null` if the user wants to clear the previously set identifier.
public AmqpClientOptions setIdentifier(String identifier) { this.identifier = Objects.requireNonNull(identifier, "'identifier' cannot be null."); return this; }
this.identifier = Objects.requireNonNull(identifier, "'identifier' cannot be null.");
public AmqpClientOptions setIdentifier(String identifier) { this.identifier = identifier; return this; }
class AmqpClientOptions extends ClientOptions { private String identifier = UUID.randomUUID().toString(); /** {@inheritDoc} **/ @Override public ClientOptions setApplicationId(String applicationId) { super.setApplicationId(applicationId); return this; } /** {@inheritDoc} **/ @Override public ClientOptions setHeaders(Iterable<Header> headers) { super.setHeaders(headers); return this; } /** * Gets the identifier for the amqp client. * @return Amqp client identifier. */ public String getIdentifier() { return identifier; } /** * Sets the identifier for the amqp client. * @param identifier a specific string to identify amqp client. * @return The updated {@link AmqpRetryOptions} object. */ }
class AmqpClientOptions extends ClientOptions { private String identifier; /** {@inheritDoc} **/ @Override public AmqpClientOptions setApplicationId(String applicationId) { super.setApplicationId(applicationId); return this; } /** {@inheritDoc} **/ @Override public AmqpClientOptions setHeaders(Iterable<Header> headers) { super.setHeaders(headers); return this; } /** * Gets the identifier for the AMQP client. * @return AMQP client identifier. */ public String getIdentifier() { return identifier; } /** * Sets the identifier for the AMQP client. * @param identifier A specific string to identify AMQP client. If null or empty, a UUID will be used as the * identifier. * @return The updated {@link AmqpClientOptions} object. */ }
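The record above relaxes `AmqpClientOptions.setIdentifier(String)` from `Objects.requireNonNull(...)` to a plain assignment, so a caller can pass `null` to clear a previously set identifier and let the builder fall back to a generated UUID. The sketch below is illustrative only and not part of the record: the import package names and the placeholder connection-string values are assumptions, and it simply shows the usage the review comment asks for.

```java
// Illustrative usage sketch (assumed package names from azure-core-amqp /
// azure-messaging-eventhubs; connection-string values are placeholders).
import com.azure.core.amqp.AmqpClientOptions;
import com.azure.messaging.eventhubs.EventHubClientBuilder;
import com.azure.messaging.eventhubs.EventHubProducerAsyncClient;

public class ClearIdentifierSample {
    public static void main(String[] args) {
        AmqpClientOptions options = new AmqpClientOptions();
        options.setIdentifier("my-producer");  // explicit client identifier

        // With the updated setter this no longer throws NullPointerException;
        // it clears the identifier so the builder generates a UUID instead.
        options.setIdentifier(null);

        EventHubProducerAsyncClient producer = new EventHubClientBuilder()
            .connectionString("<connection-string>", "<event-hub-name>")  // placeholders
            .clientOptions(options)
            .buildAsyncProducerClient();

        producer.close();
    }
}
```

Combined with the `CoreUtils.isNullOrEmpty(...)` check in the updated `buildAsyncClient()` (next record), an empty-string identifier is treated the same way as `null`.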
Are we not doing this in `EventProcessorClientBuilder`?
EventHubAsyncClient buildAsyncClient() { if (retryOptions == null) { retryOptions = DEFAULT_RETRY; } if (scheduler == null) { scheduler = Schedulers.boundedElastic(); } if (prefetchCount == null) { prefetchCount = DEFAULT_PREFETCH_COUNT; } final MessageSerializer messageSerializer = new EventHubMessageSerializer(); final EventHubConnectionProcessor processor; if (isSharedConnection.get()) { synchronized (connectionLock) { if (eventHubConnectionProcessor == null) { eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer); } } processor = eventHubConnectionProcessor; final int numberOfOpenClients = openClients.incrementAndGet(); LOGGER.info(" } else { processor = buildConnectionProcessor(messageSerializer); } final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class)); String identifier; if (clientOptions != null && clientOptions instanceof AmqpClientOptions) { String clientOptionIdentifier = ((AmqpClientOptions) clientOptions).getIdentifier(); identifier = clientOptionIdentifier == null ? UUID.randomUUID().toString() : clientOptionIdentifier; } else { identifier = UUID.randomUUID().toString(); } return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler, isSharedConnection.get(), this::onClientClose, identifier); }
}
EventHubAsyncClient buildAsyncClient() { if (retryOptions == null) { retryOptions = DEFAULT_RETRY; } if (scheduler == null) { scheduler = Schedulers.boundedElastic(); } if (prefetchCount == null) { prefetchCount = DEFAULT_PREFETCH_COUNT; } final MessageSerializer messageSerializer = new EventHubMessageSerializer(); final EventHubConnectionProcessor processor; if (isSharedConnection.get()) { synchronized (connectionLock) { if (eventHubConnectionProcessor == null) { eventHubConnectionProcessor = buildConnectionProcessor(messageSerializer); } } processor = eventHubConnectionProcessor; final int numberOfOpenClients = openClients.incrementAndGet(); LOGGER.info(" } else { processor = buildConnectionProcessor(messageSerializer); } final TracerProvider tracerProvider = new TracerProvider(ServiceLoader.load(Tracer.class)); String identifier; if (clientOptions instanceof AmqpClientOptions) { String clientOptionIdentifier = ((AmqpClientOptions) clientOptions).getIdentifier(); identifier = CoreUtils.isNullOrEmpty(clientOptionIdentifier) ? UUID.randomUUID().toString() : clientOptionIdentifier; } else { identifier = UUID.randomUUID().toString(); } return new EventHubAsyncClient(processor, tracerProvider, messageSerializer, scheduler, isSharedConnection.get(), this::onClientClose, identifier); }
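In the updated `buildAsyncClient()` body above, the redundant `clientOptions != null &&` guard is dropped (since `null instanceof AmqpClientOptions` is already `false`) and the identifier check is widened from `== null` to `CoreUtils.isNullOrEmpty(...)`, so both `null` and empty identifiers fall back to a random UUID. Below is a self-contained, hedged sketch of just that fallback step; `AmqpClientOptionsStub` and `resolveIdentifier(...)` are local stand-ins for illustration, not SDK types or methods.

```java
import java.util.UUID;

// Self-contained sketch of the identifier fallback in the updated buildAsyncClient().
// AmqpClientOptionsStub stands in for the real AmqpClientOptions; only getIdentifier()
// matters for this example.
final class IdentifierFallbackSketch {
    static final class AmqpClientOptionsStub {
        private final String identifier;
        AmqpClientOptionsStub(String identifier) { this.identifier = identifier; }
        String getIdentifier() { return identifier; }
    }

    // Mirrors the instanceof check plus CoreUtils.isNullOrEmpty(...) from the record.
    static String resolveIdentifier(Object clientOptions) {
        if (clientOptions instanceof AmqpClientOptionsStub) {
            String candidate = ((AmqpClientOptionsStub) clientOptions).getIdentifier();
            if (candidate != null && !candidate.isEmpty()) {
                return candidate;            // caller-supplied identifier wins
            }
        }
        return UUID.randomUUID().toString(); // null, empty, or non-AMQP options -> random UUID
    }

    public static void main(String[] args) {
        System.out.println(resolveIdentifier(new AmqpClientOptionsStub("my-client"))); // my-client
        System.out.println(resolveIdentifier(new AmqpClientOptionsStub(null)));        // random UUID
        System.out.println(resolveIdentifier(null));                                   // random UUID
    }
}
```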
class EventHubClientBuilder implements TokenCredentialTrait<EventHubClientBuilder>, AzureNamedKeyCredentialTrait<EventHubClientBuilder>, ConnectionStringTrait<EventHubClientBuilder>, AzureSasCredentialTrait<EventHubClientBuilder>, AmqpTrait<EventHubClientBuilder>, ConfigurationTrait<EventHubClientBuilder> { static final int DEFAULT_PREFETCH_COUNT = 500; static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1; /** * The name of the default consumer group in the Event Hubs service. */ public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default"; /** * The minimum value allowed for the prefetch count of the consumer. */ private static final int MINIMUM_PREFETCH_COUNT = 1; /** * The maximum value allowed for the prefetch count of the consumer. */ private static final int MAXIMUM_PREFETCH_COUNT = 8000; private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties"; private static final String NAME_KEY = "name"; private static final String VERSION_KEY = "version"; private static final String UNKNOWN = "UNKNOWN"; private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING"; private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions() .setTryTimeout(ClientConstants.OPERATION_TIMEOUT); private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+"); private static final ClientLogger LOGGER = new ClientLogger(EventHubClientBuilder.class); private final Object connectionLock = new Object(); private final AtomicBoolean isSharedConnection = new AtomicBoolean(); private TokenCredential credentials; private Configuration configuration; private ProxyOptions proxyOptions; private AmqpRetryOptions retryOptions; private Scheduler scheduler; private AmqpTransportType transport; private String fullyQualifiedNamespace; private String eventHubName; private String consumerGroup; private EventHubConnectionProcessor eventHubConnectionProcessor; private Integer prefetchCount; private ClientOptions clientOptions; private SslDomain.VerifyMode verifyMode; private URL customEndpointAddress; /** * Keeps track of the open clients that were created from this builder when there is a shared connection. */ private final AtomicInteger openClients = new AtomicInteger(); /** * Creates a new instance with the default transport {@link AmqpTransportType * non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer * created using the builder. */ public EventHubClientBuilder() { transport = AmqpTransportType.AMQP; } /** * Sets the credential information given a connection string to the Event Hub instance. * * <p> * If the connection string is copied from the Event Hubs namespace, it will likely not contain the name to the * desired Event Hub, which is needed. In this case, the name can be added manually by adding {@literal * "EntityPath=EVENT_HUB_NAME"} to the end of the connection string. For example, "EntityPath=telemetry-hub". * </p> * * <p> * If you have defined a shared access policy directly on the Event Hub itself, then copying the connection string * from that Event Hub will result in a connection string that contains the name. * </p> * * @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected * that the Event Hub name and the shared access key properties are contained in this connection string. * * @return The updated {@link EventHubClientBuilder} object. 
* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code * connectionString} does not contain the "EntityPath" key, which is the name of the Event Hub instance. * @throws AzureException If the shared access signature token credential could not be created using the * connection string. */ @Override public EventHubClientBuilder connectionString(String connectionString) { ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); TokenCredential tokenCredential = getTokenCredential(properties); return credential(properties.getEndpoint().getHost(), properties.getEntityPath(), tokenCredential); } private TokenCredential getTokenCredential(ConnectionStringProperties properties) { TokenCredential tokenCredential; if (properties.getSharedAccessSignature() == null) { tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(), properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY); } else { tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessSignature()); } return tokenCredential; } /** * Sets the client options. * * @param clientOptions The client options. * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Sets the credential information given a connection string to the Event Hubs namespace and name to a specific * Event Hub instance. * * @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is * expected that the shared access key properties are contained in this connection string, but not the Event Hub * name. * @param eventHubName The name of the Event Hub to connect the client to. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null. * @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string. Or, * if the {@code connectionString} contains the Event Hub name. * @throws AzureException If the shared access signature token credential could not be created using the * connection string. */ public EventHubClientBuilder connectionString(String connectionString, String eventHubName) { Objects.requireNonNull(connectionString, "'connectionString' cannot be null."); Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (connectionString.isEmpty()) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "'connectionString' cannot be an empty string.")); } else if (eventHubName.isEmpty()) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); TokenCredential tokenCredential = getTokenCredential(properties); if (!CoreUtils.isNullOrEmpty(properties.getEntityPath()) && !eventHubName.equals(properties.getEntityPath())) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "'connectionString' contains an Event Hub name [%s] and it does not match the given " + "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. 
" + "Or supply a 'connectionString' without 'EntityPath' in it.", properties.getEntityPath(), eventHubName))); } return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential); } /** * Sets the configuration store that is used during construction of the service client. * * If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use * {@link Configuration * * @param configuration The configuration store used to configure the {@link EventHubAsyncClient}. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network * does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through * an intermediary. For example: {@literal https: * <p> * If no port is specified, the default port for the {@link * used. * * @param customEndpointAddress The custom endpoint address. * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}. */ public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) { if (customEndpointAddress == null) { this.customEndpointAddress = null; return this; } try { this.customEndpointAddress = new URL(customEndpointAddress); } catch (MalformedURLException e) { throw LOGGER.logExceptionAsError( new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e)); } return this; } /** * Sets the fully qualified name for the Event Hubs namespace. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} is an empty string. * @throws NullPointerException if {@code fullyQualifiedNamespace} is null. */ public EventHubClientBuilder fullyQualifiedNamespace(String fullyQualifiedNamespace) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return this; } private String getAndValidateFullyQualifiedNamespace() { if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return fullyQualifiedNamespace; } /** * Sets the name of the Event Hub to connect the client to. * * @param eventHubName The name of the Event Hub to connect the client to. * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code eventHubName} is an empty string. * @throws NullPointerException if {@code eventHubName} is null. 
*/ public EventHubClientBuilder eventHubName(String eventHubName) { this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return this; } private String getEventHubName() { if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return eventHubName; } /** * Toggles the builder to use the same connection for producers or consumers that are built from this instance. By * default, a new connection is constructed and used created for each Event Hub consumer or producer created. * * @return The updated {@link EventHubClientBuilder} object. */ public EventHubClientBuilder shareConnection() { this.isSharedConnection.set(true); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The token credential to use for authorization. Access controls may be specified by the * Event Hubs namespace or the requested Event Hub, depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. */ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, TokenCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return this; } /** * Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java * <a href="https: * documentation for more details on proper usage of the {@link TokenCredential} type. * * @param credential The token credential to use for authorization. Access controls may be specified by the * Event Hubs namespace or the requested Event Hub, depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(TokenCredential credential) { this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null."); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. 
This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The shared access name and key credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. */ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, AzureNamedKeyCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(), credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param credential The shared access name and key credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(AzureNamedKeyCredential credential) { Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(), credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The shared access signature credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. 
*/ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, AzureSasCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getSignature()); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param credential The shared access signature credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(AzureSasCredential credential) { Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getSignature()); return this; } /** * Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, {@link * AmqpTransportType * * @param proxyOptions The proxy configuration to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) { this.proxyOptions = proxyOptions; return this; } /** * Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is {@link * AmqpTransportType * * @param transport The transport type to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder transportType(AmqpTransportType transport) { this.transport = transport; return this; } /** * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used. * * @param retryOptions The retry policy to use. * * @return The updated {@link EventHubClientBuilder} object. * @deprecated Replaced by {@link */ @Deprecated public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used. * * @param retryOptions The retry policy to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder retryOptions(AmqpRetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the name of the consumer group this consumer is associated with. Events are read in the context of this * group. The name of the consumer group that is created by default is {@link * "$Default"}. * * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the * context of this group. The name of the consumer group that is created by default is {@link * * * @return The updated {@link EventHubClientBuilder} object. 
*/ public EventHubClientBuilder consumerGroup(String consumerGroup) { this.consumerGroup = consumerGroup; return this; } /** * Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive * and queue locally without regard to whether a receive operation is currently active. * * @param prefetchCount The amount of events to queue locally. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code prefetchCount} is less than {@link * greater than {@link */ public EventHubClientBuilder prefetchCount(int prefetchCount) { if (prefetchCount < MINIMUM_PREFETCH_COUNT) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT))); } if (prefetchCount > MAXIMUM_PREFETCH_COUNT) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT))); } this.prefetchCount = prefetchCount; return this; } /** * Package-private method that gets the prefetch count. * * @return Gets the prefetch count or {@code null} if it has not been set. * @see */ Integer getPrefetchCount() { return prefetchCount; } /** * Package-private method that sets the scheduler for the created Event Hub client. * * @param scheduler Scheduler to set. * * @return The updated {@link EventHubClientBuilder} object. */ EventHubClientBuilder scheduler(Scheduler scheduler) { this.scheduler = scheduler; return this; } /** * Package-private method that sets the verify mode for this connection. * * @param verifyMode The verification mode. * @return The updated {@link EventHubClientBuilder} object. */ EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) { this.verifyMode = verifyMode; return this; } /** * Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code * buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created. * * @return A new {@link EventHubConsumerAsyncClient} with the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * {@link * {@link AmqpTransportType */ public EventHubConsumerAsyncClient buildAsyncConsumerClient() { if (CoreUtils.isNullOrEmpty(consumerGroup)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty " + "string. using EventHubClientBuilder.consumerGroup(String)")); } return buildAsyncClient().createConsumer(consumerGroup, prefetchCount); } /** * Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code * buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created. * * @return A new {@link EventHubConsumerClient} with the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * {@link * {@link AmqpTransportType */ public EventHubConsumerClient buildConsumerClient() { return buildClient().createConsumer(consumerGroup, prefetchCount); } /** * Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created. 
* * @return A new {@link EventHubProducerAsyncClient} instance with all the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * proxy is specified but the transport type is not {@link AmqpTransportType */ public EventHubProducerAsyncClient buildAsyncProducerClient() { return buildAsyncClient().createProducer(); } /** * Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created. * * @return A new {@link EventHubProducerClient} instance with all the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * proxy is specified but the transport type is not {@link AmqpTransportType */ public EventHubProducerClient buildProducerClient() { return buildClient().createProducer(); } /** * Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code * buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created. * * <p> * The following options are used if ones are not specified in the builder: * * <ul> * <li>If no configuration is specified, the {@link Configuration * is used to provide any shared configuration values. The configuration values read are the {@link * Configuration * ProxyOptions * <li>If no retry is specified, the default retry options are used.</li> * <li>If no proxy is specified, the builder checks the {@link Configuration * configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li> * <li>If no timeout is specified, a {@link ClientConstants * </ul> * * @return A new {@link EventHubAsyncClient} instance with all the configured options. * @throws IllegalArgumentException if the credentials have not been set using either {@link * * specified but the transport type is not {@link AmqpTransportType */ /** * Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is * invoked, a new instance of {@link EventHubClient} is created. * * <p> * The following options are used if ones are not specified in the builder: * * <ul> * <li>If no configuration is specified, the {@link Configuration * is used to provide any shared configuration values. The configuration values read are the {@link * Configuration * ProxyOptions * <li>If no retry is specified, the default retry options are used.</li> * <li>If no proxy is specified, the builder checks the {@link Configuration * configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li> * <li>If no timeout is specified, a {@link ClientConstants * <li>If no scheduler is specified, an {@link Schedulers * </ul> * * @return A new {@link EventHubClient} instance with all the configured options. * @throws IllegalArgumentException if the credentials have not been set using either {@link * * specified but the transport type is not {@link AmqpTransportType */ EventHubClient buildClient() { if (prefetchCount == null) { prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT; } final EventHubAsyncClient client = buildAsyncClient(); return new EventHubClient(client, retryOptions); } void onClientClose() { synchronized (connectionLock) { final int numberOfOpenClients = openClients.decrementAndGet(); LOGGER.info("Closing a dependent client. 
if (numberOfOpenClients > 0) { return; } if (numberOfOpenClients < 0) { LOGGER.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients); } LOGGER.info("No more open clients, closing shared connection."); if (eventHubConnectionProcessor != null) { eventHubConnectionProcessor.dispose(); eventHubConnectionProcessor = null; } else { LOGGER.warning("Shared EventHubConnectionProcessor was already disposed."); } } } private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) { final ConnectionOptions connectionOptions = getConnectionOptions(); final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> { sink.onRequest(request -> { if (request == 0) { return; } else if (request > 1) { sink.error(LOGGER.logExceptionAsWarning(new IllegalArgumentException( "Requested more than one connection. Only emitting one. Request: " + request))); return; } final String connectionId = StringUtil.getRandomString("MF"); LOGGER.atInfo() .addKeyValue(CONNECTION_ID_KEY, connectionId) .log("Emitting a single connection."); final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider( connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(), connectionOptions.getAuthorizationScope()); final ReactorProvider provider = new ReactorProvider(); final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider); final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId, connectionOptions, getEventHubName(), provider, handlerProvider, tokenManagerProvider, messageSerializer); sink.next(connection); }); }); return connectionFlux.subscribeWith(new EventHubConnectionProcessor( connectionOptions.getFullyQualifiedNamespace(), getEventHubName(), connectionOptions.getRetry())); } private ConnectionOptions getConnectionOptions() { Configuration buildConfiguration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration; if (credentials == null) { final String connectionString = buildConfiguration.get(AZURE_EVENT_HUBS_CONNECTION_STRING); if (CoreUtils.isNullOrEmpty(connectionString)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. " + "They can be set using: connectionString(String), connectionString(String, String), " + "credentials(String, String, TokenCredential), or setting the environment variable '" + AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string")); } connectionString(connectionString); } if (proxyOptions == null) { proxyOptions = getDefaultProxyConfiguration(buildConfiguration); } if (proxyOptions != null && proxyOptions.isProxyAddressConfigured() && transport != AmqpTransportType.AMQP_WEB_SOCKETS) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "Cannot use a proxy when TransportType is not AMQP Web Sockets.")); } final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential ? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE : CbsAuthorizationType.JSON_WEB_TOKEN; final SslDomain.VerifyMode verificationMode = verifyMode != null ? verifyMode : SslDomain.VerifyMode.VERIFY_PEER_NAME; final ClientOptions options = clientOptions != null ? 
clientOptions : new ClientOptions(); final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE); final String product = properties.getOrDefault(NAME_KEY, UNKNOWN); final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN); if (customEndpointAddress == null) { return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType, ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler, options, verificationMode, product, clientVersion); } else { return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType, ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler, options, verificationMode, product, clientVersion, customEndpointAddress.getHost(), customEndpointAddress.getPort()); } } private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) { ProxyAuthenticationType authentication = ProxyAuthenticationType.NONE; if (proxyOptions != null) { authentication = proxyOptions.getAuthentication(); } String proxyAddress = configuration.get(Configuration.PROPERTY_HTTP_PROXY); if (CoreUtils.isNullOrEmpty(proxyAddress)) { return ProxyOptions.SYSTEM_DEFAULTS; } return getProxyOptions(authentication, proxyAddress, configuration, Boolean.parseBoolean(configuration.get("java.net.useSystemProxies"))); } private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress, Configuration configuration, boolean useSystemProxies) { String host; int port; if (HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) { final String[] hostPort = proxyAddress.split(":"); host = hostPort[0]; port = Integer.parseInt(hostPort[1]); final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port)); final String username = configuration.get(ProxyOptions.PROXY_USERNAME); final String password = configuration.get(ProxyOptions.PROXY_PASSWORD); return new ProxyOptions(authentication, proxy, username, password); } else if (useSystemProxies) { com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions .fromConfiguration(configuration); Proxy.Type proxyType = coreProxyOptions.getType().toProxyType(); InetSocketAddress coreProxyAddress = coreProxyOptions.getAddress(); String username = coreProxyOptions.getUsername(); String password = coreProxyOptions.getPassword(); return new ProxyOptions(authentication, new Proxy(proxyType, coreProxyAddress), username, password); } else { LOGGER.verbose("'HTTP_PROXY' was configured but ignored as 'java.net.useSystemProxies' wasn't " + "set or was false."); return ProxyOptions.SYSTEM_DEFAULTS; } } }
class EventHubClientBuilder implements TokenCredentialTrait<EventHubClientBuilder>, AzureNamedKeyCredentialTrait<EventHubClientBuilder>, ConnectionStringTrait<EventHubClientBuilder>, AzureSasCredentialTrait<EventHubClientBuilder>, AmqpTrait<EventHubClientBuilder>, ConfigurationTrait<EventHubClientBuilder> { static final int DEFAULT_PREFETCH_COUNT = 500; static final int DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT = 1; /** * The name of the default consumer group in the Event Hubs service. */ public static final String DEFAULT_CONSUMER_GROUP_NAME = "$Default"; /** * The minimum value allowed for the prefetch count of the consumer. */ private static final int MINIMUM_PREFETCH_COUNT = 1; /** * The maximum value allowed for the prefetch count of the consumer. */ private static final int MAXIMUM_PREFETCH_COUNT = 8000; private static final String EVENTHUBS_PROPERTIES_FILE = "azure-messaging-eventhubs.properties"; private static final String NAME_KEY = "name"; private static final String VERSION_KEY = "version"; private static final String UNKNOWN = "UNKNOWN"; private static final String AZURE_EVENT_HUBS_CONNECTION_STRING = "AZURE_EVENT_HUBS_CONNECTION_STRING"; private static final AmqpRetryOptions DEFAULT_RETRY = new AmqpRetryOptions() .setTryTimeout(ClientConstants.OPERATION_TIMEOUT); private static final Pattern HOST_PORT_PATTERN = Pattern.compile("^[^:]+:\\d+"); private static final ClientLogger LOGGER = new ClientLogger(EventHubClientBuilder.class); private final Object connectionLock = new Object(); private final AtomicBoolean isSharedConnection = new AtomicBoolean(); private TokenCredential credentials; private Configuration configuration; private ProxyOptions proxyOptions; private AmqpRetryOptions retryOptions; private Scheduler scheduler; private AmqpTransportType transport; private String fullyQualifiedNamespace; private String eventHubName; private String consumerGroup; private EventHubConnectionProcessor eventHubConnectionProcessor; private Integer prefetchCount; private ClientOptions clientOptions; private SslDomain.VerifyMode verifyMode; private URL customEndpointAddress; /** * Keeps track of the open clients that were created from this builder when there is a shared connection. */ private final AtomicInteger openClients = new AtomicInteger(); /** * Creates a new instance with the default transport {@link AmqpTransportType * non-shared connection means that a dedicated AMQP connection is created for every Event Hub consumer or producer * created using the builder. */ public EventHubClientBuilder() { transport = AmqpTransportType.AMQP; } /** * Sets the credential information given a connection string to the Event Hub instance. * * <p> * If the connection string is copied from the Event Hubs namespace, it will likely not contain the name to the * desired Event Hub, which is needed. In this case, the name can be added manually by adding {@literal * "EntityPath=EVENT_HUB_NAME"} to the end of the connection string. For example, "EntityPath=telemetry-hub". * </p> * * <p> * If you have defined a shared access policy directly on the Event Hub itself, then copying the connection string * from that Event Hub will result in a connection string that contains the name. * </p> * * @param connectionString The connection string to use for connecting to the Event Hub instance. It is expected * that the Event Hub name and the shared access key properties are contained in this connection string. * * @return The updated {@link EventHubClientBuilder} object. 
* @throws IllegalArgumentException if {@code connectionString} is null or empty. Or, the {@code * connectionString} does not contain the "EntityPath" key, which is the name of the Event Hub instance. * @throws AzureException If the shared access signature token credential could not be created using the * connection string. */ @Override public EventHubClientBuilder connectionString(String connectionString) { ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); TokenCredential tokenCredential = getTokenCredential(properties); return credential(properties.getEndpoint().getHost(), properties.getEntityPath(), tokenCredential); } private TokenCredential getTokenCredential(ConnectionStringProperties properties) { TokenCredential tokenCredential; if (properties.getSharedAccessSignature() == null) { tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessKeyName(), properties.getSharedAccessKey(), ClientConstants.TOKEN_VALIDITY); } else { tokenCredential = new EventHubSharedKeyCredential(properties.getSharedAccessSignature()); } return tokenCredential; } /** * Sets the client options. * * @param clientOptions The client options. * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Sets the credential information given a connection string to the Event Hubs namespace and name to a specific * Event Hub instance. * * @param connectionString The connection string to use for connecting to the Event Hubs namespace; it is * expected that the shared access key properties are contained in this connection string, but not the Event Hub * name. * @param eventHubName The name of the Event Hub to connect the client to. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code connectionString} or {@code eventHubName} is null. * @throws IllegalArgumentException if {@code connectionString} or {@code eventHubName} is an empty string. Or, * if the {@code connectionString} contains the Event Hub name. * @throws AzureException If the shared access signature token credential could not be created using the * connection string. */ public EventHubClientBuilder connectionString(String connectionString, String eventHubName) { Objects.requireNonNull(connectionString, "'connectionString' cannot be null."); Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (connectionString.isEmpty()) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "'connectionString' cannot be an empty string.")); } else if (eventHubName.isEmpty()) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } final ConnectionStringProperties properties = new ConnectionStringProperties(connectionString); TokenCredential tokenCredential = getTokenCredential(properties); if (!CoreUtils.isNullOrEmpty(properties.getEntityPath()) && !eventHubName.equals(properties.getEntityPath())) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "'connectionString' contains an Event Hub name [%s] and it does not match the given " + "'eventHubName' parameter [%s]. Please use the credentials(String connectionString) overload. 
" + "Or supply a 'connectionString' without 'EntityPath' in it.", properties.getEntityPath(), eventHubName))); } return credential(properties.getEndpoint().getHost(), eventHubName, tokenCredential); } /** * Sets the configuration store that is used during construction of the service client. * * If not specified, the default configuration store is used to configure the {@link EventHubAsyncClient}. Use * {@link Configuration * * @param configuration The configuration store used to configure the {@link EventHubAsyncClient}. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets a custom endpoint address when connecting to the Event Hubs service. This can be useful when your network * does not allow connecting to the standard Azure Event Hubs endpoint address, but does allow connecting through * an intermediary. For example: {@literal https: * <p> * If no port is specified, the default port for the {@link * used. * * @param customEndpointAddress The custom endpoint address. * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code customEndpointAddress} cannot be parsed into a valid {@link URL}. */ public EventHubClientBuilder customEndpointAddress(String customEndpointAddress) { if (customEndpointAddress == null) { this.customEndpointAddress = null; return this; } try { this.customEndpointAddress = new URL(customEndpointAddress); } catch (MalformedURLException e) { throw LOGGER.logExceptionAsError( new IllegalArgumentException(customEndpointAddress + " : is not a valid URL.", e)); } return this; } /** * Sets the fully qualified name for the Event Hubs namespace. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} is an empty string. * @throws NullPointerException if {@code fullyQualifiedNamespace} is null. */ public EventHubClientBuilder fullyQualifiedNamespace(String fullyQualifiedNamespace) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return this; } private String getAndValidateFullyQualifiedNamespace() { if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("'fullyQualifiedNamespace' cannot be an empty string.")); } return fullyQualifiedNamespace; } /** * Sets the name of the Event Hub to connect the client to. * * @param eventHubName The name of the Event Hub to connect the client to. * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code eventHubName} is an empty string. * @throws NullPointerException if {@code eventHubName} is null. 
*/ public EventHubClientBuilder eventHubName(String eventHubName) { this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return this; } private String getEventHubName() { if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return eventHubName; } /** * Toggles the builder to use the same connection for producers or consumers that are built from this instance. By * default, a new connection is constructed and used created for each Event Hub consumer or producer created. * * @return The updated {@link EventHubClientBuilder} object. */ public EventHubClientBuilder shareConnection() { this.isSharedConnection.set(true); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The token credential to use for authorization. Access controls may be specified by the * Event Hubs namespace or the requested Event Hub, depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. */ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, TokenCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } return this; } /** * Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java * <a href="https: * documentation for more details on proper usage of the {@link TokenCredential} type. * * @param credential The token credential to use for authorization. Access controls may be specified by the * Event Hubs namespace or the requested Event Hub, depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(TokenCredential credential) { this.credentials = Objects.requireNonNull(credential, "'credential' cannot be null."); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. 
This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The shared access name and key credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. */ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, AzureNamedKeyCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(), credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param credential The shared access name and key credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(AzureNamedKeyCredential credential) { Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getAzureNamedKey().getName(), credential.getAzureNamedKey().getKey(), ClientConstants.TOKEN_VALIDITY); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param fullyQualifiedNamespace The fully qualified name for the Event Hubs namespace. This is likely to be * similar to <strong>{@literal "{your-namespace}.servicebus.windows.net}"</strong>. * @param eventHubName The name of the Event Hub to connect the client to. * @param credential The shared access signature credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code fullyQualifiedNamespace} or {@code eventHubName} is an empty * string. * @throws NullPointerException if {@code fullyQualifiedNamespace}, {@code eventHubName}, {@code credentials} is * null. 
*/ public EventHubClientBuilder credential(String fullyQualifiedNamespace, String eventHubName, AzureSasCredential credential) { this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace, "'fullyQualifiedNamespace' cannot be null."); this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null."); if (CoreUtils.isNullOrEmpty(fullyQualifiedNamespace)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'host' cannot be an empty string.")); } else if (CoreUtils.isNullOrEmpty(eventHubName)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'eventHubName' cannot be an empty string.")); } Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getSignature()); return this; } /** * Sets the credential information for which Event Hub instance to connect to, and how to authorize against it. * * @param credential The shared access signature credential to use for authorization. * Access controls may be specified by the Event Hubs namespace or the requested Event Hub, * depending on Azure configuration. * * @return The updated {@link EventHubClientBuilder} object. * @throws NullPointerException if {@code credentials} is null. */ @Override public EventHubClientBuilder credential(AzureSasCredential credential) { Objects.requireNonNull(credential, "'credential' cannot be null."); this.credentials = new EventHubSharedKeyCredential(credential.getSignature()); return this; } /** * Sets the proxy configuration to use for {@link EventHubAsyncClient}. When a proxy is configured, {@link * AmqpTransportType * * @param proxyOptions The proxy configuration to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder proxyOptions(ProxyOptions proxyOptions) { this.proxyOptions = proxyOptions; return this; } /** * Sets the transport type by which all the communication with Azure Event Hubs occurs. Default value is {@link * AmqpTransportType * * @param transport The transport type to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder transportType(AmqpTransportType transport) { this.transport = transport; return this; } /** * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used. * * @param retryOptions The retry policy to use. * * @return The updated {@link EventHubClientBuilder} object. * @deprecated Replaced by {@link */ @Deprecated public EventHubClientBuilder retry(AmqpRetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the retry policy for {@link EventHubAsyncClient}. If not specified, the default retry options are used. * * @param retryOptions The retry policy to use. * * @return The updated {@link EventHubClientBuilder} object. */ @Override public EventHubClientBuilder retryOptions(AmqpRetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the name of the consumer group this consumer is associated with. Events are read in the context of this * group. The name of the consumer group that is created by default is {@link * "$Default"}. * * @param consumerGroup The name of the consumer group this consumer is associated with. Events are read in the * context of this group. The name of the consumer group that is created by default is {@link * * * @return The updated {@link EventHubClientBuilder} object. 
*/ public EventHubClientBuilder consumerGroup(String consumerGroup) { this.consumerGroup = consumerGroup; return this; } /** * Sets the count used by the receiver to control the number of events the Event Hub consumer will actively receive * and queue locally without regard to whether a receive operation is currently active. * * @param prefetchCount The amount of events to queue locally. * * @return The updated {@link EventHubClientBuilder} object. * @throws IllegalArgumentException if {@code prefetchCount} is less than {@link * greater than {@link */ public EventHubClientBuilder prefetchCount(int prefetchCount) { if (prefetchCount < MINIMUM_PREFETCH_COUNT) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "PrefetchCount, '%s' has to be above %s", prefetchCount, MINIMUM_PREFETCH_COUNT))); } if (prefetchCount > MAXIMUM_PREFETCH_COUNT) { throw LOGGER.logExceptionAsError(new IllegalArgumentException(String.format(Locale.US, "PrefetchCount, '%s', has to be below %s", prefetchCount, MAXIMUM_PREFETCH_COUNT))); } this.prefetchCount = prefetchCount; return this; } /** * Package-private method that gets the prefetch count. * * @return Gets the prefetch count or {@code null} if it has not been set. * @see */ Integer getPrefetchCount() { return prefetchCount; } /** * Package-private method that sets the scheduler for the created Event Hub client. * * @param scheduler Scheduler to set. * * @return The updated {@link EventHubClientBuilder} object. */ EventHubClientBuilder scheduler(Scheduler scheduler) { this.scheduler = scheduler; return this; } /** * Package-private method that sets the verify mode for this connection. * * @param verifyMode The verification mode. * @return The updated {@link EventHubClientBuilder} object. */ EventHubClientBuilder verifyMode(SslDomain.VerifyMode verifyMode) { this.verifyMode = verifyMode; return this; } /** * Creates a new {@link EventHubConsumerAsyncClient} based on the options set on this builder. Every time {@code * buildAsyncConsumer()} is invoked, a new instance of {@link EventHubConsumerAsyncClient} is created. * * @return A new {@link EventHubConsumerAsyncClient} with the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * {@link * {@link AmqpTransportType */ public EventHubConsumerAsyncClient buildAsyncConsumerClient() { if (CoreUtils.isNullOrEmpty(consumerGroup)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'consumerGroup' cannot be null or an empty " + "string. using EventHubClientBuilder.consumerGroup(String)")); } return buildAsyncClient().createConsumer(consumerGroup, prefetchCount); } /** * Creates a new {@link EventHubConsumerClient} based on the options set on this builder. Every time {@code * buildConsumer()} is invoked, a new instance of {@link EventHubConsumerClient} is created. * * @return A new {@link EventHubConsumerClient} with the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * {@link * {@link AmqpTransportType */ public EventHubConsumerClient buildConsumerClient() { return buildClient().createConsumer(consumerGroup, prefetchCount); } /** * Creates a new {@link EventHubProducerAsyncClient} based on options set on this builder. Every time {@code * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerAsyncClient} is created. 
* * @return A new {@link EventHubProducerAsyncClient} instance with all the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * proxy is specified but the transport type is not {@link AmqpTransportType */ public EventHubProducerAsyncClient buildAsyncProducerClient() { return buildAsyncClient().createProducer(); } /** * Creates a new {@link EventHubProducerClient} based on options set on this builder. Every time {@code * buildAsyncProducer()} is invoked, a new instance of {@link EventHubProducerClient} is created. * * @return A new {@link EventHubProducerClient} instance with all the configured options. * @throws IllegalArgumentException If shared connection is not used and the credentials have not been set using * either {@link * proxy is specified but the transport type is not {@link AmqpTransportType */ public EventHubProducerClient buildProducerClient() { return buildClient().createProducer(); } /** * Creates a new {@link EventHubAsyncClient} based on options set on this builder. Every time {@code * buildAsyncClient()} is invoked, a new instance of {@link EventHubAsyncClient} is created. * * <p> * The following options are used if ones are not specified in the builder: * * <ul> * <li>If no configuration is specified, the {@link Configuration * is used to provide any shared configuration values. The configuration values read are the {@link * Configuration * ProxyOptions * <li>If no retry is specified, the default retry options are used.</li> * <li>If no proxy is specified, the builder checks the {@link Configuration * configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li> * <li>If no timeout is specified, a {@link ClientConstants * </ul> * * @return A new {@link EventHubAsyncClient} instance with all the configured options. * @throws IllegalArgumentException if the credentials have not been set using either {@link * * specified but the transport type is not {@link AmqpTransportType */ /** * Creates a new {@link EventHubClient} based on options set on this builder. Every time {@code buildClient()} is * invoked, a new instance of {@link EventHubClient} is created. * * <p> * The following options are used if ones are not specified in the builder: * * <ul> * <li>If no configuration is specified, the {@link Configuration * is used to provide any shared configuration values. The configuration values read are the {@link * Configuration * ProxyOptions * <li>If no retry is specified, the default retry options are used.</li> * <li>If no proxy is specified, the builder checks the {@link Configuration * configuration} for a configured proxy, then it checks to see if a system proxy is configured.</li> * <li>If no timeout is specified, a {@link ClientConstants * <li>If no scheduler is specified, an {@link Schedulers * </ul> * * @return A new {@link EventHubClient} instance with all the configured options. * @throws IllegalArgumentException if the credentials have not been set using either {@link * * specified but the transport type is not {@link AmqpTransportType */ EventHubClient buildClient() { if (prefetchCount == null) { prefetchCount = DEFAULT_PREFETCH_COUNT_FOR_SYNC_CLIENT; } final EventHubAsyncClient client = buildAsyncClient(); return new EventHubClient(client, retryOptions); } void onClientClose() { synchronized (connectionLock) { final int numberOfOpenClients = openClients.decrementAndGet(); LOGGER.info("Closing a dependent client. 
if (numberOfOpenClients > 0) { return; } if (numberOfOpenClients < 0) { LOGGER.warning("There should not be less than 0 clients. actual: {}", numberOfOpenClients); } LOGGER.info("No more open clients, closing shared connection."); if (eventHubConnectionProcessor != null) { eventHubConnectionProcessor.dispose(); eventHubConnectionProcessor = null; } else { LOGGER.warning("Shared EventHubConnectionProcessor was already disposed."); } } } private EventHubConnectionProcessor buildConnectionProcessor(MessageSerializer messageSerializer) { final ConnectionOptions connectionOptions = getConnectionOptions(); final Flux<EventHubAmqpConnection> connectionFlux = Flux.create(sink -> { sink.onRequest(request -> { if (request == 0) { return; } else if (request > 1) { sink.error(LOGGER.logExceptionAsWarning(new IllegalArgumentException( "Requested more than one connection. Only emitting one. Request: " + request))); return; } final String connectionId = StringUtil.getRandomString("MF"); LOGGER.atInfo() .addKeyValue(CONNECTION_ID_KEY, connectionId) .log("Emitting a single connection."); final TokenManagerProvider tokenManagerProvider = new AzureTokenManagerProvider( connectionOptions.getAuthorizationType(), connectionOptions.getFullyQualifiedNamespace(), connectionOptions.getAuthorizationScope()); final ReactorProvider provider = new ReactorProvider(); final ReactorHandlerProvider handlerProvider = new ReactorHandlerProvider(provider); final EventHubAmqpConnection connection = new EventHubReactorAmqpConnection(connectionId, connectionOptions, getEventHubName(), provider, handlerProvider, tokenManagerProvider, messageSerializer); sink.next(connection); }); }); return connectionFlux.subscribeWith(new EventHubConnectionProcessor( connectionOptions.getFullyQualifiedNamespace(), getEventHubName(), connectionOptions.getRetry())); } private ConnectionOptions getConnectionOptions() { Configuration buildConfiguration = configuration == null ? Configuration.getGlobalConfiguration().clone() : configuration; if (credentials == null) { final String connectionString = buildConfiguration.get(AZURE_EVENT_HUBS_CONNECTION_STRING); if (CoreUtils.isNullOrEmpty(connectionString)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("Credentials have not been set. " + "They can be set using: connectionString(String), connectionString(String, String), " + "credentials(String, String, TokenCredential), or setting the environment variable '" + AZURE_EVENT_HUBS_CONNECTION_STRING + "' with a connection string")); } connectionString(connectionString); } if (proxyOptions == null) { proxyOptions = getDefaultProxyConfiguration(buildConfiguration); } if (proxyOptions != null && proxyOptions.isProxyAddressConfigured() && transport != AmqpTransportType.AMQP_WEB_SOCKETS) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "Cannot use a proxy when TransportType is not AMQP Web Sockets.")); } final CbsAuthorizationType authorizationType = credentials instanceof EventHubSharedKeyCredential ? CbsAuthorizationType.SHARED_ACCESS_SIGNATURE : CbsAuthorizationType.JSON_WEB_TOKEN; final SslDomain.VerifyMode verificationMode = verifyMode != null ? verifyMode : SslDomain.VerifyMode.VERIFY_PEER_NAME; final ClientOptions options = clientOptions != null ? 
clientOptions : new ClientOptions(); final Map<String, String> properties = CoreUtils.getProperties(EVENTHUBS_PROPERTIES_FILE); final String product = properties.getOrDefault(NAME_KEY, UNKNOWN); final String clientVersion = properties.getOrDefault(VERSION_KEY, UNKNOWN); if (customEndpointAddress == null) { return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType, ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler, options, verificationMode, product, clientVersion); } else { return new ConnectionOptions(getAndValidateFullyQualifiedNamespace(), credentials, authorizationType, ClientConstants.AZURE_ACTIVE_DIRECTORY_SCOPE, transport, retryOptions, proxyOptions, scheduler, options, verificationMode, product, clientVersion, customEndpointAddress.getHost(), customEndpointAddress.getPort()); } } private ProxyOptions getDefaultProxyConfiguration(Configuration configuration) { ProxyAuthenticationType authentication = ProxyAuthenticationType.NONE; if (proxyOptions != null) { authentication = proxyOptions.getAuthentication(); } String proxyAddress = configuration.get(Configuration.PROPERTY_HTTP_PROXY); if (CoreUtils.isNullOrEmpty(proxyAddress)) { return ProxyOptions.SYSTEM_DEFAULTS; } return getProxyOptions(authentication, proxyAddress, configuration, Boolean.parseBoolean(configuration.get("java.net.useSystemProxies"))); } private ProxyOptions getProxyOptions(ProxyAuthenticationType authentication, String proxyAddress, Configuration configuration, boolean useSystemProxies) { String host; int port; if (HOST_PORT_PATTERN.matcher(proxyAddress.trim()).find()) { final String[] hostPort = proxyAddress.split(":"); host = hostPort[0]; port = Integer.parseInt(hostPort[1]); final Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(host, port)); final String username = configuration.get(ProxyOptions.PROXY_USERNAME); final String password = configuration.get(ProxyOptions.PROXY_PASSWORD); return new ProxyOptions(authentication, proxy, username, password); } else if (useSystemProxies) { com.azure.core.http.ProxyOptions coreProxyOptions = com.azure.core.http.ProxyOptions .fromConfiguration(configuration); Proxy.Type proxyType = coreProxyOptions.getType().toProxyType(); InetSocketAddress coreProxyAddress = coreProxyOptions.getAddress(); String username = coreProxyOptions.getUsername(); String password = coreProxyOptions.getPassword(); return new ProxyOptions(authentication, new Proxy(proxyType, coreProxyAddress), username, password); } else { LOGGER.verbose("'HTTP_PROXY' was configured but ignored as 'java.net.useSystemProxies' wasn't " + "set or was false."); return ProxyOptions.SYSTEM_DEFAULTS; } } }
the `binaryData.getLength()` call may return `null`, which means the length wasn't measured eagerly. If this happens, we should get the `BinaryDataContent` from inside and decorate it depending on its type. I.e., if it's `FluxByteBufferContent`, fall back to the old `validateLength`, or maybe just throw an exception and don't accept Flux at all (I think this might be better). If it's `InputStreamContent`, decorate the stream with `LengthValidatingInputStream`.
static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getContent(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); long length = binaryData.getLength(); if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, binaryData.getLength(), expectedLength), binaryData.getLength(), expectedLength); } return binaryData; }
long length = binaryData.getLength();
static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getBodyAsBinaryData(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { InputStreamContent inputStreamContent = ((InputStreamContent) bdc); InputStream inputStream = inputStreamContent.toStream(); LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(inputStream, expectedLength); return BinaryData.fromStream(lengthValidatingInputStream); } else { byte[] b = (bdc).toBytes(); long len = b.length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } return BinaryData.fromBytes(b); } } else { if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, length, expectedLength), length, expectedLength); } return binaryData; } }
class is if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(Exceptions.propagate(new Exception("'ResumeOperation' isn't supported."))); }
class is if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); }
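To make the decoration idea from the comment above concrete, here is a minimal sketch of a length-validating stream wrapper for the unknown-length case. The class name CountingLengthStream and the plain IOException it throws are assumptions introduced only for illustration; the rewritten method relies on the library's own LengthValidatingInputStream, which reports mismatches as UnexpectedLengthException instead.

import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

// Simplified, hypothetical stand-in for a length-validating stream decorator:
// it counts bytes as they are read and fails fast if the body turns out to be
// larger or smaller than the declared Content-Length.
final class CountingLengthStream extends FilterInputStream {
    private final long expectedLength;
    private long bytesRead;

    CountingLengthStream(InputStream source, long expectedLength) {
        super(source);
        this.expectedLength = expectedLength;
    }

    @Override
    public int read() throws IOException {
        int b = super.read();
        validate(b == -1 ? -1 : 1);
        return b;
    }

    @Override
    public int read(byte[] buffer, int offset, int length) throws IOException {
        int count = super.read(buffer, offset, length);
        validate(count);
        return count;
    }

    private void validate(int count) throws IOException {
        if (count == -1) {
            // End of stream: the body must match the declared length exactly.
            if (bytesRead != expectedLength) {
                throw new IOException("Request body emitted " + bytesRead
                    + " bytes, less than the expected " + expectedLength + " bytes.");
            }
            return;
        }
        bytesRead += count;
        if (bytesRead > expectedLength) {
            throw new IOException("Request body emitted " + bytesRead
                + " bytes, more than the expected " + expectedLength + " bytes.");
        }
    }
}

Because the wrapper validates lazily, the request body is never buffered just to check its size; the mismatch surfaces while the pipeline reads the stream, which is the same behavior the real LengthValidatingInputStream gives the synchronous proxy.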
we should probably add a generic `catch (Exception)` and a `finally` to the `try` block and make sure to close the tracing span.
public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); try { final SwaggerMethodParser methodParser = getMethodParser(method); final HttpRequest request = createHttpRequest(methodParser, args); Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLength(request)); } final HttpResponse response = send(request, context); HttpDecodedResponse decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (IOException e) { throw LOGGER.logExceptionAsError(Exceptions.propagate(e)); } }
context = startTracingSpan(method, context);
public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); final SwaggerMethodParser methodParser = getMethodParser(method); HttpRequest request; try { request = createHttpRequest(methodParser, args); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); HttpDecodedResponse decodedResponse = null; Throwable throwable = null; try { context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLengthSync(request)); } final HttpResponse response = send(request, context); decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (Exception e) { throwable = e; if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } finally { if (decodedResponse != null || throwable != null) { endTracingSpan(decodedResponse, throwable, context); } } }
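The rewritten invoke method above applies the pattern suggested in the review comment; the sketch below isolates that try/catch/finally shape on its own. Span, SpanClosingSketch, and callWithSpan are hypothetical names used only for illustration; the real code ends the span through endTracingSpan(decodedResponse, throwable, context).

import java.util.concurrent.Callable;

// Minimal illustration of the pattern adopted in the rewritten invoke(): wrap
// the service call in try/catch/finally so the tracing span is always ended,
// and hand the throwable (if any) to the span before rethrowing.
final class SpanClosingSketch {

    // Hypothetical span abstraction; stands in for the Context/TracerProxy pair
    // used by the actual proxy.
    interface Span {
        void end(Throwable error);
    }

    static <T> T callWithSpan(Span span, Callable<T> serviceCall) {
        Throwable failure = null;
        try {
            return serviceCall.call();
        } catch (Exception e) {
            failure = e;
            // Preserve unchecked exceptions, wrap checked ones.
            throw (e instanceof RuntimeException) ? (RuntimeException) e : new RuntimeException(e);
        } finally {
            // The span is closed on both the success path and every failure path.
            span.end(failure);
        }
    }
}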
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSynchronously(request, contextData); } @Override @SuppressWarnings("deprecation") void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(Exceptions.propagate(new Exception("'ResumeOperation' isn't supported."))); } } static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static Flux<ByteBuffer> validateLength(final HttpRequest request) { final Flux<ByteBuffer> bbFlux = request.getBody(); if (bbFlux == null) { return Flux.empty(); } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); return Flux.defer(() -> { final long[] currentTotalLength = new long[1]; return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> { if (buffer == null) { return; } if (buffer == VALIDATION_BUFFER) { if (expectedLength != currentTotalLength[0]) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); } else { sink.complete(); } return; } currentTotalLength[0] += buffer.remaining(); if (currentTotalLength[0] > expectedLength) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); return; } sink.next(buffer); }); }); } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getContent(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); long length = binaryData.getLength(); if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, binaryData.getLength(), expectedLength), binaryData.getLength(), expectedLength); } return binaryData; } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. 
* * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setContent(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setContent(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception 
instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsByteArray().block(); if (responseBytes == null || responseBytes.length == 0) { throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null)); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().getBody().ignoreElements().block(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw new RuntimeException("Cannot find suitable constructor for class " + cls); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getContent().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getContent(); } else { result = response.getDecodedBody((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(Signal<HttpDecodedResponse> signal) { if (!TracerProxy.isTracingEnabled()) { return; } if (signal.isOnComplete() || signal.isOnSubscribe()) { return; } ContextView context = signal.getContextView(); Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT"); boolean disableTracing = Boolean.TRUE.equals(context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false)); if (!tracingContext.isPresent() || disableTracing) { return; } int statusCode = 0; HttpDecodedResponse httpDecodedResponse; Throwable throwable = null; if (signal.hasValue()) { httpDecodedResponse = signal.get(); statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (signal.hasError()) { throwable = signal.getThrowable(); if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext.get()); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. 
* * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. * @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSync(request, contextData); } @Override @SuppressWarnings("deprecation") void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); } } static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getBodyAsBinaryData(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { InputStreamContent inputStreamContent = ((InputStreamContent) bdc); InputStream inputStream = inputStreamContent.toStream(); LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(inputStream, expectedLength); return BinaryData.fromStream(lengthValidatingInputStream); } else { byte[] b = (bdc).toBytes(); long len = b.length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } return BinaryData.fromBytes(b); } } else { if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, length, expectedLength), length, expectedLength); } return binaryData; } } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. 
* * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setBody(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception 
instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } Exception e; byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsBinaryData().toBytes(); if (responseBytes == null || responseBytes.length == 0) { e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody); } if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().close(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find suitable constructor for class " + cls)); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getBodyAsBinaryData().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getBodyAsBinaryData(); } else { result = response.getDecodedBodySync((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(HttpDecodedResponse httpDecodedResponse, Throwable throwable, Context tracingContext) { if (tracingContext == null) { return; } Object disableTracingValue = (tracingContext.getData(Tracer.DISABLE_TRACING_KEY).isPresent() ? tracingContext.getData(Tracer.DISABLE_TRACING_KEY).get() : null); boolean disableTracing = Boolean.TRUE.equals(disableTracingValue != null ? disableTracingValue : false); if (disableTracing) { return; } int statusCode = 0; if (httpDecodedResponse != null) { statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (throwable != null) { if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. 
* @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
Let's not use `Exceptions.propagate` in any of the sync call stacks; it surfaces a Reactor exception type in a synchronous path. `UncheckedIOException` is a better candidate here.
public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); try { final SwaggerMethodParser methodParser = getMethodParser(method); final HttpRequest request = createHttpRequest(methodParser, args); Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLength(request)); } final HttpResponse response = send(request, context); HttpDecodedResponse decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (IOException e) { throw LOGGER.logExceptionAsError(Exceptions.propagate(e)); } }
throw LOGGER.logExceptionAsError(Exceptions.propagate(e));
public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); final SwaggerMethodParser methodParser = getMethodParser(method); HttpRequest request; try { request = createHttpRequest(methodParser, args); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); HttpDecodedResponse decodedResponse = null; Throwable throwable = null; try { context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLengthSync(request)); } final HttpResponse response = send(request, context); decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (Exception e) { throwable = e; if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } finally { if (decodedResponse != null || throwable != null) { endTracingSpan(decodedResponse, throwable, context); } } }
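To make the reviewer's point above concrete, here is a minimal, self-contained sketch of the pattern: in a synchronous call path a checked `IOException` is rethrown as the JDK's `java.io.UncheckedIOException` rather than being routed through Reactor's `Exceptions.propagate`. The `serializeBody` helper and class name below are placeholders standing in for a step like `createHttpRequest`, not the Azure SDK API.

```java
import java.io.IOException;
import java.io.UncheckedIOException;

public final class SyncErrorPropagationSketch {

    // Placeholder for a checked-serialization step such as createHttpRequest(...).
    private static String serializeBody(Object body) throws IOException {
        if (body == null) {
            throw new IOException("body must not be null");
        }
        return body.toString();
    }

    // Synchronous call path: the checked IOException is rethrown as UncheckedIOException
    // instead of reactor.core.Exceptions.propagate, so the caller sees a plain unchecked
    // exception with the original cause attached and no Reactor type in the stack.
    public static String invokeSync(Object body) {
        try {
            return serializeBody(body);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }

    public static void main(String[] args) {
        System.out.println(invokeSync("payload")); // prints "payload"
        try {
            invokeSync(null);
        } catch (UncheckedIOException e) {
            System.out.println("caught: " + e.getCause().getMessage());
        }
    }
}
```

The cause chain is preserved, so callers can still unwrap the original `IOException` when they need it.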
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSynchronously(request, contextData); } @Override @SuppressWarnings("deprecation") void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(Exceptions.propagate(new Exception("'ResumeOperation' isn't supported."))); } } static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static Flux<ByteBuffer> validateLength(final HttpRequest request) { final Flux<ByteBuffer> bbFlux = request.getBody(); if (bbFlux == null) { return Flux.empty(); } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); return Flux.defer(() -> { final long[] currentTotalLength = new long[1]; return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> { if (buffer == null) { return; } if (buffer == VALIDATION_BUFFER) { if (expectedLength != currentTotalLength[0]) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); } else { sink.complete(); } return; } currentTotalLength[0] += buffer.remaining(); if (currentTotalLength[0] > expectedLength) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); return; } sink.next(buffer); }); }); } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getContent(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); long length = binaryData.getLength(); if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, binaryData.getLength(), expectedLength), binaryData.getLength(), expectedLength); } return binaryData; } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. 
* * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setContent(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setContent(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception 
instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsByteArray().block(); if (responseBytes == null || responseBytes.length == 0) { throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null)); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().getBody().ignoreElements().block(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw new RuntimeException("Cannot find suitable constructor for class " + cls); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getContent().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getContent(); } else { result = response.getDecodedBody((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(Signal<HttpDecodedResponse> signal) { if (!TracerProxy.isTracingEnabled()) { return; } if (signal.isOnComplete() || signal.isOnSubscribe()) { return; } ContextView context = signal.getContextView(); Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT"); boolean disableTracing = Boolean.TRUE.equals(context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false)); if (!tracingContext.isPresent() || disableTracing) { return; } int statusCode = 0; HttpDecodedResponse httpDecodedResponse; Throwable throwable = null; if (signal.hasValue()) { httpDecodedResponse = signal.get(); statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (signal.hasError()) { throwable = signal.getThrowable(); if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext.get()); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. 
* * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. * @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSync(request, contextData); } @Override @SuppressWarnings("deprecation") void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); } } static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getBodyAsBinaryData(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { InputStreamContent inputStreamContent = ((InputStreamContent) bdc); InputStream inputStream = inputStreamContent.toStream(); LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(inputStream, expectedLength); return BinaryData.fromStream(lengthValidatingInputStream); } else { byte[] b = (bdc).toBytes(); long len = b.length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } return BinaryData.fromBytes(b); } } else { if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, length, expectedLength), length, expectedLength); } return binaryData; } } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. 
* * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setBody(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception 
instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } Exception e; byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsBinaryData().toBytes(); if (responseBytes == null || responseBytes.length == 0) { e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody); } if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().close(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find suitable constructor for class " + cls)); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getBodyAsBinaryData().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getBodyAsBinaryData(); } else { result = response.getDecodedBodySync((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(HttpDecodedResponse httpDecodedResponse, Throwable throwable, Context tracingContext) { if (tracingContext == null) { return; } Object disableTracingValue = (tracingContext.getData(Tracer.DISABLE_TRACING_KEY).isPresent() ? tracingContext.getData(Tracer.DISABLE_TRACING_KEY).get() : null); boolean disableTracing = Boolean.TRUE.equals(disableTracingValue != null ? disableTracingValue : false); if (disableTracing) { return; } int statusCode = 0; if (httpDecodedResponse != null) { statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (throwable != null) { if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. 
* @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
```suggestion throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); ```
void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(Exceptions.propagate(new Exception("'ResumeOperation' isn't supported."))); } }
throw LOGGER.logExceptionAsError(Exceptions.propagate(new Exception("'ResumeOperation' isn't supported.")));
void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); } }
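A small, self-contained sketch of what the suggestion above buys the caller; it uses a local stand-in annotation rather than `com.azure.core.annotation.ResumeOperation`, and the interface and class names are made up for illustration. The check fails fast with a plain `IllegalStateException`, so users of the sync proxy never see a Reactor-wrapped checked `Exception`.

```java
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;

public final class ResumeOperationCheckSketch {

    // Hypothetical stand-in for com.azure.core.annotation.ResumeOperation.
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.METHOD)
    @interface ResumeOperation { }

    interface SampleService {
        @ResumeOperation
        void resumeSomething();

        void plainCall();
    }

    // Mirrors the reviewed check: an unsupported annotation is rejected with an
    // unchecked IllegalStateException instead of Exceptions.propagate(new Exception(...)).
    static void validateResumeOperationIsNotPresent(Method method) {
        if (method.isAnnotationPresent(ResumeOperation.class)) {
            throw new IllegalStateException("'ResumeOperation' isn't supported.");
        }
    }

    public static void main(String[] args) throws NoSuchMethodException {
        validateResumeOperationIsNotPresent(SampleService.class.getMethod("plainCall")); // passes
        try {
            validateResumeOperationIsNotPresent(SampleService.class.getMethod("resumeSomething"));
        } catch (IllegalStateException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}
```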
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSynchronously(request, contextData); } @Override public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); try { final SwaggerMethodParser methodParser = getMethodParser(method); final HttpRequest request = createHttpRequest(methodParser, args); Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLength(request)); } final HttpResponse response = send(request, context); HttpDecodedResponse decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (IOException e) { throw LOGGER.logExceptionAsError(Exceptions.propagate(e)); } } @SuppressWarnings("deprecation") static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static Flux<ByteBuffer> validateLength(final HttpRequest request) { final Flux<ByteBuffer> bbFlux = request.getBody(); if (bbFlux == null) { return Flux.empty(); } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); return Flux.defer(() -> { final long[] currentTotalLength = new long[1]; return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> { if (buffer == null) { return; } if (buffer == VALIDATION_BUFFER) { if (expectedLength != currentTotalLength[0]) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); } else { sink.complete(); } return; } currentTotalLength[0] += buffer.remaining(); if (currentTotalLength[0] > expectedLength) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); return; } sink.next(buffer); }); }); } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getContent(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); long length = binaryData.getLength(); if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, binaryData.getLength(), expectedLength), binaryData.getLength(), expectedLength); } return binaryData; } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. 
* @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. * * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setContent(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setContent(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = 
(String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsByteArray().block(); if (responseBytes == null || responseBytes.length == 0) { throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null)); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().getBody().ignoreElements().block(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw new RuntimeException("Cannot find suitable constructor for class " + cls); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getContent().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getContent(); } else { result = response.getDecodedBody((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(Signal<HttpDecodedResponse> signal) { if (!TracerProxy.isTracingEnabled()) { return; } if (signal.isOnComplete() || signal.isOnSubscribe()) { return; } ContextView context = signal.getContextView(); Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT"); boolean disableTracing = Boolean.TRUE.equals(context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false)); if (!tracingContext.isPresent() || disableTracing) { return; } int statusCode = 0; HttpDecodedResponse httpDecodedResponse; Throwable throwable = null; if (signal.hasValue()) { httpDecodedResponse = signal.get(); statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (signal.hasError()) { throwable = signal.getThrowable(); if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext.get()); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. 
* * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. * @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSync(request, contextData); } @Override public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); final SwaggerMethodParser methodParser = getMethodParser(method); HttpRequest request; try { request = createHttpRequest(methodParser, args); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); HttpDecodedResponse decodedResponse = null; Throwable throwable = null; try { context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLengthSync(request)); } final HttpResponse response = send(request, context); decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (Exception e) { throwable = e; if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } finally { if (decodedResponse != null || throwable != null) { endTracingSpan(decodedResponse, throwable, context); } } } @SuppressWarnings("deprecation") static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getBodyAsBinaryData(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { InputStreamContent inputStreamContent = ((InputStreamContent) bdc); InputStream inputStream = inputStreamContent.toStream(); LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(inputStream, expectedLength); return BinaryData.fromStream(lengthValidatingInputStream); } else { byte[] b = (bdc).toBytes(); long len = b.length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } return BinaryData.fromBytes(b); } } else { if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, length, expectedLength), length, expectedLength); } return binaryData; } } /** * Starts the tracing span for the current service call, additionally set metadata attributes 
on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. * * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setBody(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if 
(bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } Exception e; byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsBinaryData().toBytes(); if (responseBytes == null || responseBytes.length == 0) { e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody); } if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().close(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find suitable constructor for class " + cls)); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getBodyAsBinaryData().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getBodyAsBinaryData(); } else { result = response.getDecodedBodySync((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(HttpDecodedResponse httpDecodedResponse, Throwable throwable, Context tracingContext) { if (tracingContext == null) { return; } Object disableTracingValue = (tracingContext.getData(Tracer.DISABLE_TRACING_KEY).isPresent() ? tracingContext.getData(Tracer.DISABLE_TRACING_KEY).get() : null); boolean disableTracing = Boolean.TRUE.equals(disableTracingValue != null ? disableTracingValue : false); if (disableTracing) { return; } int statusCode = 0; if (httpDecodedResponse != null) { statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (throwable != null) { if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. 
* @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
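The rewritten synchronous invoke shown in the surrounding context drops the reactor Signal-based span handling in favor of a plain try/catch/finally that records either the response status code or the thrown exception before ending the span. The sketch below is a stripped-down, hypothetical illustration of that control flow only; the Span and HttpCall types are invented stand-ins for this example and are not azure-core's TracerProxy API.

// Hypothetical stand-ins for illustration; azure-core's real tracing goes through TracerProxy and Context.
final class SyncSpanPatternSketch {

    /** Minimal span abstraction, for illustration only. */
    interface Span {
        void end(int statusCode, Throwable error);
    }

    /** A synchronous HTTP call that reports its status code. */
    interface HttpCall {
        int execute() throws Exception;
    }

    /**
     * Runs a synchronous call and guarantees the span is ended exactly once,
     * with the status code on success or the throwable on failure.
     */
    static int callWithSpan(Span span, HttpCall call) {
        int statusCode = 0;
        Throwable failure = null;
        try {
            statusCode = call.execute();
            return statusCode;
        } catch (Exception e) {
            failure = e;
            // Synchronous proxies rethrow as unchecked exceptions for the caller.
            throw (e instanceof RuntimeException) ? (RuntimeException) e : new RuntimeException(e);
        } finally {
            span.end(statusCode, failure);
        }
    }

    public static void main(String[] args) {
        Span consoleSpan = (code, error) ->
            System.out.println("span ended: status=" + code + ", error=" + error);
        int status = callWithSpan(consoleSpan, () -> 200); // prints "status=200, error=null"
        System.out.println("call returned " + status);
    }
}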
should we be calling into `validateLengthSync` here?
public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); try { final SwaggerMethodParser methodParser = getMethodParser(method); final HttpRequest request = createHttpRequest(methodParser, args); Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLength(request)); } final HttpResponse response = send(request, context); HttpDecodedResponse decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (IOException e) { throw LOGGER.logExceptionAsError(Exceptions.propagate(e)); } }
}
public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); final SwaggerMethodParser methodParser = getMethodParser(method); HttpRequest request; try { request = createHttpRequest(methodParser, args); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); HttpDecodedResponse decodedResponse = null; Throwable throwable = null; try { context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLengthSync(request)); } final HttpResponse response = send(request, context); decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (Exception e) { throwable = e; if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } finally { if (decodedResponse != null || throwable != null) { endTracingSpan(decodedResponse, throwable, context); } } }
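The review comment above asks whether the synchronous invoke should call `validateLengthSync` rather than the Flux-based `validateLength`; the updated method body does so, validating an in-memory body eagerly against the declared Content-Length and wrapping a streaming body so it is validated as it is consumed, instead of building a reactive pipeline on a code path that never subscribes to it. The plain-Java sketch below illustrates that idea under stated assumptions: the SyncLengthCheck and CountingLengthStream names are invented for this example and are not azure-core's LengthValidatingInputStream or BinaryData APIs.

// Illustrative sketch only: a plain-Java analogue of eager length validation on a synchronous request path.
import java.io.ByteArrayInputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;

final class SyncLengthCheck {

    /** Validates an in-memory body immediately against the declared Content-Length. */
    static byte[] validateBytes(byte[] body, long expectedLength) {
        if (body.length > expectedLength) {
            throw new IllegalStateException("Request body emitted " + body.length
                + " bytes, more than the expected " + expectedLength + " bytes.");
        }
        return body;
    }

    /** Wraps a streaming body so the length is validated lazily as it is consumed. */
    static InputStream validateStream(InputStream body, long expectedLength) {
        return new CountingLengthStream(body, expectedLength);
    }

    /** Counts bytes read and fails when the stream disagrees with the declared length. */
    private static final class CountingLengthStream extends FilterInputStream {
        private final long expected;
        private long read;

        CountingLengthStream(InputStream in, long expected) {
            super(in);
            this.expected = expected;
        }

        @Override
        public int read() throws IOException {
            int b = super.read();
            if (b >= 0) {
                check(1);
            } else {
                checkEnd();
            }
            return b;
        }

        @Override
        public int read(byte[] buf, int off, int len) throws IOException {
            int n = super.read(buf, off, len);
            if (n > 0) {
                check(n);
            } else if (n == -1) {
                checkEnd();
            }
            return n;
        }

        private void check(long n) {
            read += n;
            if (read > expected) {
                throw new IllegalStateException("Request body emitted " + read
                    + " bytes, more than the expected " + expected + " bytes.");
            }
        }

        private void checkEnd() {
            if (read < expected) {
                throw new IllegalStateException("Request body emitted " + read
                    + " bytes, less than the expected " + expected + " bytes.");
            }
        }
    }

    public static void main(String[] args) throws IOException {
        byte[] ok = validateBytes("hello".getBytes(), 5);                 // passes: 5 bytes declared, 5 present
        InputStream in = validateStream(new ByteArrayInputStream(ok), 5); // validated while draining
        while (in.read() != -1) {
            // draining the stream triggers the lazy length checks
        }
    }
}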
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSynchronously(request, contextData); } @Override @SuppressWarnings("deprecation") void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(Exceptions.propagate(new Exception("'ResumeOperation' isn't supported."))); } } static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static Flux<ByteBuffer> validateLength(final HttpRequest request) { final Flux<ByteBuffer> bbFlux = request.getBody(); if (bbFlux == null) { return Flux.empty(); } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); return Flux.defer(() -> { final long[] currentTotalLength = new long[1]; return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> { if (buffer == null) { return; } if (buffer == VALIDATION_BUFFER) { if (expectedLength != currentTotalLength[0]) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); } else { sink.complete(); } return; } currentTotalLength[0] += buffer.remaining(); if (currentTotalLength[0] > expectedLength) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); return; } sink.next(buffer); }); }); } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getContent(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); long length = binaryData.getLength(); if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, binaryData.getLength(), expectedLength), binaryData.getLength(), expectedLength); } return binaryData; } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. 
* * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setContent(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setContent(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception 
instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsByteArray().block(); if (responseBytes == null || responseBytes.length == 0) { throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null)); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().getBody().ignoreElements().block(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw new RuntimeException("Cannot find suitable constructor for class " + cls); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getContent().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getContent(); } else { result = response.getDecodedBody((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(Signal<HttpDecodedResponse> signal) { if (!TracerProxy.isTracingEnabled()) { return; } if (signal.isOnComplete() || signal.isOnSubscribe()) { return; } ContextView context = signal.getContextView(); Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT"); boolean disableTracing = Boolean.TRUE.equals(context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false)); if (!tracingContext.isPresent() || disableTracing) { return; } int statusCode = 0; HttpDecodedResponse httpDecodedResponse; Throwable throwable = null; if (signal.hasValue()) { httpDecodedResponse = signal.get(); statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (signal.hasError()) { throwable = signal.getThrowable(); if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext.get()); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. 
* * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. * @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSync(request, contextData); } @Override @SuppressWarnings("deprecation") void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); } } static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getBodyAsBinaryData(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { InputStreamContent inputStreamContent = ((InputStreamContent) bdc); InputStream inputStream = inputStreamContent.toStream(); LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(inputStream, expectedLength); return BinaryData.fromStream(lengthValidatingInputStream); } else { byte[] b = (bdc).toBytes(); long len = b.length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } return BinaryData.fromBytes(b); } } else { if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, length, expectedLength), length, expectedLength); } return binaryData; } } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. 
* * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setBody(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception 
instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } Exception e; byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsBinaryData().toBytes(); if (responseBytes == null || responseBytes.length == 0) { e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody); } if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().close(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find suitable constructor for class " + cls)); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getBodyAsBinaryData().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getBodyAsBinaryData(); } else { result = response.getDecodedBodySync((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(HttpDecodedResponse httpDecodedResponse, Throwable throwable, Context tracingContext) { if (tracingContext == null) { return; } Object disableTracingValue = (tracingContext.getData(Tracer.DISABLE_TRACING_KEY).isPresent() ? tracingContext.getData(Tracer.DISABLE_TRACING_KEY).get() : null); boolean disableTracing = Boolean.TRUE.equals(disableTracingValue != null ? disableTracingValue : false); if (disableTracing) { return; } int statusCode = 0; if (httpDecodedResponse != null) { statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (throwable != null) { if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. 
* @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
I wonder if a better option is to add metadata to SwaggerMethodParser that tracks whether the called method is synchronous or asynchronous. This may also simplify future work with regard to client telemetry, such as reporting whether the service client method that was called was sync or async. Additionally, it may allow us to keep a single InvocationHandler implementation (RestProxy) instead of maintaining a split path for sync vs. async.
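A minimal sketch of the idea in that comment, assuming a hypothetical helper rather than the real SwaggerMethodParser API: the sync/async flag is derived once from the reflected return type, so a single RestProxy InvocationHandler could branch on it, and the same flag could later feed client telemetry. The names SyncAsyncMetadata, fromMethod and isSynchronous are illustrative assumptions, not existing azure-core members.

import java.lang.reflect.Method;

// Hypothetical metadata holder; not part of azure-core. It captures whether a
// Swagger interface method should take the synchronous or the reactive path.
final class SyncAsyncMetadata {
    private final boolean synchronous;

    private SyncAsyncMetadata(boolean synchronous) {
        this.synchronous = synchronous;
    }

    // Treat a method as asynchronous when its declared return type is a Reactor
    // publisher; everything else is handled synchronously. Comparing by class
    // name keeps this sketch free of a compile-time Reactor dependency.
    static SyncAsyncMetadata fromMethod(Method method) {
        String returnType = method.getReturnType().getName();
        boolean reactive = "reactor.core.publisher.Mono".equals(returnType)
            || "reactor.core.publisher.Flux".equals(returnType);
        return new SyncAsyncMetadata(!reactive);
    }

    boolean isSynchronous() {
        return synchronous;
    }
}

With such a flag cached per method alongside the other parsed metadata, invoke(...) could branch on metadata.isSynchronous() to choose the blocking or the reactive send path instead of dispatching to separate proxy classes.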
public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); try { final SwaggerMethodParser methodParser = getMethodParser(method); final HttpRequest request = createHttpRequest(methodParser, args); Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLength(request)); } final HttpResponse response = send(request, context); HttpDecodedResponse decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (IOException e) { throw LOGGER.logExceptionAsError(Exceptions.propagate(e)); } }
final SwaggerMethodParser methodParser = getMethodParser(method);
public Object invoke(Object proxy, final Method method, Object[] args) { validateResumeOperationIsNotPresent(method); final SwaggerMethodParser methodParser = getMethodParser(method); HttpRequest request; try { request = createHttpRequest(methodParser, args); } catch (IOException e) { throw LOGGER.logExceptionAsError(new UncheckedIOException(e)); } Context context = methodParser.setContext(args); RequestOptions options = methodParser.setRequestOptions(args); context = mergeRequestOptionsContext(context, options); context = context.addData("caller-method", methodParser.getFullyQualifiedMethodName()) .addData("azure-eagerly-read-response", shouldEagerlyReadResponse(methodParser.getReturnType())); HttpDecodedResponse decodedResponse = null; Throwable throwable = null; try { context = startTracingSpan(method, context); if (options != null) { options.getRequestCallback().accept(request); } if (request.getBody() != null) { request.setBody(validateLengthSync(request)); } final HttpResponse response = send(request, context); decodedResponse = this.decoder.decodeSync(response, methodParser); return handleRestReturnType(decodedResponse, methodParser, methodParser.getReturnType(), context, options); } catch (Exception e) { throwable = e; if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } finally { if (decodedResponse != null || throwable != null) { endTracingSpan(decodedResponse, throwable, context); } } }
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSynchronously(request, contextData); } @Override @SuppressWarnings("deprecation") void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(Exceptions.propagate(new Exception("'ResumeOperation' isn't supported."))); } } static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static Flux<ByteBuffer> validateLength(final HttpRequest request) { final Flux<ByteBuffer> bbFlux = request.getBody(); if (bbFlux == null) { return Flux.empty(); } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); return Flux.defer(() -> { final long[] currentTotalLength = new long[1]; return Flux.concat(bbFlux, Flux.just(VALIDATION_BUFFER)).handle((buffer, sink) -> { if (buffer == null) { return; } if (buffer == VALIDATION_BUFFER) { if (expectedLength != currentTotalLength[0]) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_SMALL, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); } else { sink.complete(); } return; } currentTotalLength[0] += buffer.remaining(); if (currentTotalLength[0] > expectedLength) { sink.error(new UnexpectedLengthException(String.format(BODY_TOO_LARGE, currentTotalLength[0], expectedLength), currentTotalLength[0], expectedLength)); return; } sink.next(buffer); }); }); } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getContent(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); long length = binaryData.getLength(); if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, binaryData.getLength(), expectedLength), binaryData.getLength(), expectedLength); } return binaryData; } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. 
* * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setContent(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setContent(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception 
instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsByteArray().block(); if (responseBytes == null || responseBytes.length == 0) { throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null)); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); throw new RuntimeException(instantiateUnexpectedException( methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().getBody().ignoreElements().block(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw new RuntimeException("Cannot find suitable constructor for class " + cls); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getContent().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getContent(); } else { result = response.getDecodedBody((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(Signal<HttpDecodedResponse> signal) { if (!TracerProxy.isTracingEnabled()) { return; } if (signal.isOnComplete() || signal.isOnSubscribe()) { return; } ContextView context = signal.getContextView(); Optional<Context> tracingContext = context.getOrEmpty("TRACING_CONTEXT"); boolean disableTracing = Boolean.TRUE.equals(context.getOrDefault(Tracer.DISABLE_TRACING_KEY, false)); if (!tracingContext.isPresent() || disableTracing) { return; } int statusCode = 0; HttpDecodedResponse httpDecodedResponse; Throwable throwable = null; if (signal.hasValue()) { httpDecodedResponse = signal.get(); statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (signal.hasError()) { throwable = signal.getThrowable(); if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext.get()); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. 
* * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. * @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
class SyncRestProxy implements InvocationHandler { private static final ByteBuffer VALIDATION_BUFFER = ByteBuffer.allocate(0); private static final String BODY_TOO_LARGE = "Request body emitted %d bytes, more than the expected %d bytes."; private static final String BODY_TOO_SMALL = "Request body emitted %d bytes, less than the expected %d bytes."; private static final String MUST_IMPLEMENT_PAGE_ERROR = "Unable to create PagedResponse<T>. Body must be of a type that implements: " + Page.class; private static final ResponseConstructorsCache RESPONSE_CONSTRUCTORS_CACHE = new ResponseConstructorsCache(); private static final ClientLogger LOGGER = new ClientLogger(SyncRestProxy.class); private final HttpPipeline httpPipeline; private final SerializerAdapter serializer; private final SwaggerInterfaceParser interfaceParser; private final HttpResponseDecoder decoder; /** * Create a RestProxy. * * @param httpPipeline the HttpPipelinePolicy and HttpClient httpPipeline that will be used to send HTTP requests. * @param serializer the serializer that will be used to convert response bodies to POJOs. * @param interfaceParser the parser that contains information about the interface describing REST API methods that * this RestProxy "implements". */ private SyncRestProxy(HttpPipeline httpPipeline, SerializerAdapter serializer, SwaggerInterfaceParser interfaceParser) { this.httpPipeline = httpPipeline; this.serializer = serializer; this.interfaceParser = interfaceParser; this.decoder = new HttpResponseDecoder(this.serializer); } /** * Get the SwaggerMethodParser for the provided method. The Method must exist on the Swagger interface that this * RestProxy was created to "implement". * * @param method the method to get a SwaggerMethodParser for * @return the SwaggerMethodParser for the provided method */ private SwaggerMethodParser getMethodParser(Method method) { return interfaceParser.getMethodParser(method); } /** * Send the provided request asynchronously, applying any request policies provided to the HttpClient instance. 
* * @param request the HTTP request to send * @param contextData the context * @return a {@link Mono} that emits HttpResponse asynchronously */ public HttpResponse send(HttpRequest request, Context contextData) { return httpPipeline.sendSync(request, contextData); } @Override @SuppressWarnings("deprecation") void validateResumeOperationIsNotPresent(Method method) { if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); } } static Context mergeRequestOptionsContext(Context context, RequestOptions options) { if (options == null) { return context; } Context optionsContext = options.getContext(); if (optionsContext != null && optionsContext != Context.NONE) { context = CoreUtils.mergeContexts(context, optionsContext); } return context; } static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getBodyAsBinaryData(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { InputStreamContent inputStreamContent = ((InputStreamContent) bdc); InputStream inputStream = inputStreamContent.toStream(); LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(inputStream, expectedLength); return BinaryData.fromStream(lengthValidatingInputStream); } else { byte[] b = (bdc).toBytes(); long len = b.length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } return BinaryData.fromBytes(b); } } else { if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, length, expectedLength), length, expectedLength); } return binaryData; } } /** * Starts the tracing span for the current service call, additionally set metadata attributes on the span by passing * additional context information. * * @param method Service method being called. * @param context Context information about the current service call. * @return The updated context containing the span context. */ private Context startTracingSpan(Method method, Context context) { if (!TracerProxy.isTracingEnabled()) { return context; } if ((boolean) context.getData(Tracer.DISABLE_TRACING_KEY).orElse(false)) { return context; } String spanName = interfaceParser.getServiceName() + "." + method.getName(); context = TracerProxy.setSpanName(spanName, context); return TracerProxy.start(spanName, context); } /** * Create a HttpRequest for the provided Swagger method using the provided arguments. 
* * @param methodParser the Swagger method parser to use * @param args the arguments to use to populate the method's annotation values * @return a HttpRequest * @throws IOException thrown if the body contents cannot be serialized */ private HttpRequest createHttpRequest(SwaggerMethodParser methodParser, Object[] args) throws IOException { final String path = methodParser.setPath(args); final UrlBuilder pathUrlBuilder = UrlBuilder.parse(path); final UrlBuilder urlBuilder; if (pathUrlBuilder.getScheme() != null) { urlBuilder = pathUrlBuilder; } else { urlBuilder = new UrlBuilder(); methodParser.setSchemeAndHost(args, urlBuilder); if (path != null && !path.isEmpty() && !"/".equals(path)) { String hostPath = urlBuilder.getPath(); if (hostPath == null || hostPath.isEmpty() || "/".equals(hostPath) || path.contains(": urlBuilder.setPath(path); } else { if (path.startsWith("/")) { urlBuilder.setPath(hostPath + path); } else { urlBuilder.setPath(hostPath + "/" + path); } } } } methodParser.setEncodedQueryParameters(args, urlBuilder); final URL url = urlBuilder.toUrl(); final HttpRequest request = configRequest(new HttpRequest(methodParser.getHttpMethod(), url), methodParser, args); HttpHeaders httpHeaders = request.getHeaders(); methodParser.setHeaders(args, httpHeaders); return request; } @SuppressWarnings("unchecked") private HttpRequest configRequest(final HttpRequest request, final SwaggerMethodParser methodParser, final Object[] args) throws IOException { final Object bodyContentObject = methodParser.setBody(args); if (bodyContentObject == null) { request.getHeaders().set("Content-Length", "0"); } else { String contentType = methodParser.getBodyContentType(); if (contentType == null || contentType.isEmpty()) { if (bodyContentObject instanceof byte[] || bodyContentObject instanceof String) { contentType = ContentType.APPLICATION_OCTET_STREAM; } else { contentType = ContentType.APPLICATION_JSON; } } request.getHeaders().set("Content-Type", contentType); if (bodyContentObject instanceof BinaryData) { BinaryData binaryData = (BinaryData) bodyContentObject; if (binaryData.getLength() != null) { request.setHeader("Content-Length", binaryData.getLength().toString()); } request.setBody(binaryData); return request; } boolean isJson = false; final String[] contentTypeParts = contentType.split(";"); for (final String contentTypePart : contentTypeParts) { if (contentTypePart.trim().equalsIgnoreCase(ContentType.APPLICATION_JSON)) { isJson = true; break; } } if (isJson) { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.JSON, stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(BinaryData.fromStream(new ByteArrayInputStream(stream.toByteArray(), 0, stream.size()))); } else if (bodyContentObject instanceof byte[]) { request.setBody((byte[]) bodyContentObject); } else if (bodyContentObject instanceof String) { final String bodyContentString = (String) bodyContentObject; if (!bodyContentString.isEmpty()) { request.setBody(bodyContentString); } } else if (bodyContentObject instanceof ByteBuffer) { request.setBody(((ByteBuffer) bodyContentObject).array()); } else { ByteArrayOutputStream stream = new AccessibleByteArrayOutputStream(); serializer.serialize(bodyContentObject, SerializerEncoding.fromHeaders(request.getHeaders()), stream); request.setHeader("Content-Length", String.valueOf(stream.size())); request.setBody(stream.toByteArray()); } } return request; } private static Exception 
instantiateUnexpectedException(final UnexpectedExceptionInformation exception, final HttpResponse httpResponse, final byte[] responseContent, final Object responseDecodedContent) { final int responseStatusCode = httpResponse.getStatusCode(); final String contentType = httpResponse.getHeaderValue("Content-Type"); final String bodyRepresentation; if ("application/octet-stream".equalsIgnoreCase(contentType)) { bodyRepresentation = "(" + httpResponse.getHeaderValue("Content-Length") + "-byte body)"; } else { bodyRepresentation = responseContent == null || responseContent.length == 0 ? "(empty body)" : "\"" + new String(responseContent, StandardCharsets.UTF_8) + "\""; } Exception result; try { final Constructor<? extends HttpResponseException> exceptionConstructor = exception.getExceptionType() .getConstructor(String.class, HttpResponse.class, exception.getExceptionBodyType()); result = exceptionConstructor.newInstance("Status code " + responseStatusCode + ", " + bodyRepresentation, httpResponse, responseDecodedContent); } catch (ReflectiveOperationException e) { String message = "Status code " + responseStatusCode + ", but an instance of " + exception.getExceptionType().getCanonicalName() + " cannot be created." + " Response body: " + bodyRepresentation; result = new IOException(message, e); } return result; } /** * Create a publisher that (1) emits error if the provided response {@code decodedResponse} has 'disallowed status * code' OR (2) emits provided response if it's status code ia allowed. * * 'disallowed status code' is one of the status code defined in the provided SwaggerMethodParser or is in the int[] * of additional allowed status codes. * * @param decodedResponse The HttpResponse to check. * @param methodParser The method parser that contains information about the service interface method that initiated * the HTTP request. * @return An async-version of the provided decodedResponse. 
*/ private HttpDecodedResponse ensureExpectedStatus(final HttpDecodedResponse decodedResponse, final SwaggerMethodParser methodParser, RequestOptions options) { final int responseStatusCode = decodedResponse.getSourceResponse().getStatusCode(); if (methodParser.isExpectedResponseStatusCode(responseStatusCode) || (options != null && options.getErrorOptions().contains(ErrorOptions.NO_THROW))) { return decodedResponse; } Exception e; byte[] responseBytes = decodedResponse.getSourceResponse().getBodyAsBinaryData().toBytes(); if (responseBytes == null || responseBytes.length == 0) { e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), null, null); } else { Object decodedBody = decodedResponse.getDecodedBodySync(responseBytes); e = instantiateUnexpectedException(methodParser.getUnexpectedException(responseStatusCode), decodedResponse.getSourceResponse(), responseBytes, decodedBody); } if (e instanceof RuntimeException) { throw LOGGER.logExceptionAsError((RuntimeException) e); } else { throw LOGGER.logExceptionAsError(new RuntimeException(e)); } } private Object handleRestResponseReturnType(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { if (TypeUtil.isTypeOrSubTypeOf(entityType, Response.class)) { final Type bodyType = TypeUtil.getRestResponseBodyType(entityType); if (TypeUtil.isTypeOrSubTypeOf(bodyType, Void.class)) { response.getSourceResponse().close(); return createResponseSync(response, entityType, null); } else { Object bodyAsObject = handleBodyReturnTypeSync(response, methodParser, bodyType); Response<?> httpResponse = createResponseSync(response, entityType, bodyAsObject); if (httpResponse == null) { return createResponseSync(response, entityType, null); } return httpResponse; } } else { return handleBodyReturnTypeSync(response, methodParser, entityType); } } @SuppressWarnings("unchecked") private Response<?> createResponseSync(HttpDecodedResponse response, Type entityType, Object bodyAsObject) { final Class<? extends Response<?>> cls = (Class<? 
extends Response<?>>) TypeUtil.getRawClass(entityType); final HttpResponse httpResponse = response.getSourceResponse(); final HttpRequest request = httpResponse.getRequest(); final int statusCode = httpResponse.getStatusCode(); final HttpHeaders headers = httpResponse.getHeaders(); final Object decodedHeaders = response.getDecodedHeaders(); if (cls.equals(Response.class)) { return cls.cast(new ResponseBase<>(request, statusCode, headers, bodyAsObject, decodedHeaders)); } else if (cls.equals(PagedResponse.class)) { if (bodyAsObject != null && !TypeUtil.isTypeOrSubTypeOf(bodyAsObject.getClass(), Page.class)) { throw LOGGER.logExceptionAsError(new RuntimeException(MUST_IMPLEMENT_PAGE_ERROR)); } else if (bodyAsObject == null) { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, null, null, decodedHeaders))); } else { return (cls.cast(new PagedResponseBase<>(request, statusCode, headers, (Page<?>) bodyAsObject, decodedHeaders))); } } MethodHandle ctr = RESPONSE_CONSTRUCTORS_CACHE.get(cls); if (ctr == null) { throw LOGGER.logExceptionAsError(new RuntimeException("Cannot find suitable constructor for class " + cls)); } return RESPONSE_CONSTRUCTORS_CACHE.invokeSync(ctr, response, bodyAsObject); } private Object handleBodyReturnTypeSync(final HttpDecodedResponse response, final SwaggerMethodParser methodParser, final Type entityType) { final int responseStatusCode = response.getSourceResponse().getStatusCode(); final HttpMethod httpMethod = methodParser.getHttpMethod(); final Type returnValueWireType = methodParser.getReturnValueWireType(); final Object result; if (httpMethod == HttpMethod.HEAD && (TypeUtil.isTypeOrSubTypeOf( entityType, Boolean.TYPE) || TypeUtil.isTypeOrSubTypeOf(entityType, Boolean.class))) { boolean isSuccess = (responseStatusCode / 100) == 2; result = isSuccess; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, byte[].class)) { byte[] responseBodyBytes = response.getSourceResponse().getBodyAsBinaryData().toBytes(); if (returnValueWireType == Base64Url.class) { responseBodyBytes = new Base64Url(responseBodyBytes).decodedBytes(); } result = responseBodyBytes; } else if (TypeUtil.isTypeOrSubTypeOf(entityType, BinaryData.class)) { result = response.getSourceResponse().getBodyAsBinaryData(); } else { result = response.getDecodedBodySync((byte[]) null); } return result; } /** * Handle the provided asynchronous HTTP response and return the deserialized value. * * @param httpDecodedResponse the asynchronous HTTP response to the original HTTP request * @param methodParser the SwaggerMethodParser that the request originates from * @param returnType the type of value that will be returned * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return the deserialized result */ private Object handleRestReturnType(final HttpDecodedResponse httpDecodedResponse, final SwaggerMethodParser methodParser, final Type returnType, final Context context, final RequestOptions options) { final HttpDecodedResponse expectedResponse = ensureExpectedStatus(httpDecodedResponse, methodParser, options); final Object result; if (TypeUtil.isTypeOrSubTypeOf(returnType, void.class) || TypeUtil.isTypeOrSubTypeOf(returnType, Void.class)) { result = expectedResponse; } else { result = handleRestResponseReturnType(httpDecodedResponse, methodParser, returnType); } return result; } private static void endTracingSpan(HttpDecodedResponse httpDecodedResponse, Throwable throwable, Context tracingContext) { if (tracingContext == null) { return; } Object disableTracingValue = (tracingContext.getData(Tracer.DISABLE_TRACING_KEY).isPresent() ? tracingContext.getData(Tracer.DISABLE_TRACING_KEY).get() : null); boolean disableTracing = Boolean.TRUE.equals(disableTracingValue != null ? disableTracingValue : false); if (disableTracing) { return; } int statusCode = 0; if (httpDecodedResponse != null) { statusCode = httpDecodedResponse.getSourceResponse().getStatusCode(); } else if (throwable != null) { if (throwable instanceof HttpResponseException) { HttpResponseException exception = (HttpResponseException) throwable; statusCode = exception.getResponse().getStatusCode(); } } TracerProxy.end(statusCode, throwable, tracingContext); } /** * Create an instance of the default serializer. * * @return the default serializer */ private static SerializerAdapter createDefaultSerializer() { return JacksonAdapter.createDefaultSerializerAdapter(); } /** * Create the default HttpPipeline. * * @return the default HttpPipeline */ private static HttpPipeline createDefaultPipeline() { List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new UserAgentPolicy()); policies.add(new RetryPolicy()); policies.add(new CookiePolicy()); return new HttpPipelineBuilder() .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface) { return create(swaggerInterface, createDefaultPipeline(), createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipeline that will be used to send Http requests * @param <A> the type of the Swagger interface * @return a proxy implementation of the provided Swagger interface */ public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline) { return create(swaggerInterface, httpPipeline, createDefaultSerializer()); } /** * Create a proxy implementation of the provided Swagger interface. * * @param swaggerInterface the Swagger interface to provide a proxy implementation for * @param httpPipeline the HttpPipelinePolicy and HttpClient pipline that will be used to send Http requests * @param serializer the serializer that will be used to convert POJOs to and from request and response bodies * @param <A> the type of the Swagger interface. 
* @return a proxy implementation of the provided Swagger interface */ @SuppressWarnings("unchecked") public static <A> A create(Class<A> swaggerInterface, HttpPipeline httpPipeline, SerializerAdapter serializer) { final SwaggerInterfaceParser interfaceParser = new SwaggerInterfaceParser(swaggerInterface, serializer); final SyncRestProxy restProxy = new SyncRestProxy(httpPipeline, serializer, interfaceParser); return (A) Proxy.newProxyInstance(swaggerInterface.getClassLoader(), new Class<?>[]{swaggerInterface}, restProxy); } }
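The javadoc above describes the static create overloads; a minimal usage sketch, where MyService is a hypothetical Swagger-annotated interface and the pipeline mirrors the default one built by createDefaultPipeline():
```java
// Sketch only: MyService is a hypothetical @Host/@ServiceInterface-annotated interface.
HttpPipeline pipeline = new HttpPipelineBuilder()
    .policies(new UserAgentPolicy(), new RetryPolicy(), new CookiePolicy())
    .build();
MyService service = SyncRestProxy.create(MyService.class, pipeline);
```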
Don't we also need to check if length is less than the expected length?
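A minimal sketch of the undershoot check this comment asks about, applicable when the body length is known up front; BODY_TOO_SMALL is a hypothetical message constant mirroring BODY_TOO_LARGE:
```java
// Hypothetical complement to the overshoot check: fail when the body is shorter
// than the declared Content-Length. BODY_TOO_SMALL is an assumed constant.
if (length < expectedLength) {
    throw new UnexpectedLengthException(String.format(BODY_TOO_SMALL, length, expectedLength),
        length, expectedLength);
}
```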
static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getContent(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); long length = binaryData.getLength(); if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, binaryData.getLength(), expectedLength), binaryData.getLength(), expectedLength); } return binaryData; }
if (length > expectedLength) {
static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getBodyAsBinaryData(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { InputStreamContent inputStreamContent = ((InputStreamContent) bdc); InputStream inputStream = inputStreamContent.toStream(); LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(inputStream, expectedLength); return BinaryData.fromStream(lengthValidatingInputStream); } else { byte[] b = (bdc).toBytes(); long len = b.length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } return BinaryData.fromBytes(b); } } else { if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, length, expectedLength), length, expectedLength); } return binaryData; } }
class is if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(Exceptions.propagate(new Exception("'ResumeOperation' isn't supported."))); }
class is if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); }
we shouldn't eagerly read the stream here. Rather, substitute the request body with a wrapped stream so the validation happens lazily when the stream is read. See https://github.com/Azure/azure-sdk-for-java/blob/400e90a8ceced0f0ad5d586532db6637d29b45d6/sdk/core/azure-core/src/main/java/com/azure/core/implementation/util/LengthValidatingInputStream.java#L48.
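A small sketch of the lazy behavior this suggests, assuming LengthValidatingInputStream reports a mismatch as an UnexpectedLengthException while the stream is being consumed rather than when the request is built:
```java
// Illustrative only: nothing is read (or validated) until the stream is consumed.
// (IOException handling omitted for brevity.)
InputStream actual = new ByteArrayInputStream(new byte[8]);          // actual body: 8 bytes
InputStream wrapped = new LengthValidatingInputStream(actual, 16);   // declared: 16 bytes
byte[] buffer = new byte[32];
while (wrapped.read(buffer) != -1) {
    // reads succeed while data remains; the mismatch surfaces at end-of-stream
    // as an UnexpectedLengthException instead of via an eager readAllBytes() call.
}
```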
static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getContent(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(((InputStreamContent) bdc).toStream(), expectedLength); try { lengthValidatingInputStream.readAllBytes(); } catch (IOException e) { throw new UncheckedIOException(e); } } else { long len = (bdc).toBytes().length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } } } return binaryData; }
lengthValidatingInputStream.readAllBytes();
static BinaryData validateLengthSync(final HttpRequest request) { final BinaryData binaryData = request.getBodyAsBinaryData(); if (binaryData == null) { return binaryData; } final long expectedLength = Long.parseLong(request.getHeaders().getValue("Content-Length")); Long length = binaryData.getLength(); BinaryDataContent bdc = BinaryDataHelper.getContent(binaryData); if (length == null) { if (bdc instanceof FluxByteBufferContent) { throw new IllegalStateException("Flux Byte Buffer is not supported in Synchronous Rest Proxy."); } else if (bdc instanceof InputStreamContent) { InputStreamContent inputStreamContent = ((InputStreamContent) bdc); InputStream inputStream = inputStreamContent.toStream(); LengthValidatingInputStream lengthValidatingInputStream = new LengthValidatingInputStream(inputStream, expectedLength); return BinaryData.fromStream(lengthValidatingInputStream); } else { byte[] b = (bdc).toBytes(); long len = b.length; if (len > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, len, expectedLength), len, expectedLength); } return BinaryData.fromBytes(b); } } else { if (length > expectedLength) { throw new UnexpectedLengthException(String.format(BODY_TOO_LARGE, length, expectedLength), length, expectedLength); } return binaryData; } }
class is if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); }
class is if (method.isAnnotationPresent(com.azure.core.annotation.ResumeOperation.class)) { throw LOGGER.logExceptionAsError(new IllegalStateException("'ResumeOperation' isn't supported.")); }
should we expose this as an option or configuration property? In case somebody took a dependency on this somehow.
public HttpClient build() { OkHttpClient.Builder httpClientBuilder = this.okHttpClient == null ? new OkHttpClient.Builder() : this.okHttpClient.newBuilder(); for (Interceptor interceptor : this.networkInterceptors) { httpClientBuilder = httpClientBuilder.addNetworkInterceptor(interceptor); } httpClientBuilder = httpClientBuilder .connectTimeout(getTimeoutMillis(connectionTimeout, DEFAULT_CONNECT_TIMEOUT), TimeUnit.MILLISECONDS) .writeTimeout(getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT), TimeUnit.MILLISECONDS) .readTimeout(getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), TimeUnit.MILLISECONDS); if (this.connectionPool != null) { httpClientBuilder = httpClientBuilder.connectionPool(connectionPool); } if (this.dispatcher != null) { httpClientBuilder = httpClientBuilder.dispatcher(dispatcher); } Configuration buildConfiguration = (configuration == null) ? Configuration.getGlobalConfiguration() : configuration; ProxyOptions buildProxyOptions = (proxyOptions == null && buildConfiguration != Configuration.NONE) ? ProxyOptions.fromConfiguration(buildConfiguration, true) : proxyOptions; if (buildProxyOptions != null) { httpClientBuilder = httpClientBuilder.proxySelector(new OkHttpProxySelector( buildProxyOptions.getType().toProxyType(), buildProxyOptions::getAddress, buildProxyOptions.getNonProxyHosts())); if (buildProxyOptions.getUsername() != null) { ProxyAuthenticator proxyAuthenticator = new ProxyAuthenticator(buildProxyOptions.getUsername(), buildProxyOptions.getPassword()); httpClientBuilder = httpClientBuilder.proxyAuthenticator(proxyAuthenticator) .addInterceptor(proxyAuthenticator.getProxyAuthenticationInfoInterceptor()); } } httpClientBuilder.followRedirects(false); return new OkHttpAsyncHttpClient(httpClientBuilder.build()); }
httpClientBuilder.followRedirects(false);
public HttpClient build() { OkHttpClient.Builder httpClientBuilder = this.okHttpClient == null ? new OkHttpClient.Builder() : this.okHttpClient.newBuilder(); for (Interceptor interceptor : this.networkInterceptors) { httpClientBuilder = httpClientBuilder.addNetworkInterceptor(interceptor); } httpClientBuilder = httpClientBuilder .connectTimeout(getTimeoutMillis(connectionTimeout, DEFAULT_CONNECT_TIMEOUT), TimeUnit.MILLISECONDS) .writeTimeout(getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT), TimeUnit.MILLISECONDS) .readTimeout(getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), TimeUnit.MILLISECONDS); if (this.connectionPool != null) { httpClientBuilder = httpClientBuilder.connectionPool(connectionPool); } if (this.dispatcher != null) { httpClientBuilder = httpClientBuilder.dispatcher(dispatcher); } Configuration buildConfiguration = (configuration == null) ? Configuration.getGlobalConfiguration() : configuration; ProxyOptions buildProxyOptions = (proxyOptions == null && buildConfiguration != Configuration.NONE) ? ProxyOptions.fromConfiguration(buildConfiguration, true) : proxyOptions; if (buildProxyOptions != null) { httpClientBuilder = httpClientBuilder.proxySelector(new OkHttpProxySelector( buildProxyOptions.getType().toProxyType(), buildProxyOptions::getAddress, buildProxyOptions.getNonProxyHosts())); if (buildProxyOptions.getUsername() != null) { ProxyAuthenticator proxyAuthenticator = new ProxyAuthenticator(buildProxyOptions.getUsername(), buildProxyOptions.getPassword()); httpClientBuilder = httpClientBuilder.proxyAuthenticator(proxyAuthenticator) .addInterceptor(proxyAuthenticator.getProxyAuthenticationInfoInterceptor()); } } httpClientBuilder.followRedirects(this.followRedirects); return new OkHttpAsyncHttpClient(httpClientBuilder.build()); }
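With followRedirects exposed on the builder, automatic redirect handling at the OkHttp layer becomes opt-in; a usage sketch:
```java
// Sketch: re-enable automatic redirects at the OkHttp layer. The builder default is
// false, so pipelines that configure their own redirect policy are unaffected.
HttpClient client = new OkHttpAsyncHttpClientBuilder()
    .followRedirects(true)
    .build();
```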
class OkHttpAsyncHttpClientBuilder { private final okhttp3.OkHttpClient okHttpClient; private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1); private static final long DEFAULT_CONNECT_TIMEOUT; private static final long DEFAULT_WRITE_TIMEOUT; private static final long DEFAULT_READ_TIMEOUT; static { ClientLogger logger = new ClientLogger(OkHttpAsyncHttpClientBuilder.class); Configuration configuration = Configuration.getGlobalConfiguration(); DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), logger).toMillis(); DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT, Duration.ofSeconds(60), logger).toMillis(); DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT, Duration.ofSeconds(60), logger).toMillis(); } private List<Interceptor> networkInterceptors = new ArrayList<>(); private Duration readTimeout; private Duration writeTimeout; private Duration connectionTimeout; private ConnectionPool connectionPool; private Dispatcher dispatcher; private ProxyOptions proxyOptions; private Configuration configuration; /** * Creates OkHttpAsyncHttpClientBuilder. */ public OkHttpAsyncHttpClientBuilder() { this.okHttpClient = null; } /** * Creates OkHttpAsyncHttpClientBuilder from the builder of an existing OkHttpClient. * * @param okHttpClient the httpclient */ public OkHttpAsyncHttpClientBuilder(OkHttpClient okHttpClient) { this.okHttpClient = Objects.requireNonNull(okHttpClient, "'okHttpClient' cannot be null."); } /** * Add a network layer interceptor to Http request pipeline. * * @param networkInterceptor the interceptor to add * @return the updated OkHttpAsyncHttpClientBuilder object */ public OkHttpAsyncHttpClientBuilder addNetworkInterceptor(Interceptor networkInterceptor) { Objects.requireNonNull(networkInterceptor, "'networkInterceptor' cannot be null."); this.networkInterceptors.add(networkInterceptor); return this; } /** * Add network layer interceptors to Http request pipeline. * <p> * This replaces all previously-set interceptors. * * @param networkInterceptors The interceptors to add. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder networkInterceptors(List<Interceptor> networkInterceptors) { this.networkInterceptors = Objects.requireNonNull(networkInterceptors, "'networkInterceptors' cannot be null."); return this; } /** * Sets the read timeout duration used when reading the server response. * <p> * The read timeout begins once the first response read is triggered after the server response is received. This * timeout triggers periodically but won't fire its operation if another read operation has completed between when * the timeout is triggered and completes. * <p> * If {@code readTimeout} is null or {@link Configuration * timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout period will be * applied to response read. When applying the timeout the greatest of one millisecond and the value of {@code * readTimeout} will be used. * * @param readTimeout Read timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder readTimeout(Duration readTimeout) { this.readTimeout = readTimeout; return this; } /** * Sets the writing timeout for a request to be sent. 
* <p> * The writing timeout does not apply to the entire request but to the request being sent over the wire. For example * a request body which emits {@code 10} {@code 8KB} buffers will trigger {@code 10} write operations, the last * write tracker will update when each operation completes and the outbound buffer will be periodically checked to * determine if it is still draining. * <p> * If {@code writeTimeout} is null either {@link Configuration * timeout will be used, if it is a {@link Duration} less than or equal to zero then no write timeout will be * applied. When applying the timeout the greatest of one millisecond and the value of {@code writeTimeout} will be * used. * * @param writeTimeout Write operation timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) { this.writeTimeout = writeTimeout; return this; } /** * Sets the connection timeout for a request to be sent. * <p> * The connection timeout begins once the request attempts to connect to the remote host and finishes once the * connection is resolved. * <p> * If {@code connectTimeout} is null either {@link Configuration * 10-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be * applied. When applying the timeout the greatest of one millisecond and the value of {@code connectTimeout} will * be used. * <p> * By default the connection timeout is 10 seconds. * * @param connectionTimeout Connect timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder connectionTimeout(Duration connectionTimeout) { this.connectionTimeout = connectionTimeout; return this; } /** * Sets the Http connection pool. * * @param connectionPool The OkHttp connection pool to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder connectionPool(ConnectionPool connectionPool) { this.connectionPool = Objects.requireNonNull(connectionPool, "'connectionPool' cannot be null."); return this; } /** * Sets the dispatcher that also composes the thread pool for executing HTTP requests. * * @param dispatcher The dispatcher to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder dispatcher(Dispatcher dispatcher) { this.dispatcher = Objects.requireNonNull(dispatcher, "'dispatcher' cannot be null."); return this; } /** * Sets the proxy. * * @param proxyOptions The proxy configuration to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) { this.proxyOptions = proxyOptions; return this; } /** * Sets the configuration store that is used during construction of the HTTP client. * <p> * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Creates a new OkHttp-backed {@link com.azure.core.http.HttpClient} instance on every call, using the * configuration set in the builder at the time of the build method call. * * @return A new OkHttp-backed {@link com.azure.core.http.HttpClient} instance. 
*/ /* * Returns the timeout in milliseconds to use based on the passed Duration and default timeout. * * If the timeout is {@code null} the default timeout will be used. If the timeout is less than or equal to zero * no timeout will be used. If the timeout is less than one millisecond a timeout of one millisecond will be used. */ static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) { if (configuredTimeout == null) { return defaultTimeout; } if (configuredTimeout.isZero() || configuredTimeout.isNegative()) { return 0; } return Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT); } }
class OkHttpAsyncHttpClientBuilder { private final okhttp3.OkHttpClient okHttpClient; private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1); private static final long DEFAULT_CONNECT_TIMEOUT; private static final long DEFAULT_WRITE_TIMEOUT; private static final long DEFAULT_READ_TIMEOUT; static { ClientLogger logger = new ClientLogger(OkHttpAsyncHttpClientBuilder.class); Configuration configuration = Configuration.getGlobalConfiguration(); DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), logger).toMillis(); DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT, Duration.ofSeconds(60), logger).toMillis(); DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT, Duration.ofSeconds(60), logger).toMillis(); } private List<Interceptor> networkInterceptors = new ArrayList<>(); private Duration readTimeout; private Duration writeTimeout; private Duration connectionTimeout; private ConnectionPool connectionPool; private Dispatcher dispatcher; private ProxyOptions proxyOptions; private Configuration configuration; private boolean followRedirects; /** * Creates OkHttpAsyncHttpClientBuilder. */ public OkHttpAsyncHttpClientBuilder() { this.okHttpClient = null; } /** * Creates OkHttpAsyncHttpClientBuilder from the builder of an existing OkHttpClient. * * @param okHttpClient the httpclient */ public OkHttpAsyncHttpClientBuilder(OkHttpClient okHttpClient) { this.okHttpClient = Objects.requireNonNull(okHttpClient, "'okHttpClient' cannot be null."); } /** * Add a network layer interceptor to Http request pipeline. * * @param networkInterceptor the interceptor to add * @return the updated OkHttpAsyncHttpClientBuilder object */ public OkHttpAsyncHttpClientBuilder addNetworkInterceptor(Interceptor networkInterceptor) { Objects.requireNonNull(networkInterceptor, "'networkInterceptor' cannot be null."); this.networkInterceptors.add(networkInterceptor); return this; } /** * Add network layer interceptors to Http request pipeline. * <p> * This replaces all previously-set interceptors. * * @param networkInterceptors The interceptors to add. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder networkInterceptors(List<Interceptor> networkInterceptors) { this.networkInterceptors = Objects.requireNonNull(networkInterceptors, "'networkInterceptors' cannot be null."); return this; } /** * Sets the read timeout duration used when reading the server response. * <p> * The read timeout begins once the first response read is triggered after the server response is received. This * timeout triggers periodically but won't fire its operation if another read operation has completed between when * the timeout is triggered and completes. * <p> * If {@code readTimeout} is null or {@link Configuration * timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout period will be * applied to response read. When applying the timeout the greatest of one millisecond and the value of {@code * readTimeout} will be used. * * @param readTimeout Read timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder readTimeout(Duration readTimeout) { this.readTimeout = readTimeout; return this; } /** * Sets the writing timeout for a request to be sent. 
* <p> * The writing timeout does not apply to the entire request but to the request being sent over the wire. For example * a request body which emits {@code 10} {@code 8KB} buffers will trigger {@code 10} write operations, the last * write tracker will update when each operation completes and the outbound buffer will be periodically checked to * determine if it is still draining. * <p> * If {@code writeTimeout} is null either {@link Configuration * timeout will be used, if it is a {@link Duration} less than or equal to zero then no write timeout will be * applied. When applying the timeout the greatest of one millisecond and the value of {@code writeTimeout} will be * used. * * @param writeTimeout Write operation timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) { this.writeTimeout = writeTimeout; return this; } /** * Sets the connection timeout for a request to be sent. * <p> * The connection timeout begins once the request attempts to connect to the remote host and finishes once the * connection is resolved. * <p> * If {@code connectTimeout} is null either {@link Configuration * 10-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be * applied. When applying the timeout the greatest of one millisecond and the value of {@code connectTimeout} will * be used. * <p> * By default the connection timeout is 10 seconds. * * @param connectionTimeout Connect timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder connectionTimeout(Duration connectionTimeout) { this.connectionTimeout = connectionTimeout; return this; } /** * Sets the Http connection pool. * * @param connectionPool The OkHttp connection pool to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder connectionPool(ConnectionPool connectionPool) { this.connectionPool = Objects.requireNonNull(connectionPool, "'connectionPool' cannot be null."); return this; } /** * Sets the dispatcher that also composes the thread pool for executing HTTP requests. * * @param dispatcher The dispatcher to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder dispatcher(Dispatcher dispatcher) { this.dispatcher = Objects.requireNonNull(dispatcher, "'dispatcher' cannot be null."); return this; } /** * Sets the proxy. * * @param proxyOptions The proxy configuration to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) { this.proxyOptions = proxyOptions; return this; } /** * Sets the configuration store that is used during construction of the HTTP client. * <p> * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * <p>Sets the followRedirect flag on the underlying OkHttp-backed {@link com.azure.core.http.HttpClient}.</p> * * <p>If this is set to 'true' redirects will be followed automatically, and * if your HTTP pipeline is configured with a redirect policy it will not be called.</p> * * @param followRedirects The followRedirects value to use. 
* @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder followRedirects(boolean followRedirects) { this.followRedirects = followRedirects; return this; } /** * Creates a new OkHttp-backed {@link com.azure.core.http.HttpClient} instance on every call, using the * configuration set in the builder at the time of the build method call. * * @return A new OkHttp-backed {@link com.azure.core.http.HttpClient} instance. */ /* * Returns the timeout in milliseconds to use based on the passed Duration and default timeout. * * If the timeout is {@code null} the default timeout will be used. If the timeout is less than or equal to zero * no timeout will be used. If the timeout is less than one millisecond a timeout of one millisecond will be used. */ static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) { if (configuredTimeout == null) { return defaultTimeout; } if (configuredTimeout.isZero() || configuredTimeout.isNegative()) { return 0; } return Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT); } }
Couldn't this just be ```suggestion httpClientBuilder.followRedirects(followRedirects); ``` since `boolean` is false by default #Resolved
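The suggested simplification relies on Java's default field initialization; a two-line sketch of the reasoning:
```java
private boolean followRedirects;                          // implicitly initialized to false
httpClientBuilder.followRedirects(this.followRedirects);  // equivalent to followRedirects(false) unless set
```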
public HttpClient build() { OkHttpClient.Builder httpClientBuilder = this.okHttpClient == null ? new OkHttpClient.Builder() : this.okHttpClient.newBuilder(); for (Interceptor interceptor : this.networkInterceptors) { httpClientBuilder = httpClientBuilder.addNetworkInterceptor(interceptor); } httpClientBuilder = httpClientBuilder .connectTimeout(getTimeoutMillis(connectionTimeout, DEFAULT_CONNECT_TIMEOUT), TimeUnit.MILLISECONDS) .writeTimeout(getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT), TimeUnit.MILLISECONDS) .readTimeout(getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), TimeUnit.MILLISECONDS); if (this.connectionPool != null) { httpClientBuilder = httpClientBuilder.connectionPool(connectionPool); } if (this.dispatcher != null) { httpClientBuilder = httpClientBuilder.dispatcher(dispatcher); } Configuration buildConfiguration = (configuration == null) ? Configuration.getGlobalConfiguration() : configuration; ProxyOptions buildProxyOptions = (proxyOptions == null && buildConfiguration != Configuration.NONE) ? ProxyOptions.fromConfiguration(buildConfiguration, true) : proxyOptions; if (buildProxyOptions != null) { httpClientBuilder = httpClientBuilder.proxySelector(new OkHttpProxySelector( buildProxyOptions.getType().toProxyType(), buildProxyOptions::getAddress, buildProxyOptions.getNonProxyHosts())); if (buildProxyOptions.getUsername() != null) { ProxyAuthenticator proxyAuthenticator = new ProxyAuthenticator(buildProxyOptions.getUsername(), buildProxyOptions.getPassword()); httpClientBuilder = httpClientBuilder.proxyAuthenticator(proxyAuthenticator) .addInterceptor(proxyAuthenticator.getProxyAuthenticationInfoInterceptor()); } } if (!this.followRedirects) { httpClientBuilder.followRedirects(false); } return new OkHttpAsyncHttpClient(httpClientBuilder.build()); }
}
public HttpClient build() { OkHttpClient.Builder httpClientBuilder = this.okHttpClient == null ? new OkHttpClient.Builder() : this.okHttpClient.newBuilder(); for (Interceptor interceptor : this.networkInterceptors) { httpClientBuilder = httpClientBuilder.addNetworkInterceptor(interceptor); } httpClientBuilder = httpClientBuilder .connectTimeout(getTimeoutMillis(connectionTimeout, DEFAULT_CONNECT_TIMEOUT), TimeUnit.MILLISECONDS) .writeTimeout(getTimeoutMillis(writeTimeout, DEFAULT_WRITE_TIMEOUT), TimeUnit.MILLISECONDS) .readTimeout(getTimeoutMillis(readTimeout, DEFAULT_READ_TIMEOUT), TimeUnit.MILLISECONDS); if (this.connectionPool != null) { httpClientBuilder = httpClientBuilder.connectionPool(connectionPool); } if (this.dispatcher != null) { httpClientBuilder = httpClientBuilder.dispatcher(dispatcher); } Configuration buildConfiguration = (configuration == null) ? Configuration.getGlobalConfiguration() : configuration; ProxyOptions buildProxyOptions = (proxyOptions == null && buildConfiguration != Configuration.NONE) ? ProxyOptions.fromConfiguration(buildConfiguration, true) : proxyOptions; if (buildProxyOptions != null) { httpClientBuilder = httpClientBuilder.proxySelector(new OkHttpProxySelector( buildProxyOptions.getType().toProxyType(), buildProxyOptions::getAddress, buildProxyOptions.getNonProxyHosts())); if (buildProxyOptions.getUsername() != null) { ProxyAuthenticator proxyAuthenticator = new ProxyAuthenticator(buildProxyOptions.getUsername(), buildProxyOptions.getPassword()); httpClientBuilder = httpClientBuilder.proxyAuthenticator(proxyAuthenticator) .addInterceptor(proxyAuthenticator.getProxyAuthenticationInfoInterceptor()); } } httpClientBuilder.followRedirects(this.followRedirects); return new OkHttpAsyncHttpClient(httpClientBuilder.build()); }
class OkHttpAsyncHttpClientBuilder { private final okhttp3.OkHttpClient okHttpClient; private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1); private static final long DEFAULT_CONNECT_TIMEOUT; private static final long DEFAULT_WRITE_TIMEOUT; private static final long DEFAULT_READ_TIMEOUT; static { ClientLogger logger = new ClientLogger(OkHttpAsyncHttpClientBuilder.class); Configuration configuration = Configuration.getGlobalConfiguration(); DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), logger).toMillis(); DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT, Duration.ofSeconds(60), logger).toMillis(); DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT, Duration.ofSeconds(60), logger).toMillis(); } private List<Interceptor> networkInterceptors = new ArrayList<>(); private Duration readTimeout; private Duration writeTimeout; private Duration connectionTimeout; private ConnectionPool connectionPool; private Dispatcher dispatcher; private ProxyOptions proxyOptions; private Configuration configuration; private boolean followRedirects; /** * Creates OkHttpAsyncHttpClientBuilder. */ public OkHttpAsyncHttpClientBuilder() { this.okHttpClient = null; } /** * Creates OkHttpAsyncHttpClientBuilder from the builder of an existing OkHttpClient. * * @param okHttpClient the httpclient */ public OkHttpAsyncHttpClientBuilder(OkHttpClient okHttpClient) { this.okHttpClient = Objects.requireNonNull(okHttpClient, "'okHttpClient' cannot be null."); } /** * Add a network layer interceptor to Http request pipeline. * * @param networkInterceptor the interceptor to add * @return the updated OkHttpAsyncHttpClientBuilder object */ public OkHttpAsyncHttpClientBuilder addNetworkInterceptor(Interceptor networkInterceptor) { Objects.requireNonNull(networkInterceptor, "'networkInterceptor' cannot be null."); this.networkInterceptors.add(networkInterceptor); return this; } /** * Add network layer interceptors to Http request pipeline. * <p> * This replaces all previously-set interceptors. * * @param networkInterceptors The interceptors to add. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder networkInterceptors(List<Interceptor> networkInterceptors) { this.networkInterceptors = Objects.requireNonNull(networkInterceptors, "'networkInterceptors' cannot be null."); return this; } /** * Sets the read timeout duration used when reading the server response. * <p> * The read timeout begins once the first response read is triggered after the server response is received. This * timeout triggers periodically but won't fire its operation if another read operation has completed between when * the timeout is triggered and completes. * <p> * If {@code readTimeout} is null or {@link Configuration * timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout period will be * applied to response read. When applying the timeout the greatest of one millisecond and the value of {@code * readTimeout} will be used. * * @param readTimeout Read timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder readTimeout(Duration readTimeout) { this.readTimeout = readTimeout; return this; } /** * Sets the writing timeout for a request to be sent. 
* <p> * The writing timeout does not apply to the entire request but to the request being sent over the wire. For example * a request body which emits {@code 10} {@code 8KB} buffers will trigger {@code 10} write operations, the last * write tracker will update when each operation completes and the outbound buffer will be periodically checked to * determine if it is still draining. * <p> * If {@code writeTimeout} is null either {@link Configuration * timeout will be used, if it is a {@link Duration} less than or equal to zero then no write timeout will be * applied. When applying the timeout the greatest of one millisecond and the value of {@code writeTimeout} will be * used. * * @param writeTimeout Write operation timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) { this.writeTimeout = writeTimeout; return this; } /** * Sets the connection timeout for a request to be sent. * <p> * The connection timeout begins once the request attempts to connect to the remote host and finishes once the * connection is resolved. * <p> * If {@code connectTimeout} is null either {@link Configuration * 10-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be * applied. When applying the timeout the greatest of one millisecond and the value of {@code connectTimeout} will * be used. * <p> * By default the connection timeout is 10 seconds. * * @param connectionTimeout Connect timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder connectionTimeout(Duration connectionTimeout) { this.connectionTimeout = connectionTimeout; return this; } /** * Sets the Http connection pool. * * @param connectionPool The OkHttp connection pool to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder connectionPool(ConnectionPool connectionPool) { this.connectionPool = Objects.requireNonNull(connectionPool, "'connectionPool' cannot be null."); return this; } /** * Sets the dispatcher that also composes the thread pool for executing HTTP requests. * * @param dispatcher The dispatcher to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder dispatcher(Dispatcher dispatcher) { this.dispatcher = Objects.requireNonNull(dispatcher, "'dispatcher' cannot be null."); return this; } /** * Sets the proxy. * * @param proxyOptions The proxy configuration to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) { this.proxyOptions = proxyOptions; return this; } /** * Sets the configuration store that is used during construction of the HTTP client. * <p> * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the followRedirect flag on the underlying OkHttp-backed {@link com.azure.core.http.HttpClient}. * * If this policy is set to 'true' OkHttp-backed {@link com.azure.core.http.HttpClient} will follow redirects automatically * If your HTTP pipeline is configured to call redirect policy it will not be called if this value is true. 
* * @param followRedirect Whether OKHttpClient should follow redirects by default. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder followRedirects(boolean followRedirect) { this.followRedirects = followRedirect; return this; } /** * Creates a new OkHttp-backed {@link com.azure.core.http.HttpClient} instance on every call, using the * configuration set in the builder at the time of the build method call. * * @return A new OkHttp-backed {@link com.azure.core.http.HttpClient} instance. */ /* * Returns the timeout in milliseconds to use based on the passed Duration and default timeout. * * If the timeout is {@code null} the default timeout will be used. If the timeout is less than or equal to zero * no timeout will be used. If the timeout is less than one millisecond a timeout of one millisecond will be used. */ static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) { if (configuredTimeout == null) { return defaultTimeout; } if (configuredTimeout.isZero() || configuredTimeout.isNegative()) { return 0; } return Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT); } }
class OkHttpAsyncHttpClientBuilder { private final okhttp3.OkHttpClient okHttpClient; private static final long MINIMUM_TIMEOUT = TimeUnit.MILLISECONDS.toMillis(1); private static final long DEFAULT_CONNECT_TIMEOUT; private static final long DEFAULT_WRITE_TIMEOUT; private static final long DEFAULT_READ_TIMEOUT; static { ClientLogger logger = new ClientLogger(OkHttpAsyncHttpClientBuilder.class); Configuration configuration = Configuration.getGlobalConfiguration(); DEFAULT_CONNECT_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_CONNECT_TIMEOUT, Duration.ofSeconds(10), logger).toMillis(); DEFAULT_WRITE_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_WRITE_TIMEOUT, Duration.ofSeconds(60), logger).toMillis(); DEFAULT_READ_TIMEOUT = getDefaultTimeoutFromEnvironment(configuration, PROPERTY_AZURE_REQUEST_READ_TIMEOUT, Duration.ofSeconds(60), logger).toMillis(); } private List<Interceptor> networkInterceptors = new ArrayList<>(); private Duration readTimeout; private Duration writeTimeout; private Duration connectionTimeout; private ConnectionPool connectionPool; private Dispatcher dispatcher; private ProxyOptions proxyOptions; private Configuration configuration; private boolean followRedirects; /** * Creates OkHttpAsyncHttpClientBuilder. */ public OkHttpAsyncHttpClientBuilder() { this.okHttpClient = null; } /** * Creates OkHttpAsyncHttpClientBuilder from the builder of an existing OkHttpClient. * * @param okHttpClient the httpclient */ public OkHttpAsyncHttpClientBuilder(OkHttpClient okHttpClient) { this.okHttpClient = Objects.requireNonNull(okHttpClient, "'okHttpClient' cannot be null."); } /** * Add a network layer interceptor to Http request pipeline. * * @param networkInterceptor the interceptor to add * @return the updated OkHttpAsyncHttpClientBuilder object */ public OkHttpAsyncHttpClientBuilder addNetworkInterceptor(Interceptor networkInterceptor) { Objects.requireNonNull(networkInterceptor, "'networkInterceptor' cannot be null."); this.networkInterceptors.add(networkInterceptor); return this; } /** * Add network layer interceptors to Http request pipeline. * <p> * This replaces all previously-set interceptors. * * @param networkInterceptors The interceptors to add. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder networkInterceptors(List<Interceptor> networkInterceptors) { this.networkInterceptors = Objects.requireNonNull(networkInterceptors, "'networkInterceptors' cannot be null."); return this; } /** * Sets the read timeout duration used when reading the server response. * <p> * The read timeout begins once the first response read is triggered after the server response is received. This * timeout triggers periodically but won't fire its operation if another read operation has completed between when * the timeout is triggered and completes. * <p> * If {@code readTimeout} is null or {@link Configuration * timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout period will be * applied to response read. When applying the timeout the greatest of one millisecond and the value of {@code * readTimeout} will be used. * * @param readTimeout Read timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder readTimeout(Duration readTimeout) { this.readTimeout = readTimeout; return this; } /** * Sets the writing timeout for a request to be sent. 
* <p> * The writing timeout does not apply to the entire request but to the request being sent over the wire. For example * a request body which emits {@code 10} {@code 8KB} buffers will trigger {@code 10} write operations, the last * write tracker will update when each operation completes and the outbound buffer will be periodically checked to * determine if it is still draining. * <p> * If {@code writeTimeout} is null either {@link Configuration * timeout will be used, if it is a {@link Duration} less than or equal to zero then no write timeout will be * applied. When applying the timeout the greatest of one millisecond and the value of {@code writeTimeout} will be * used. * * @param writeTimeout Write operation timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder writeTimeout(Duration writeTimeout) { this.writeTimeout = writeTimeout; return this; } /** * Sets the connection timeout for a request to be sent. * <p> * The connection timeout begins once the request attempts to connect to the remote host and finishes once the * connection is resolved. * <p> * If {@code connectTimeout} is null either {@link Configuration * 10-second timeout will be used, if it is a {@link Duration} less than or equal to zero then no timeout will be * applied. When applying the timeout the greatest of one millisecond and the value of {@code connectTimeout} will * be used. * <p> * By default the connection timeout is 10 seconds. * * @param connectionTimeout Connect timeout duration. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder connectionTimeout(Duration connectionTimeout) { this.connectionTimeout = connectionTimeout; return this; } /** * Sets the Http connection pool. * * @param connectionPool The OkHttp connection pool to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder connectionPool(ConnectionPool connectionPool) { this.connectionPool = Objects.requireNonNull(connectionPool, "'connectionPool' cannot be null."); return this; } /** * Sets the dispatcher that also composes the thread pool for executing HTTP requests. * * @param dispatcher The dispatcher to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder dispatcher(Dispatcher dispatcher) { this.dispatcher = Objects.requireNonNull(dispatcher, "'dispatcher' cannot be null."); return this; } /** * Sets the proxy. * * @param proxyOptions The proxy configuration to use. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder proxy(ProxyOptions proxyOptions) { this.proxyOptions = proxyOptions; return this; } /** * Sets the configuration store that is used during construction of the HTTP client. * <p> * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store. * @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * <p>Sets the followRedirect flag on the underlying OkHttp-backed {@link com.azure.core.http.HttpClient}.</p> * * <p>If this is set to 'true' redirects will be followed automatically, and * if your HTTP pipeline is configured with a redirect policy it will not be called.</p> * * @param followRedirects The followRedirects value to use. 
* @return The updated OkHttpAsyncHttpClientBuilder object. */ public OkHttpAsyncHttpClientBuilder followRedirects(boolean followRedirects) { this.followRedirects = followRedirects; return this; } /** * Creates a new OkHttp-backed {@link com.azure.core.http.HttpClient} instance on every call, using the * configuration set in the builder at the time of the build method call. * * @return A new OkHttp-backed {@link com.azure.core.http.HttpClient} instance. */ /* * Returns the timeout in milliseconds to use based on the passed Duration and default timeout. * * If the timeout is {@code null} the default timeout will be used. If the timeout is less than or equal to zero * no timeout will be used. If the timeout is less than one millisecond a timeout of one millisecond will be used. */ static long getTimeoutMillis(Duration configuredTimeout, long defaultTimeout) { if (configuredTimeout == null) { return defaultTimeout; } if (configuredTimeout.isZero() || configuredTimeout.isNegative()) { return 0; } return Math.max(configuredTimeout.toMillis(), MINIMUM_TIMEOUT); } }
NOTE: Overrides the client assertion at the per-request level.
public Mono<AccessToken> authenticateWithConfidentialClient(TokenRequestContext request) { return confidentialClientApplicationAccessor.getValue() .flatMap(confidentialClient -> Mono.fromFuture(() -> { ClientCredentialParameters.ClientCredentialParametersBuilder builder = ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .tenant(IdentityUtil .resolveTenantId(tenantId, request, options)); if (clientAssertionSupplier != null) { builder.clientCredential(ClientCredentialFactory .createFromClientAssertion(clientAssertionSupplier.get())); } return confidentialClient.acquireToken(builder.build()); } )).map(MsalToken::new); }
if (clientAssertionSupplier != null) {
public Mono<AccessToken> authenticateWithConfidentialClient(TokenRequestContext request) { return confidentialClientApplicationAccessor.getValue() .flatMap(confidentialClient -> Mono.fromFuture(() -> { ClientCredentialParameters.ClientCredentialParametersBuilder builder = ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .tenant(IdentityUtil .resolveTenantId(tenantId, request, options)); if (clientAssertionSupplier != null) { builder.clientCredential(ClientCredentialFactory .createFromClientAssertion(clientAssertionSupplier.get())); } return confidentialClient.acquireToken(builder.build()); } )).map(MsalToken::new); }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private static final String WINDOWS_STARTER = "cmd.exe"; private static final String LINUX_MAC_STARTER = "/bin/sh"; private static final String WINDOWS_SWITCHER = "/c"; private static final String LINUX_MAC_SWITCHER = "-c"; private static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized"; private static final Pattern LINUX_MAC_PROCESS_ERROR_MESSAGE = Pattern.compile("(.*)az:(.*)not found"); private static final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv("SystemRoot"); private static final String DEFAULT_WINDOWS_PS_EXECUTABLE = "pwsh.exe"; private static final String LEGACY_WINDOWS_PS_EXECUTABLE = "powershell.exe"; private static final String DEFAULT_LINUX_PS_EXECUTABLE = "pwsh"; private static final String DEFAULT_MAC_LINUX_PATH = "/bin/"; private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5); private static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01"; private static final String MSI_ENDPOINT_VERSION = "2017-09-01"; private static final String ADFS_TENANT = "adfs"; private static final String HTTP_LOCALHOST = "http: private static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview"; private static final ClientLogger LOGGER = new ClientLogger(IdentityClient.class); private static final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile("\"accessToken\": \"(.*?)(\"|$)"); private static final Pattern TRAILING_FORWARD_SLASHES = Pattern.compile("/+$"); private final IdentityClientOptions options; private final String tenantId; private final String clientId; private final String resourceId; private final String clientSecret; private final String clientAssertionFilePath; private final InputStream certificate; private final String certificatePath; private final Supplier<String> clientAssertionSupplier; private final String certificatePassword; private HttpPipelineAdapter httpPipelineAdapter; private final SynchronizedAccessor<PublicClientApplication> publicClientApplicationAccessor; private final SynchronizedAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessor; private final SynchronizedAccessor<String> clientAssertionAccessor; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param clientSecret the client secret of the application. * @param resourceId the resource ID of the application * @param certificatePath the path to the PKCS12 or PEM certificate of the application. * @param certificate the PKCS12 or PEM certificate of the application. * @param certificatePassword the password protecting the PFX certificate. * @param isSharedTokenCacheCredential Indicate whether the credential is * {@link com.azure.identity.SharedTokenCacheCredential} or not. * @param clientAssertionTimeout the timeout to use for the client assertion. * @param options the options configuring the client. 
*/ IdentityClient(String tenantId, String clientId, String clientSecret, String certificatePath, String clientAssertionFilePath, String resourceId, Supplier<String> clientAssertionSupplier, InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential, Duration clientAssertionTimeout, IdentityClientOptions options) { if (tenantId == null) { tenantId = "organizations"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.resourceId = resourceId; this.clientSecret = clientSecret; this.clientAssertionFilePath = clientAssertionFilePath; this.certificatePath = certificatePath; this.certificate = certificate; this.certificatePassword = certificatePassword; this.clientAssertionSupplier = clientAssertionSupplier; this.options = options; this.publicClientApplicationAccessor = new SynchronizedAccessor<>(() -> getPublicClientApplication(isSharedTokenCacheCredential)); this.confidentialClientApplicationAccessor = new SynchronizedAccessor<>(() -> getConfidentialClientApplication()); this.clientAssertionAccessor = clientAssertionTimeout == null ? new SynchronizedAccessor<>(() -> parseClientAssertion(), Duration.ofMinutes(5)) : new SynchronizedAccessor<>(() -> parseClientAssertion(), clientAssertionTimeout); } private Mono<ConfidentialClientApplication> getConfidentialClientApplication() { return Mono.defer(() -> { if (clientId == null) { return Mono.error(LOGGER.logExceptionAsError(new IllegalArgumentException( "A non-null value for client ID must be provided for user authentication."))); } String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/" + tenantId; IClientCredential credential; if (clientSecret != null) { credential = ClientCredentialFactory.createFromSecret(clientSecret); } else if (certificate != null || certificatePath != null) { try { if (certificatePassword == null) { byte[] pemCertificateBytes = getCertificateBytes(); List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes); PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes); if (x509CertificateList.size() == 1) { credential = ClientCredentialFactory.createFromCertificate( privateKey, x509CertificateList.get(0)); } else { credential = ClientCredentialFactory.createFromCertificateChain( privateKey, x509CertificateList); } } else { try (InputStream pfxCertificateStream = getCertificateInputStream()) { credential = ClientCredentialFactory.createFromCertificate(pfxCertificateStream, certificatePassword); } } } catch (IOException | GeneralSecurityException e) { return Mono.error(LOGGER.logExceptionAsError(new RuntimeException( "Failed to parse the certificate for the credential: " + e.getMessage(), e))); } } else if (clientAssertionSupplier != null) { credential = ClientCredentialFactory.createFromClientAssertion(clientAssertionSupplier.get()); } else { return Mono.error(LOGGER.logExceptionAsError( new IllegalArgumentException("Must provide client secret or client certificate path." 
+ " To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: } ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, credential); try { applicationBuilder = applicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { return Mono.error(LOGGER.logExceptionAsWarning(new IllegalStateException(e))); } applicationBuilder.sendX5c(options.isIncludeX5c()); initializeHttpPipelineAdapter(); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } if (options.getExecutorService() != null) { applicationBuilder.executorService(options.getExecutorService()); } TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions(); PersistentTokenCacheImpl tokenCache = null; if (tokenCachePersistenceOptions != null) { try { tokenCache = new PersistentTokenCacheImpl() .setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed()) .setName(tokenCachePersistenceOptions.getName()); applicationBuilder.setTokenCacheAccessAspect(tokenCache); } catch (Throwable t) { return Mono.error(LOGGER.logExceptionAsError(new ClientAuthenticationException( "Shared token cache is unavailable in this environment.", null, t))); } } if (options.getRegionalAuthority() != null) { if (options.getRegionalAuthority() == RegionalAuthority.AUTO_DISCOVER_REGION) { applicationBuilder.autoDetectRegion(true); } else { applicationBuilder.azureRegion(options.getRegionalAuthority().toString()); } } ConfidentialClientApplication confidentialClientApplication = applicationBuilder.build(); return tokenCache != null ? tokenCache.registerCache() .map(ignored -> confidentialClientApplication) : Mono.just(confidentialClientApplication); }); } private Mono<String> parseClientAssertion() { return Mono.fromCallable(() -> { if (clientAssertionFilePath != null) { byte[] encoded = Files.readAllBytes(Paths.get(clientAssertionFilePath)); return new String(encoded, StandardCharsets.UTF_8); } else { throw LOGGER.logExceptionAsError(new IllegalStateException( "Client Assertion File Path is not provided." + " It should be provided to authenticate with client assertion." 
)); } }); } private Mono<PublicClientApplication> getPublicClientApplication(boolean sharedTokenCacheCredential) { return Mono.defer(() -> { if (clientId == null) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "A non-null value for client ID must be provided for user authentication.")); } String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw LOGGER.logExceptionAsWarning(new IllegalStateException(e)); } initializeHttpPipelineAdapter(); if (httpPipelineAdapter != null) { publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } if (options.getExecutorService() != null) { publicClientApplicationBuilder.executorService(options.getExecutorService()); } if (!options.isCp1Disabled()) { Set<String> set = new HashSet<>(1); set.add("CP1"); publicClientApplicationBuilder.clientCapabilities(set); } return Mono.just(publicClientApplicationBuilder); }).flatMap(builder -> { TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions(); PersistentTokenCacheImpl tokenCache = null; if (tokenCachePersistenceOptions != null) { try { tokenCache = new PersistentTokenCacheImpl() .setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed()) .setName(tokenCachePersistenceOptions.getName()); builder.setTokenCacheAccessAspect(tokenCache); } catch (Throwable t) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException( "Shared token cache is unavailable in this environment.", null, t)); } } PublicClientApplication publicClientApplication = builder.build(); return tokenCache != null ? tokenCache.registerCache() .map(ignored -> publicClientApplication) : Mono.just(publicClientApplication); }); } public Mono<MsalToken> authenticateWithIntelliJ(TokenRequestContext request) { try { IntelliJCacheAccessor cacheAccessor = new IntelliJCacheAccessor(options.getIntelliJKeePassDatabasePath()); IntelliJAuthMethodDetails authDetails; try { authDetails = cacheAccessor.getAuthDetailsIfAvailable(); } catch (CredentialUnavailableException e) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("IntelliJ Authentication not available.", e))); } if (authDetails == null) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("IntelliJ Authentication not available." 
+ " Please log in with Azure Tools for IntelliJ plugin in the IDE."))); } String authType = authDetails.getAuthMethod(); if ("SP".equalsIgnoreCase(authType)) { Map<String, String> spDetails = cacheAccessor .getIntellijServicePrincipalDetails(authDetails.getCredFilePath()); String authorityUrl = spDetails.get("authURL") + spDetails.get("tenant"); try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(spDetails.get("client"), ClientCredentialFactory.createFromSecret(spDetails.get("key"))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } if (options.getExecutorService() != null) { applicationBuilder.executorService(options.getExecutorService()); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } } else if ("DC".equalsIgnoreCase(authType)) { LOGGER.verbose("IntelliJ Authentication => Device Code Authentication scheme detected in Azure Tools" + " for IntelliJ Plugin."); if (isADFSTenant()) { LOGGER.verbose("IntelliJ Authentication => The input tenant is detected to be ADFS and" + " the ADFS tenants are not supported via IntelliJ Authentication currently."); return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("IntelliJCredential " + "authentication unavailable. ADFS tenant/authorities are not supported."))); } try { JsonNode intelliJCredentials = cacheAccessor.getDeviceCodeCredentials(); String refreshToken = intelliJCredentials.get("refreshToken").textValue(); RefreshTokenParameters.RefreshTokenParametersBuilder refreshTokenParametersBuilder = RefreshTokenParameters.builder(new HashSet<>(request.getScopes()), refreshToken); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); refreshTokenParametersBuilder.claims(customClaimRequest); } return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(pc.acquireToken(refreshTokenParametersBuilder.build())) .map(MsalToken::new)); } catch (CredentialUnavailableException e) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, e)); } } else { LOGGER.verbose("IntelliJ Authentication = > Only Service Principal and Device Code Authentication" + " schemes are currently supported via IntelliJ Credential currently. Please ensure you used one" + " of those schemes from Azure Tools for IntelliJ plugin."); return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("IntelliJ Authentication not available." + " Please login with Azure Tools for IntelliJ plugin in the IDE."))); } } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with Azure CLI. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithAzureCli(TokenRequestContext request) { StringBuilder azCommand = new StringBuilder("az account get-access-token --output json --resource "); String scopes = ScopeUtil.scopesToResource(request.getScopes()); try { ScopeUtil.validateScope(scopes); } catch (IllegalArgumentException ex) { return Mono.error(LOGGER.logExceptionAsError(ex)); } azCommand.append(scopes); String tenant = IdentityUtil.resolveTenantId(null, request, options); if (!CoreUtils.isNullOrEmpty(tenant)) { azCommand.append("--tenant ").append(tenant); } AccessToken token; try { String starter; String switcher; if (isWindowsPlatform()) { starter = WINDOWS_STARTER; switcher = WINDOWS_SWITCHER; } else { starter = LINUX_MAC_STARTER; switcher = LINUX_MAC_SWITCHER; } ProcessBuilder builder = new ProcessBuilder(starter, switcher, azCommand.toString()); String workingDirectory = getSafeWorkingDirectory(); if (workingDirectory != null) { builder.directory(new File(workingDirectory)); } else { throw LOGGER.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be" + " found to execute CLI command from. To mitigate this issue, please refer to the troubleshooting " + " guidelines here at https: } builder.redirectErrorStream(true); Process process = builder.start(); StringBuilder output = new StringBuilder(); try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8.name()))) { String line; while (true) { line = reader.readLine(); if (line == null) { break; } if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE) || LINUX_MAC_PROCESS_ERROR_MESSAGE.matcher(line).matches()) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "AzureCliCredential authentication unavailable. Azure CLI not installed." + "To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: } output.append(line); } } String processOutput = output.toString(); process.waitFor(10, TimeUnit.SECONDS); if (process.exitValue() != 0) { if (processOutput.length() > 0) { String redactedOutput = redactInfo(processOutput); if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "AzureCliCredential authentication unavailable." + " Please run 'az login' to set up account. 
To further mitigate this" + " issue, please refer to the troubleshooting guidelines here at " + "https: } throw LOGGER.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null)); } else { throw LOGGER.logExceptionAsError( new ClientAuthenticationException("Failed to invoke Azure CLI ", null)); } } LOGGER.verbose("Azure CLI Authentication => A token response was received from Azure CLI, deserializing the" + " response into an Access Token."); Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class, SerializerEncoding.JSON); String accessToken = objectMap.get("accessToken"); String time = objectMap.get("expiresOn"); String timeToSecond = time.substring(0, time.indexOf(".")); String timeJoinedWithT = String.join("T", timeToSecond.split(" ")); OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME) .atZone(ZoneId.systemDefault()) .toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC); token = new AccessToken(accessToken, expiresOn); } catch (IOException | InterruptedException e) { throw LOGGER.logExceptionAsError(new IllegalStateException(e)); } catch (RuntimeException e) { return Mono.error(e instanceof CredentialUnavailableException ? LoggingUtil.logCredentialUnavailableException(LOGGER, options, (CredentialUnavailableException) e) : LOGGER.logExceptionAsError(e)); } return Mono.just(token); } /** * Asynchronously acquire a token from Active Directory with Azure Power Shell. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithAzurePowerShell(TokenRequestContext request) { List<CredentialUnavailableException> exceptions = new ArrayList<>(2); PowershellManager defaultPowerShellManager = new PowershellManager(Platform.isWindows() ? DEFAULT_WINDOWS_PS_EXECUTABLE : DEFAULT_LINUX_PS_EXECUTABLE); PowershellManager legacyPowerShellManager = Platform.isWindows() ? new PowershellManager(LEGACY_WINDOWS_PS_EXECUTABLE) : null; List<PowershellManager> powershellManagers = new ArrayList<>(2); powershellManagers.add(defaultPowerShellManager); if (legacyPowerShellManager != null) { powershellManagers.add(legacyPowerShellManager); } return Flux.fromIterable(powershellManagers) .flatMap(powershellManager -> getAccessTokenFromPowerShell(request, powershellManager) .onErrorResume(t -> { if (!t.getClass().getSimpleName().equals("CredentialUnavailableException")) { return Mono.error(new ClientAuthenticationException( "Azure Powershell authentication failed. Error Details: " + t.getMessage() + ". To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: null, t)); } exceptions.add((CredentialUnavailableException) t); return Mono.empty(); }), 1) .next() .switchIfEmpty(Mono.defer(() -> { CredentialUnavailableException last = exceptions.get(exceptions.size() - 1); for (int z = exceptions.size() - 2; z >= 0; z--) { CredentialUnavailableException current = exceptions.get(z); last = new CredentialUnavailableException("Azure PowerShell authentication failed using default" + "powershell(pwsh) with following error: " + current.getMessage() + "\r\n" + "Azure PowerShell authentication failed using powershell-core(powershell)" + " with following error: " + last.getMessage(), last.getCause()); } return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, (last))); })); } /** * Asynchronously acquire a token from Active Directory with Azure PowerShell. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithOBO(TokenRequestContext request) { return confidentialClientApplicationAccessor.getValue() .flatMap(confidentialClient -> Mono.fromFuture(() -> confidentialClient.acquireToken(OnBehalfOfParameters .builder(new HashSet<>(request.getScopes()), options.getUserAssertion()) .tenant(IdentityUtil.resolveTenantId(tenantId, request, options)) .build())) .map(MsalToken::new)); } private Mono<AccessToken> getAccessTokenFromPowerShell(TokenRequestContext request, PowershellManager powershellManager) { return powershellManager.initSession() .flatMap(manager -> { String azAccountsCommand = "Import-Module Az.Accounts -MinimumVersion 2.2.0 -PassThru"; return manager.runCommand(azAccountsCommand) .flatMap(output -> { if (output.contains("The specified module 'Az.Accounts' with version '2.2.0' was not loaded " + "because no valid module file")) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "Az.Account module with version >= 2.2.0 is not installed. It needs to be installed to" + " use Azure PowerShell Credential."))); } LOGGER.verbose("Az.accounts module was found installed."); StringBuilder accessTokenCommand = new StringBuilder("Get-AzAccessToken -ResourceUrl "); accessTokenCommand.append(ScopeUtil.scopesToResource(request.getScopes())); accessTokenCommand.append(" | ConvertTo-Json"); String command = accessTokenCommand.toString(); LOGGER.verbose("Azure Powershell Authentication => Executing the command `%s` in Azure " + "Powershell to retrieve the Access Token.", accessTokenCommand); return manager.runCommand(accessTokenCommand.toString()) .flatMap(out -> { if (out.contains("Run Connect-AzAccount to login")) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "Run Connect-AzAccount to login to Azure account in PowerShell."))); } try { LOGGER.verbose("Azure Powershell Authentication => Attempting to deserialize the " + "received response from Azure Powershell."); Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(out, Map.class, SerializerEncoding.JSON); String accessToken = objectMap.get("Token"); String time = objectMap.get("ExpiresOn"); OffsetDateTime expiresOn = OffsetDateTime.parse(time) .withOffsetSameInstant(ZoneOffset.UTC); return Mono.just(new AccessToken(accessToken, expiresOn)); } catch (IOException e) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "Encountered error when deserializing response from Azure Power Shell.", e))); } }); }); }).doFinally(ignored -> powershellManager.close()); } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); return new HttpPipelineBuilder().httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
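     * <p>A hypothetical usage sketch for the username/password flow documented below; the scope and
     * credential values are placeholders, not part of the original source:</p>
     * <pre>{@code
     * TokenRequestContext request = new TokenRequestContext()
     *     .addScopes("https://management.azure.com/.default");
     * identityClient.authenticateWithUsernamePassword(request, "user@contoso.com", "placeholder-password")
     *     .subscribe(msalToken -> System.out.println(msalToken.getExpiresAt()));
     * }</pre>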
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> { UserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder = UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest .formatAsClaimsRequest(request.getClaims()); userNamePasswordParametersBuilder.claims(customClaimRequest); } userNamePasswordParametersBuilder.tenant( IdentityUtil.resolveTenantId(tenantId, request, options)); return pc.acquireToken(userNamePasswordParametersBuilder.build()); } )).onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with username and " + "password. To mitigate this issue, please refer to the troubleshooting guidelines " + "here at https: null, t)).map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @param account the account used to log in to acquire the last token * @return a Publisher that emits an AccessToken */ @SuppressWarnings("deprecation") public Mono<MsalToken> authenticateWithPublicClientCache(TokenRequestContext request, IAccount account) { return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> { SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder( new HashSet<>(request.getScopes())); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); parametersBuilder.claims(customClaimRequest); parametersBuilder.forceRefresh(true); } if (account != null) { parametersBuilder = parametersBuilder.account(account); } parametersBuilder.tenant( IdentityUtil.resolveTenantId(tenantId, request, options)); try { return pc.acquireTokenSilently(parametersBuilder.build()); } catch (MalformedURLException e) { return getFailedCompletableFuture(LOGGER.logExceptionAsError(new RuntimeException(e))); } }).map(MsalToken::new) .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET))) .switchIfEmpty(Mono.fromFuture(() -> { SilentParameters.SilentParametersBuilder forceParametersBuilder = SilentParameters.builder( new HashSet<>(request.getScopes())).forceRefresh(true); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest .formatAsClaimsRequest(request.getClaims()); forceParametersBuilder.claims(customClaimRequest); } if (account != null) { forceParametersBuilder = forceParametersBuilder.account(account); } forceParametersBuilder.tenant( IdentityUtil.resolveTenantId(tenantId, request, options)); try { return pc.acquireTokenSilently(forceParametersBuilder.build()); } catch (MalformedURLException e) { return getFailedCompletableFuture(LOGGER.logExceptionAsError(new RuntimeException(e))); } }).map(MsalToken::new))); } /** * Asynchronously acquire a token from the currently logged in client. 
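     * <p>The method below attempts a silent lookup in the confidential client token cache and completes
     * empty when the cached token is within the refresh offset of expiry, so callers typically chain a
     * fallback. A hypothetical usage sketch with illustrative names, not part of the original source:</p>
     * <pre>{@code
     * identityClient.authenticateWithConfidentialClientCache(request)
     *     .subscribe(token -> System.out.println("Cached token expires at: " + token.getExpiresAt()));
     * }</pre>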
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ @SuppressWarnings("deprecation") public Mono<AccessToken> authenticateWithConfidentialClientCache(TokenRequestContext request) { return confidentialClientApplicationAccessor.getValue() .flatMap(confidentialClient -> Mono.fromFuture(() -> { SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder( new HashSet<>(request.getScopes())) .tenant(IdentityUtil.resolveTenantId(tenantId, request, options)); try { return confidentialClient.acquireTokenSilently(parametersBuilder.build()); } catch (MalformedURLException e) { return getFailedCompletableFuture(LOGGER.logExceptionAsError(new RuntimeException(e))); } }).map(ar -> (AccessToken) new MsalToken(ar)) .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET)))); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. * * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return publicClientApplicationAccessor.getValue().flatMap(pc -> Mono.fromFuture(() -> { DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder = DeviceCodeFlowParameters.builder( new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept( new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))) .tenant(IdentityUtil .resolveTenantId(tenantId, request, options)); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); parametersBuilder.claims(customClaimRequest); } return pc.acquireToken(parametersBuilder.build()); }).onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with device code", null, t)) .map(MsalToken::new)); } /** * Asynchronously acquire a token from Active Directory with Visual Studio cached refresh token. * * @param request the details of the token request * @return a Publisher that emits an AccessToken. */ public Mono<MsalToken> authenticateWithVsCodeCredential(TokenRequestContext request, String cloud) { if (isADFSTenant()) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("VsCodeCredential " + "authentication unavailable. ADFS tenant/authorities are not supported. 
" + "To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: } VisualStudioCacheAccessor accessor = new VisualStudioCacheAccessor(); String credential = null; try { credential = accessor.getCredentials("VS Code Azure", cloud); } catch (CredentialUnavailableException e) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, e)); } RefreshTokenParameters.RefreshTokenParametersBuilder parametersBuilder = RefreshTokenParameters .builder(new HashSet<>(request.getScopes()), credential); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); parametersBuilder.claims(customClaimRequest); } return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(pc.acquireToken(parametersBuilder.build())) .onErrorResume(t -> { if (t instanceof MsalInteractionRequiredException) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("Failed to acquire token with" + " VS code credential." + " To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: } return Mono.error(new ClientAuthenticationException("Failed to acquire token with" + " VS code credential", null, t)); }) .map(MsalToken::new)); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { AuthorizationCodeParameters.AuthorizationCodeParametersBuilder parametersBuilder = AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .tenant(IdentityUtil .resolveTenantId(tenantId, request, options)); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); parametersBuilder.claims(customClaimRequest); } Mono<IAuthenticationResult> acquireToken; if (clientSecret != null) { acquireToken = confidentialClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(parametersBuilder.build()))); } else { acquireToken = publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(parametersBuilder.build()))); } return acquireToken.onErrorMap(t -> new ClientAuthenticationException( "Failed to acquire token with authorization code", null, t)).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @param redirectUrl the redirect URL to listen on and receive security code * @param loginHint the username suggestion to pre-fill the login page's username/email address field * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, Integer port, String redirectUrl, String loginHint) { URI redirectUri; String redirect; if (port != null) { redirect = HTTP_LOCALHOST + ":" + port; } else if (redirectUrl != null) { redirect = redirectUrl; } else { redirect = HTTP_LOCALHOST; } try { redirectUri = new URI(redirect); } catch (URISyntaxException e) { return Mono.error(LOGGER.logExceptionAsError(new RuntimeException(e))); } InteractiveRequestParameters.InteractiveRequestParametersBuilder builder = InteractiveRequestParameters.builder(redirectUri) .scopes(new HashSet<>(request.getScopes())) .prompt(Prompt.SELECT_ACCOUNT) .tenant(IdentityUtil .resolveTenantId(tenantId, request, options)); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); builder.claims(customClaimRequest); } if (loginHint != null) { builder.loginHint(loginHint); } Mono<IAuthenticationResult> acquireToken = publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(builder.build()))); return acquireToken.onErrorMap(t -> new ClientAuthenticationException( "Failed to acquire token with Interactive Browser Authentication.", null, t)).map(MsalToken::new); } /** * Gets token from shared token cache * */ public Mono<MsalToken> authenticateWithSharedTokenCache(TokenRequestContext request, String username) { return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> pc.getAccounts()) .onErrorMap(t -> new CredentialUnavailableException( "Cannot get accounts from token cache. Error: " + t.getMessage(), t)) .flatMap(set -> { IAccount requestedAccount; Map<String, IAccount> accounts = new HashMap<>(); if (set.isEmpty()) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("SharedTokenCacheCredential " + "authentication unavailable. No accounts were found in the cache."))); } for (IAccount cached : set) { if (username == null || username.equals(cached.username())) { if (!accounts.containsKey(cached.homeAccountId())) { accounts.put(cached.homeAccountId(), cached); } } } if (accounts.isEmpty()) { return Mono.error(new RuntimeException(String.format("SharedTokenCacheCredential " + "authentication unavailable. No account matching the specified username: %s was " + "found in the cache.", username))); } else if (accounts.size() > 1) { if (username == null) { return Mono.error(new RuntimeException("SharedTokenCacheCredential authentication " + "unavailable. Multiple accounts were found in the cache. Use username and " + "tenant id to disambiguate.")); } else { return Mono.error(new RuntimeException(String.format("SharedTokenCacheCredential " + "authentication unavailable. Multiple accounts matching the specified username: " + "%s were found in the cache.", username))); } } else { requestedAccount = accounts.values().iterator().next(); } return authenticateWithPublicClientCache(request, requestedAccount); })); } /** * Asynchronously acquire a token from the Azure Arc Managed Service Identity endpoint. 
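     * <p>The Azure Arc flow below is a two-step challenge: the first request is expected to return a 401
     * whose WWW-Authenticate header names a file containing a secret, and the request is then repeated
     * with that secret in a Basic Authorization header. A hypothetical usage sketch; the endpoint value is
     * an illustrative assumption (in practice it is supplied by the environment):</p>
     * <pre>{@code
     * identityClient.authenticateToArcManagedIdentityEndpoint(
     *         "http://localhost:40342/metadata/identity/oauth2/token", request)
     *     .subscribe(token -> System.out.println(token.getExpiresAt()));
     * }</pre>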
* * @param identityEndpoint the Identity endpoint to acquire token from * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToArcManagedIdentityEndpoint(String identityEndpoint, TokenRequestContext request) { return Mono.fromCallable(() -> { HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); payload.append("resource="); payload.append(URLEncoder.encode(ScopeUtil.scopesToResource(request.getScopes()), StandardCharsets.UTF_8.name())); payload.append("&api-version="); payload.append(URLEncoder.encode("2019-11-01", StandardCharsets.UTF_8.name())); URL url = new URL(String.format("%s?%s", identityEndpoint, payload)); String secretKey = null; try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); } catch (IOException e) { if (connection == null) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Failed to initialize " + "Http URL connection to the endpoint.", null, e)); } int status = connection.getResponseCode(); if (status != 401) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException(String.format("Expected a 401" + " Unauthorized response from Azure Arc Managed Identity Endpoint, received: %d", status), null, e)); } String realm = connection.getHeaderField("WWW-Authenticate"); if (realm == null) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Did not receive a value" + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint", null)); } int separatorIndex = realm.indexOf("="); if (separatorIndex == -1) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Did not receive a correct value" + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint", null)); } String secretKeyPath = realm.substring(separatorIndex + 1); secretKey = new String(Files.readAllBytes(Paths.get(secretKeyPath)), StandardCharsets.UTF_8); } finally { if (connection != null) { connection.disconnect(); } } if (secretKey == null) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Did not receive a secret value" + " in the response from Azure Arc Managed Identity Endpoint", null)); } try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Authorization", String.format("Basic %s", secretKey)); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner scanner = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = scanner.hasNext() ? scanner.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } finally { if (connection != null) { connection.disconnect(); } } }); } /** * Asynchronously acquire a token from the Azure Arc Managed Service Identity endpoint. 
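     * <p>The method that follows exchanges a client assertion, read through the client assertion accessor,
     * for an access token by POSTing to the tenant's /oauth2/v2.0/token endpoint. A hypothetical usage
     * sketch, not part of the original source:</p>
     * <pre>{@code
     * identityClient.authenticateWithExchangeToken(request)
     *     .subscribe(token -> System.out.println(token.getExpiresAt()));
     * }</pre>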
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithExchangeToken(TokenRequestContext request) { return clientAssertionAccessor.getValue() .flatMap(assertionToken -> Mono.fromCallable(() -> { String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/" + tenantId + "/oauth2/v2.0/token"; StringBuilder urlParametersBuilder = new StringBuilder(); urlParametersBuilder.append("client_assertion="); urlParametersBuilder.append(assertionToken); urlParametersBuilder.append("&client_assertion_type=urn:ietf:params:oauth:client-assertion-type" + ":jwt-bearer"); urlParametersBuilder.append("&client_id="); urlParametersBuilder.append(clientId); urlParametersBuilder.append("&grant_type=client_credentials"); urlParametersBuilder.append("&scope="); urlParametersBuilder.append(URLEncoder.encode(request.getScopes().get(0), StandardCharsets.UTF_8.name())); String urlParams = urlParametersBuilder.toString(); byte[] postData = urlParams.getBytes(StandardCharsets.UTF_8); int postDataLength = postData.length; HttpURLConnection connection = null; URL url = new URL(authorityUrl); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("POST"); connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); connection.setRequestProperty("Content-Length", Integer.toString(postDataLength)); connection.setDoOutput(true); try (DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream())) { outputStream.write(postData); } connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } finally { if (connection != null) { connection.disconnect(); } } })); } /** * Asynchronously acquire a token from the Azure Service Fabric Managed Service Identity endpoint. 
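     * <p>A hypothetical usage sketch for the Service Fabric flow documented below; the endpoint, header and
     * thumbprint values are placeholders that in practice come from the Service Fabric environment:</p>
     * <pre>{@code
     * identityClient.authenticateToServiceFabricManagedIdentityEndpoint(
     *         "<identity-endpoint>", "<identity-header>", "<certificate-thumbprint>", request)
     *     .subscribe(token -> System.out.println(token.getExpiresAt()));
     * }</pre>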
* * @param identityEndpoint the Identity endpoint to acquire token from * @param identityHeader the identity header to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToServiceFabricManagedIdentityEndpoint(String identityEndpoint, String identityHeader, String thumbprint, TokenRequestContext request) { return Mono.fromCallable(() -> { HttpsURLConnection connection = null; String endpoint = identityEndpoint; String headerValue = identityHeader; String endpointVersion = SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION; String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); payload.append("resource="); payload.append(URLEncoder.encode(resource, StandardCharsets.UTF_8.name())); payload.append("&api-version="); payload.append(URLEncoder.encode(endpointVersion, StandardCharsets.UTF_8.name())); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, StandardCharsets.UTF_8.name())); } if (resourceId != null) { payload.append("&mi_res_id="); payload.append(URLEncoder.encode(resourceId, StandardCharsets.UTF_8.name())); } try { URL url = new URL(String.format("%s?%s", endpoint, payload)); connection = (HttpsURLConnection) url.openConnection(); IdentitySslUtil.addTrustedCertificateThumbprint(connection, thumbprint, LOGGER); connection.setRequestMethod("GET"); if (headerValue != null) { connection.setRequestProperty("Secret", headerValue); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } finally { if (connection != null) { connection.disconnect(); } } }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. * * Specifying identity parameters will use the 2019-08-01 endpoint version. * Specifying MSI parameters will use the 2017-09-01 endpoint version. 
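     * <p>The implementation below sends the secret in an X-IDENTITY-HEADER header for the 2019-08-01
     * endpoint and in a Secret header for the 2017-09-01 endpoint. A hypothetical usage sketch; the
     * environment variable names are illustrative assumptions:</p>
     * <pre>{@code
     * identityClient.authenticateToManagedIdentityEndpoint(
     *         System.getenv("IDENTITY_ENDPOINT"), System.getenv("IDENTITY_HEADER"), null, null, request)
     *     .subscribe(token -> System.out.println(token.getExpiresAt()));
     * }</pre>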
* * @param identityEndpoint the Identity endpoint to acquire token from * @param identityHeader the identity header to acquire token with * @param msiEndpoint the MSI endpoint to acquire token from * @param msiSecret the MSI secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String identityEndpoint, String identityHeader, String msiEndpoint, String msiSecret, TokenRequestContext request) { return Mono.fromCallable(() -> { String endpoint; String headerValue; String endpointVersion; if (identityEndpoint != null) { endpoint = identityEndpoint; headerValue = identityHeader; endpointVersion = IDENTITY_ENDPOINT_VERSION; } else { endpoint = msiEndpoint; headerValue = msiSecret; endpointVersion = MSI_ENDPOINT_VERSION; } String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); payload.append("resource="); payload.append(URLEncoder.encode(resource, StandardCharsets.UTF_8.name())); payload.append("&api-version="); payload.append(URLEncoder.encode(endpointVersion, StandardCharsets.UTF_8.name())); if (clientId != null) { if (endpointVersion.equals(IDENTITY_ENDPOINT_VERSION)) { payload.append("&client_id="); } else { payload.append("&clientid="); } payload.append(URLEncoder.encode(clientId, StandardCharsets.UTF_8.name())); } if (resourceId != null) { payload.append("&mi_res_id="); payload.append(URLEncoder.encode(resourceId, StandardCharsets.UTF_8.name())); } try { URL url = new URL(String.format("%s?%s", endpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (headerValue != null) { if (IDENTITY_ENDPOINT_VERSION.equals(endpointVersion)) { connection.setRequestProperty("X-IDENTITY-HEADER", headerValue); } else { connection.setRequestProperty("Secret", headerValue); } } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } finally { if (connection != null) { connection.disconnect(); } } }); } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
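     * <p>The IMDS flow below first probes the endpoint for availability and then retries transient
     * failures (404, 410, 429 and 5xx responses) up to the configured maximum, waiting at least 70 seconds
     * when a 410 is returned. A hypothetical usage sketch, not part of the original source:</p>
     * <pre>{@code
     * identityClient.authenticateToIMDSEndpoint(request)
     *     .subscribe(token -> System.out.println(token.getExpiresAt()));
     * }</pre>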
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", StandardCharsets.UTF_8.name())); payload.append("&resource="); payload.append(URLEncoder.encode(resource, StandardCharsets.UTF_8.name())); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, StandardCharsets.UTF_8.name())); } if (resourceId != null) { payload.append("&mi_res_id="); payload.append(URLEncoder.encode(resourceId, StandardCharsets.UTF_8.name())); } } catch (IOException exception) { return Mono.error(exception); } String endpoint = TRAILING_FORWARD_SLASHES.matcher(options.getImdsAuthorityHost()).replaceAll("") + IdentityConstants.DEFAULT_IMDS_TOKENPATH; return checkIMDSAvailable(endpoint).flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("%s?%s", endpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw LOGGER.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode; try { responseCode = connection.getResponseCode(); } catch (Exception e) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "ManagedIdentityCredential authentication unavailable. " + "Connection to IMDS endpoint cannot be established, " + e.getMessage() + ".", e)); } if (responseCode == 400) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "ManagedIdentityCredential authentication unavailable. " + "Connection to IMDS endpoint cannot be established.", null)); } if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw LOGGER.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw LOGGER.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable(String endpoint) { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", StandardCharsets.UTF_8.name())); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("%s?%s", endpoint, payload)); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } catch (Exception e) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "ManagedIdentityCredential authentication unavailable. " + "Connection to IMDS endpoint cannot be established, " + e.getMessage() + ".", e)); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } private String getSafeWorkingDirectory() { if (isWindowsPlatform()) { if (CoreUtils.isNullOrEmpty(DEFAULT_WINDOWS_SYSTEM_ROOT)) { return null; } return DEFAULT_WINDOWS_SYSTEM_ROOT + "\\system32"; } else { return DEFAULT_MAC_LINUX_PATH; } } private boolean isWindowsPlatform() { return System.getProperty("os.name").contains("Windows"); } private String redactInfo(String input) { return ACCESS_TOKEN_PATTERN.matcher(input).replaceAll("****"); } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { LOGGER.error("Browser could not be opened - please open {} in a browser on this device.", url); } } private CompletableFuture<IAuthenticationResult> getFailedCompletableFuture(Exception e) { CompletableFuture<IAuthenticationResult> completableFuture = new CompletableFuture<>(); completableFuture.completeExceptionally(e); return completableFuture; } private void initializeHttpPipelineAdapter() { HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline, options); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient), options); } else if (options.getProxyOptions() == null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()), options); 
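                // When neither a pipeline, an HttpClient nor proxy options are configured, fall back to a
                // default HttpClient. If only proxy options are set, the adapter stays null and callers use
                // MSAL's java.net.Proxy support instead.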
} } } /** * Get the configured tenant id. * * @return the tenant id. */ public String getTenantId() { return tenantId; } /** * Get the configured client id. * * @return the client id. */ public String getClientId() { return clientId; } /** * Get the configured identity client options. * * @return the client options. */ public IdentityClientOptions getIdentityClientOptions() { return options; } private boolean isADFSTenant() { return this.tenantId.equals(ADFS_TENANT); } private byte[] getCertificateBytes() throws IOException { if (certificatePath != null) { return Files.readAllBytes(Paths.get(certificatePath)); } else if (certificate != null) { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int read = certificate.read(buffer, 0, buffer.length); while (read != -1) { outputStream.write(buffer, 0, read); read = certificate.read(buffer, 0, buffer.length); } return outputStream.toByteArray(); } else { return new byte[0]; } } private InputStream getCertificateInputStream() throws IOException { if (certificatePath != null) { return new BufferedInputStream(new FileInputStream(certificatePath)); } else { return certificate; } } }
class IdentityClient { private static final SerializerAdapter SERIALIZER_ADAPTER = JacksonAdapter.createDefaultSerializerAdapter(); private static final Random RANDOM = new Random(); private static final String WINDOWS_STARTER = "cmd.exe"; private static final String LINUX_MAC_STARTER = "/bin/sh"; private static final String WINDOWS_SWITCHER = "/c"; private static final String LINUX_MAC_SWITCHER = "-c"; private static final String WINDOWS_PROCESS_ERROR_MESSAGE = "'az' is not recognized"; private static final Pattern LINUX_MAC_PROCESS_ERROR_MESSAGE = Pattern.compile("(.*)az:(.*)not found"); private static final String DEFAULT_WINDOWS_SYSTEM_ROOT = System.getenv("SystemRoot"); private static final String DEFAULT_WINDOWS_PS_EXECUTABLE = "pwsh.exe"; private static final String LEGACY_WINDOWS_PS_EXECUTABLE = "powershell.exe"; private static final String DEFAULT_LINUX_PS_EXECUTABLE = "pwsh"; private static final String DEFAULT_MAC_LINUX_PATH = "/bin/"; private static final Duration REFRESH_OFFSET = Duration.ofMinutes(5); private static final String IDENTITY_ENDPOINT_VERSION = "2019-08-01"; private static final String MSI_ENDPOINT_VERSION = "2017-09-01"; private static final String ADFS_TENANT = "adfs"; private static final String HTTP_LOCALHOST = "http: private static final String SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION = "2019-07-01-preview"; private static final ClientLogger LOGGER = new ClientLogger(IdentityClient.class); private static final Pattern ACCESS_TOKEN_PATTERN = Pattern.compile("\"accessToken\": \"(.*?)(\"|$)"); private static final Pattern TRAILING_FORWARD_SLASHES = Pattern.compile("/+$"); private final IdentityClientOptions options; private final String tenantId; private final String clientId; private final String resourceId; private final String clientSecret; private final String clientAssertionFilePath; private final InputStream certificate; private final String certificatePath; private final Supplier<String> clientAssertionSupplier; private final String certificatePassword; private HttpPipelineAdapter httpPipelineAdapter; private final SynchronizedAccessor<PublicClientApplication> publicClientApplicationAccessor; private final SynchronizedAccessor<ConfidentialClientApplication> confidentialClientApplicationAccessor; private final SynchronizedAccessor<String> clientAssertionAccessor; /** * Creates an IdentityClient with the given options. * * @param tenantId the tenant ID of the application. * @param clientId the client ID of the application. * @param clientSecret the client secret of the application. * @param resourceId the resource ID of the application * @param certificatePath the path to the PKCS12 or PEM certificate of the application. * @param certificate the PKCS12 or PEM certificate of the application. * @param certificatePassword the password protecting the PFX certificate. * @param isSharedTokenCacheCredential Indicate whether the credential is * {@link com.azure.identity.SharedTokenCacheCredential} or not. * @param clientAssertionTimeout the timeout to use for the client assertion. * @param options the options configuring the client. 
*/ IdentityClient(String tenantId, String clientId, String clientSecret, String certificatePath, String clientAssertionFilePath, String resourceId, Supplier<String> clientAssertionSupplier, InputStream certificate, String certificatePassword, boolean isSharedTokenCacheCredential, Duration clientAssertionTimeout, IdentityClientOptions options) { if (tenantId == null) { tenantId = "organizations"; } if (options == null) { options = new IdentityClientOptions(); } this.tenantId = tenantId; this.clientId = clientId; this.resourceId = resourceId; this.clientSecret = clientSecret; this.clientAssertionFilePath = clientAssertionFilePath; this.certificatePath = certificatePath; this.certificate = certificate; this.certificatePassword = certificatePassword; this.clientAssertionSupplier = clientAssertionSupplier; this.options = options; this.publicClientApplicationAccessor = new SynchronizedAccessor<>(() -> getPublicClientApplication(isSharedTokenCacheCredential)); this.confidentialClientApplicationAccessor = new SynchronizedAccessor<>(() -> getConfidentialClientApplication()); this.clientAssertionAccessor = clientAssertionTimeout == null ? new SynchronizedAccessor<>(() -> parseClientAssertion(), Duration.ofMinutes(5)) : new SynchronizedAccessor<>(() -> parseClientAssertion(), clientAssertionTimeout); } private Mono<ConfidentialClientApplication> getConfidentialClientApplication() { return Mono.defer(() -> { if (clientId == null) { return Mono.error(LOGGER.logExceptionAsError(new IllegalArgumentException( "A non-null value for client ID must be provided for user authentication."))); } String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/" + tenantId; IClientCredential credential; if (clientSecret != null) { credential = ClientCredentialFactory.createFromSecret(clientSecret); } else if (certificate != null || certificatePath != null) { try { if (certificatePassword == null) { byte[] pemCertificateBytes = getCertificateBytes(); List<X509Certificate> x509CertificateList = CertificateUtil.publicKeyFromPem(pemCertificateBytes); PrivateKey privateKey = CertificateUtil.privateKeyFromPem(pemCertificateBytes); if (x509CertificateList.size() == 1) { credential = ClientCredentialFactory.createFromCertificate( privateKey, x509CertificateList.get(0)); } else { credential = ClientCredentialFactory.createFromCertificateChain( privateKey, x509CertificateList); } } else { try (InputStream pfxCertificateStream = getCertificateInputStream()) { credential = ClientCredentialFactory.createFromCertificate(pfxCertificateStream, certificatePassword); } } } catch (IOException | GeneralSecurityException e) { return Mono.error(LOGGER.logExceptionAsError(new RuntimeException( "Failed to parse the certificate for the credential: " + e.getMessage(), e))); } } else if (clientAssertionSupplier != null) { credential = ClientCredentialFactory.createFromClientAssertion(clientAssertionSupplier.get()); } else { return Mono.error(LOGGER.logExceptionAsError( new IllegalArgumentException("Must provide client secret or client certificate path." 
+ " To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: } ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(clientId, credential); try { applicationBuilder = applicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { return Mono.error(LOGGER.logExceptionAsWarning(new IllegalStateException(e))); } applicationBuilder.sendX5c(options.isIncludeX5c()); initializeHttpPipelineAdapter(); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } if (options.getExecutorService() != null) { applicationBuilder.executorService(options.getExecutorService()); } TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions(); PersistentTokenCacheImpl tokenCache = null; if (tokenCachePersistenceOptions != null) { try { tokenCache = new PersistentTokenCacheImpl() .setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed()) .setName(tokenCachePersistenceOptions.getName()); applicationBuilder.setTokenCacheAccessAspect(tokenCache); } catch (Throwable t) { return Mono.error(LOGGER.logExceptionAsError(new ClientAuthenticationException( "Shared token cache is unavailable in this environment.", null, t))); } } if (options.getRegionalAuthority() != null) { if (options.getRegionalAuthority() == RegionalAuthority.AUTO_DISCOVER_REGION) { applicationBuilder.autoDetectRegion(true); } else { applicationBuilder.azureRegion(options.getRegionalAuthority().toString()); } } ConfidentialClientApplication confidentialClientApplication = applicationBuilder.build(); return tokenCache != null ? tokenCache.registerCache() .map(ignored -> confidentialClientApplication) : Mono.just(confidentialClientApplication); }); } private Mono<String> parseClientAssertion() { return Mono.fromCallable(() -> { if (clientAssertionFilePath != null) { byte[] encoded = Files.readAllBytes(Paths.get(clientAssertionFilePath)); return new String(encoded, StandardCharsets.UTF_8); } else { throw LOGGER.logExceptionAsError(new IllegalStateException( "Client Assertion File Path is not provided." + " It should be provided to authenticate with client assertion." 
)); } }); } private Mono<PublicClientApplication> getPublicClientApplication(boolean sharedTokenCacheCredential) { return Mono.defer(() -> { if (clientId == null) { throw LOGGER.logExceptionAsError(new IllegalArgumentException( "A non-null value for client ID must be provided for user authentication.")); } String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/" + tenantId; PublicClientApplication.Builder publicClientApplicationBuilder = PublicClientApplication.builder(clientId); try { publicClientApplicationBuilder = publicClientApplicationBuilder.authority(authorityUrl); } catch (MalformedURLException e) { throw LOGGER.logExceptionAsWarning(new IllegalStateException(e)); } initializeHttpPipelineAdapter(); if (httpPipelineAdapter != null) { publicClientApplicationBuilder.httpClient(httpPipelineAdapter); } else { publicClientApplicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } if (options.getExecutorService() != null) { publicClientApplicationBuilder.executorService(options.getExecutorService()); } if (!options.isCp1Disabled()) { Set<String> set = new HashSet<>(1); set.add("CP1"); publicClientApplicationBuilder.clientCapabilities(set); } return Mono.just(publicClientApplicationBuilder); }).flatMap(builder -> { TokenCachePersistenceOptions tokenCachePersistenceOptions = options.getTokenCacheOptions(); PersistentTokenCacheImpl tokenCache = null; if (tokenCachePersistenceOptions != null) { try { tokenCache = new PersistentTokenCacheImpl() .setAllowUnencryptedStorage(tokenCachePersistenceOptions.isUnencryptedStorageAllowed()) .setName(tokenCachePersistenceOptions.getName()); builder.setTokenCacheAccessAspect(tokenCache); } catch (Throwable t) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException( "Shared token cache is unavailable in this environment.", null, t)); } } PublicClientApplication publicClientApplication = builder.build(); return tokenCache != null ? tokenCache.registerCache() .map(ignored -> publicClientApplication) : Mono.just(publicClientApplication); }); } public Mono<MsalToken> authenticateWithIntelliJ(TokenRequestContext request) { try { IntelliJCacheAccessor cacheAccessor = new IntelliJCacheAccessor(options.getIntelliJKeePassDatabasePath()); IntelliJAuthMethodDetails authDetails; try { authDetails = cacheAccessor.getAuthDetailsIfAvailable(); } catch (CredentialUnavailableException e) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("IntelliJ Authentication not available.", e))); } if (authDetails == null) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("IntelliJ Authentication not available." 
+ " Please log in with Azure Tools for IntelliJ plugin in the IDE."))); } String authType = authDetails.getAuthMethod(); if ("SP".equalsIgnoreCase(authType)) { Map<String, String> spDetails = cacheAccessor .getIntellijServicePrincipalDetails(authDetails.getCredFilePath()); String authorityUrl = spDetails.get("authURL") + spDetails.get("tenant"); try { ConfidentialClientApplication.Builder applicationBuilder = ConfidentialClientApplication.builder(spDetails.get("client"), ClientCredentialFactory.createFromSecret(spDetails.get("key"))) .authority(authorityUrl); if (httpPipelineAdapter != null) { applicationBuilder.httpClient(httpPipelineAdapter); } else if (options.getProxyOptions() != null) { applicationBuilder.proxy(proxyOptionsToJavaNetProxy(options.getProxyOptions())); } if (options.getExecutorService() != null) { applicationBuilder.executorService(options.getExecutorService()); } ConfidentialClientApplication application = applicationBuilder.build(); return Mono.fromFuture(application.acquireToken( ClientCredentialParameters.builder(new HashSet<>(request.getScopes())) .build())).map(MsalToken::new); } catch (MalformedURLException e) { return Mono.error(e); } } else if ("DC".equalsIgnoreCase(authType)) { LOGGER.verbose("IntelliJ Authentication => Device Code Authentication scheme detected in Azure Tools" + " for IntelliJ Plugin."); if (isADFSTenant()) { LOGGER.verbose("IntelliJ Authentication => The input tenant is detected to be ADFS and" + " the ADFS tenants are not supported via IntelliJ Authentication currently."); return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("IntelliJCredential " + "authentication unavailable. ADFS tenant/authorities are not supported."))); } try { JsonNode intelliJCredentials = cacheAccessor.getDeviceCodeCredentials(); String refreshToken = intelliJCredentials.get("refreshToken").textValue(); RefreshTokenParameters.RefreshTokenParametersBuilder refreshTokenParametersBuilder = RefreshTokenParameters.builder(new HashSet<>(request.getScopes()), refreshToken); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); refreshTokenParametersBuilder.claims(customClaimRequest); } return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(pc.acquireToken(refreshTokenParametersBuilder.build())) .map(MsalToken::new)); } catch (CredentialUnavailableException e) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, e)); } } else { LOGGER.verbose("IntelliJ Authentication = > Only Service Principal and Device Code Authentication" + " schemes are currently supported via IntelliJ Credential currently. Please ensure you used one" + " of those schemes from Azure Tools for IntelliJ plugin."); return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("IntelliJ Authentication not available." + " Please login with Azure Tools for IntelliJ plugin in the IDE."))); } } catch (IOException e) { return Mono.error(e); } } /** * Asynchronously acquire a token from Active Directory with Azure CLI. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithAzureCli(TokenRequestContext request) { StringBuilder azCommand = new StringBuilder("az account get-access-token --output json --resource "); String scopes = ScopeUtil.scopesToResource(request.getScopes()); try { ScopeUtil.validateScope(scopes); } catch (IllegalArgumentException ex) { return Mono.error(LOGGER.logExceptionAsError(ex)); } azCommand.append(scopes); String tenant = IdentityUtil.resolveTenantId(null, request, options); if (!CoreUtils.isNullOrEmpty(tenant)) { azCommand.append("--tenant ").append(tenant); } AccessToken token; try { String starter; String switcher; if (isWindowsPlatform()) { starter = WINDOWS_STARTER; switcher = WINDOWS_SWITCHER; } else { starter = LINUX_MAC_STARTER; switcher = LINUX_MAC_SWITCHER; } ProcessBuilder builder = new ProcessBuilder(starter, switcher, azCommand.toString()); String workingDirectory = getSafeWorkingDirectory(); if (workingDirectory != null) { builder.directory(new File(workingDirectory)); } else { throw LOGGER.logExceptionAsError(new IllegalStateException("A Safe Working directory could not be" + " found to execute CLI command from. To mitigate this issue, please refer to the troubleshooting " + " guidelines here at https: } builder.redirectErrorStream(true); Process process = builder.start(); StringBuilder output = new StringBuilder(); try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8.name()))) { String line; while (true) { line = reader.readLine(); if (line == null) { break; } if (line.startsWith(WINDOWS_PROCESS_ERROR_MESSAGE) || LINUX_MAC_PROCESS_ERROR_MESSAGE.matcher(line).matches()) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "AzureCliCredential authentication unavailable. Azure CLI not installed." + "To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: } output.append(line); } } String processOutput = output.toString(); process.waitFor(10, TimeUnit.SECONDS); if (process.exitValue() != 0) { if (processOutput.length() > 0) { String redactedOutput = redactInfo(processOutput); if (redactedOutput.contains("az login") || redactedOutput.contains("az account set")) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "AzureCliCredential authentication unavailable." + " Please run 'az login' to set up account. 
To further mitigate this" + " issue, please refer to the troubleshooting guidelines here at " + "https: } throw LOGGER.logExceptionAsError(new ClientAuthenticationException(redactedOutput, null)); } else { throw LOGGER.logExceptionAsError( new ClientAuthenticationException("Failed to invoke Azure CLI ", null)); } } LOGGER.verbose("Azure CLI Authentication => A token response was received from Azure CLI, deserializing the" + " response into an Access Token."); Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(processOutput, Map.class, SerializerEncoding.JSON); String accessToken = objectMap.get("accessToken"); String time = objectMap.get("expiresOn"); String timeToSecond = time.substring(0, time.indexOf(".")); String timeJoinedWithT = String.join("T", timeToSecond.split(" ")); OffsetDateTime expiresOn = LocalDateTime.parse(timeJoinedWithT, DateTimeFormatter.ISO_LOCAL_DATE_TIME) .atZone(ZoneId.systemDefault()) .toOffsetDateTime().withOffsetSameInstant(ZoneOffset.UTC); token = new AccessToken(accessToken, expiresOn); } catch (IOException | InterruptedException e) { throw LOGGER.logExceptionAsError(new IllegalStateException(e)); } catch (RuntimeException e) { return Mono.error(e instanceof CredentialUnavailableException ? LoggingUtil.logCredentialUnavailableException(LOGGER, options, (CredentialUnavailableException) e) : LOGGER.logExceptionAsError(e)); } return Mono.just(token); } /** * Asynchronously acquire a token from Active Directory with Azure Power Shell. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithAzurePowerShell(TokenRequestContext request) { List<CredentialUnavailableException> exceptions = new ArrayList<>(2); PowershellManager defaultPowerShellManager = new PowershellManager(Platform.isWindows() ? DEFAULT_WINDOWS_PS_EXECUTABLE : DEFAULT_LINUX_PS_EXECUTABLE); PowershellManager legacyPowerShellManager = Platform.isWindows() ? new PowershellManager(LEGACY_WINDOWS_PS_EXECUTABLE) : null; List<PowershellManager> powershellManagers = new ArrayList<>(2); powershellManagers.add(defaultPowerShellManager); if (legacyPowerShellManager != null) { powershellManagers.add(legacyPowerShellManager); } return Flux.fromIterable(powershellManagers) .flatMap(powershellManager -> getAccessTokenFromPowerShell(request, powershellManager) .onErrorResume(t -> { if (!t.getClass().getSimpleName().equals("CredentialUnavailableException")) { return Mono.error(new ClientAuthenticationException( "Azure Powershell authentication failed. Error Details: " + t.getMessage() + ". To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: null, t)); } exceptions.add((CredentialUnavailableException) t); return Mono.empty(); }), 1) .next() .switchIfEmpty(Mono.defer(() -> { CredentialUnavailableException last = exceptions.get(exceptions.size() - 1); for (int z = exceptions.size() - 2; z >= 0; z--) { CredentialUnavailableException current = exceptions.get(z); last = new CredentialUnavailableException("Azure PowerShell authentication failed using default" + "powershell(pwsh) with following error: " + current.getMessage() + "\r\n" + "Azure PowerShell authentication failed using powershell-core(powershell)" + " with following error: " + last.getMessage(), last.getCause()); } return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, (last))); })); } /** * Asynchronously acquire a token from Active Directory with Azure PowerShell. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithOBO(TokenRequestContext request) { return confidentialClientApplicationAccessor.getValue() .flatMap(confidentialClient -> Mono.fromFuture(() -> confidentialClient.acquireToken(OnBehalfOfParameters .builder(new HashSet<>(request.getScopes()), options.getUserAssertion()) .tenant(IdentityUtil.resolveTenantId(tenantId, request, options)) .build())) .map(MsalToken::new)); } private Mono<AccessToken> getAccessTokenFromPowerShell(TokenRequestContext request, PowershellManager powershellManager) { return powershellManager.initSession() .flatMap(manager -> { String azAccountsCommand = "Import-Module Az.Accounts -MinimumVersion 2.2.0 -PassThru"; return manager.runCommand(azAccountsCommand) .flatMap(output -> { if (output.contains("The specified module 'Az.Accounts' with version '2.2.0' was not loaded " + "because no valid module file")) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "Az.Account module with version >= 2.2.0 is not installed. It needs to be installed to" + " use Azure PowerShell Credential."))); } LOGGER.verbose("Az.accounts module was found installed."); StringBuilder accessTokenCommand = new StringBuilder("Get-AzAccessToken -ResourceUrl "); accessTokenCommand.append(ScopeUtil.scopesToResource(request.getScopes())); accessTokenCommand.append(" | ConvertTo-Json"); String command = accessTokenCommand.toString(); LOGGER.verbose("Azure Powershell Authentication => Executing the command `%s` in Azure " + "Powershell to retrieve the Access Token.", accessTokenCommand); return manager.runCommand(accessTokenCommand.toString()) .flatMap(out -> { if (out.contains("Run Connect-AzAccount to login")) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "Run Connect-AzAccount to login to Azure account in PowerShell."))); } try { LOGGER.verbose("Azure Powershell Authentication => Attempting to deserialize the " + "received response from Azure Powershell."); Map<String, String> objectMap = SERIALIZER_ADAPTER.deserialize(out, Map.class, SerializerEncoding.JSON); String accessToken = objectMap.get("Token"); String time = objectMap.get("ExpiresOn"); OffsetDateTime expiresOn = OffsetDateTime.parse(time) .withOffsetSameInstant(ZoneOffset.UTC); return Mono.just(new AccessToken(accessToken, expiresOn)); } catch (IOException e) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "Encountered error when deserializing response from Azure Power Shell.", e))); } }); }); }).doFinally(ignored -> powershellManager.close()); } /** * Asynchronously acquire a token from Active Directory with a client secret. * * @param request the details of the token request * @return a Publisher that emits an AccessToken */ private HttpPipeline setupPipeline(HttpClient httpClient) { List<HttpPipelinePolicy> policies = new ArrayList<>(); HttpLogOptions httpLogOptions = new HttpLogOptions(); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(new RetryPolicy()); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); return new HttpPipelineBuilder().httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])).build(); } /** * Asynchronously acquire a token from Active Directory with a username and a password. 
* * @param request the details of the token request * @param username the username of the user * @param password the password of the user * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithUsernamePassword(TokenRequestContext request, String username, String password) { return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> { UserNamePasswordParameters.UserNamePasswordParametersBuilder userNamePasswordParametersBuilder = UserNamePasswordParameters.builder(new HashSet<>(request.getScopes()), username, password.toCharArray()); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest .formatAsClaimsRequest(request.getClaims()); userNamePasswordParametersBuilder.claims(customClaimRequest); } userNamePasswordParametersBuilder.tenant( IdentityUtil.resolveTenantId(tenantId, request, options)); return pc.acquireToken(userNamePasswordParametersBuilder.build()); } )).onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with username and " + "password. To mitigate this issue, please refer to the troubleshooting guidelines " + "here at https: null, t)).map(MsalToken::new); } /** * Asynchronously acquire a token from the currently logged in client. * * @param request the details of the token request * @param account the account used to log in to acquire the last token * @return a Publisher that emits an AccessToken */ @SuppressWarnings("deprecation") public Mono<MsalToken> authenticateWithPublicClientCache(TokenRequestContext request, IAccount account) { return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> { SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder( new HashSet<>(request.getScopes())); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); parametersBuilder.claims(customClaimRequest); parametersBuilder.forceRefresh(true); } if (account != null) { parametersBuilder = parametersBuilder.account(account); } parametersBuilder.tenant( IdentityUtil.resolveTenantId(tenantId, request, options)); try { return pc.acquireTokenSilently(parametersBuilder.build()); } catch (MalformedURLException e) { return getFailedCompletableFuture(LOGGER.logExceptionAsError(new RuntimeException(e))); } }).map(MsalToken::new) .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET))) .switchIfEmpty(Mono.fromFuture(() -> { SilentParameters.SilentParametersBuilder forceParametersBuilder = SilentParameters.builder( new HashSet<>(request.getScopes())).forceRefresh(true); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest .formatAsClaimsRequest(request.getClaims()); forceParametersBuilder.claims(customClaimRequest); } if (account != null) { forceParametersBuilder = forceParametersBuilder.account(account); } forceParametersBuilder.tenant( IdentityUtil.resolveTenantId(tenantId, request, options)); try { return pc.acquireTokenSilently(forceParametersBuilder.build()); } catch (MalformedURLException e) { return getFailedCompletableFuture(LOGGER.logExceptionAsError(new RuntimeException(e))); } }).map(MsalToken::new))); } /** * Asynchronously acquire a token from the currently logged in client. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ @SuppressWarnings("deprecation") public Mono<AccessToken> authenticateWithConfidentialClientCache(TokenRequestContext request) { return confidentialClientApplicationAccessor.getValue() .flatMap(confidentialClient -> Mono.fromFuture(() -> { SilentParameters.SilentParametersBuilder parametersBuilder = SilentParameters.builder( new HashSet<>(request.getScopes())) .tenant(IdentityUtil.resolveTenantId(tenantId, request, options)); try { return confidentialClient.acquireTokenSilently(parametersBuilder.build()); } catch (MalformedURLException e) { return getFailedCompletableFuture(LOGGER.logExceptionAsError(new RuntimeException(e))); } }).map(ar -> (AccessToken) new MsalToken(ar)) .filter(t -> OffsetDateTime.now().isBefore(t.getExpiresAt().minus(REFRESH_OFFSET)))); } /** * Asynchronously acquire a token from Active Directory with a device code challenge. Active Directory will provide * a device code for login and the user must meet the challenge by authenticating in a browser on the current or a * different device. * * @param request the details of the token request * @param deviceCodeConsumer the user provided closure that will consume the device code challenge * @return a Publisher that emits an AccessToken when the device challenge is met, or an exception if the device * code expires */ public Mono<MsalToken> authenticateWithDeviceCode(TokenRequestContext request, Consumer<DeviceCodeInfo> deviceCodeConsumer) { return publicClientApplicationAccessor.getValue().flatMap(pc -> Mono.fromFuture(() -> { DeviceCodeFlowParameters.DeviceCodeFlowParametersBuilder parametersBuilder = DeviceCodeFlowParameters.builder( new HashSet<>(request.getScopes()), dc -> deviceCodeConsumer.accept( new DeviceCodeInfo(dc.userCode(), dc.deviceCode(), dc.verificationUri(), OffsetDateTime.now().plusSeconds(dc.expiresIn()), dc.message()))) .tenant(IdentityUtil .resolveTenantId(tenantId, request, options)); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); parametersBuilder.claims(customClaimRequest); } return pc.acquireToken(parametersBuilder.build()); }).onErrorMap(t -> new ClientAuthenticationException("Failed to acquire token with device code", null, t)) .map(MsalToken::new)); } /** * Asynchronously acquire a token from Active Directory with Visual Studio cached refresh token. * * @param request the details of the token request * @return a Publisher that emits an AccessToken. */ public Mono<MsalToken> authenticateWithVsCodeCredential(TokenRequestContext request, String cloud) { if (isADFSTenant()) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("VsCodeCredential " + "authentication unavailable. ADFS tenant/authorities are not supported. 
" + "To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: } VisualStudioCacheAccessor accessor = new VisualStudioCacheAccessor(); String credential = null; try { credential = accessor.getCredentials("VS Code Azure", cloud); } catch (CredentialUnavailableException e) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, e)); } RefreshTokenParameters.RefreshTokenParametersBuilder parametersBuilder = RefreshTokenParameters .builder(new HashSet<>(request.getScopes()), credential); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); parametersBuilder.claims(customClaimRequest); } return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(pc.acquireToken(parametersBuilder.build())) .onErrorResume(t -> { if (t instanceof MsalInteractionRequiredException) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("Failed to acquire token with" + " VS code credential." + " To mitigate this issue, please refer to the troubleshooting guidelines here at " + "https: } return Mono.error(new ClientAuthenticationException("Failed to acquire token with" + " VS code credential", null, t)); }) .map(MsalToken::new)); } /** * Asynchronously acquire a token from Active Directory with an authorization code from an oauth flow. * * @param request the details of the token request * @param authorizationCode the oauth2 authorization code * @param redirectUrl the redirectUrl where the authorization code is sent to * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithAuthorizationCode(TokenRequestContext request, String authorizationCode, URI redirectUrl) { AuthorizationCodeParameters.AuthorizationCodeParametersBuilder parametersBuilder = AuthorizationCodeParameters.builder(authorizationCode, redirectUrl) .scopes(new HashSet<>(request.getScopes())) .tenant(IdentityUtil .resolveTenantId(tenantId, request, options)); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); parametersBuilder.claims(customClaimRequest); } Mono<IAuthenticationResult> acquireToken; if (clientSecret != null) { acquireToken = confidentialClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(parametersBuilder.build()))); } else { acquireToken = publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(parametersBuilder.build()))); } return acquireToken.onErrorMap(t -> new ClientAuthenticationException( "Failed to acquire token with authorization code", null, t)).map(MsalToken::new); } /** * Asynchronously acquire a token from Active Directory by opening a browser and wait for the user to login. The * credential will run a minimal local HttpServer at the given port, so {@code http: * listed as a valid reply URL for the application. 
* * @param request the details of the token request * @param port the port on which the HTTP server is listening * @param redirectUrl the redirect URL to listen on and receive security code * @param loginHint the username suggestion to pre-fill the login page's username/email address field * @return a Publisher that emits an AccessToken */ public Mono<MsalToken> authenticateWithBrowserInteraction(TokenRequestContext request, Integer port, String redirectUrl, String loginHint) { URI redirectUri; String redirect; if (port != null) { redirect = HTTP_LOCALHOST + ":" + port; } else if (redirectUrl != null) { redirect = redirectUrl; } else { redirect = HTTP_LOCALHOST; } try { redirectUri = new URI(redirect); } catch (URISyntaxException e) { return Mono.error(LOGGER.logExceptionAsError(new RuntimeException(e))); } InteractiveRequestParameters.InteractiveRequestParametersBuilder builder = InteractiveRequestParameters.builder(redirectUri) .scopes(new HashSet<>(request.getScopes())) .prompt(Prompt.SELECT_ACCOUNT) .tenant(IdentityUtil .resolveTenantId(tenantId, request, options)); if (request.getClaims() != null) { ClaimsRequest customClaimRequest = CustomClaimRequest.formatAsClaimsRequest(request.getClaims()); builder.claims(customClaimRequest); } if (loginHint != null) { builder.loginHint(loginHint); } Mono<IAuthenticationResult> acquireToken = publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> pc.acquireToken(builder.build()))); return acquireToken.onErrorMap(t -> new ClientAuthenticationException( "Failed to acquire token with Interactive Browser Authentication.", null, t)).map(MsalToken::new); } /** * Gets token from shared token cache * */ public Mono<MsalToken> authenticateWithSharedTokenCache(TokenRequestContext request, String username) { return publicClientApplicationAccessor.getValue() .flatMap(pc -> Mono.fromFuture(() -> pc.getAccounts()) .onErrorMap(t -> new CredentialUnavailableException( "Cannot get accounts from token cache. Error: " + t.getMessage(), t)) .flatMap(set -> { IAccount requestedAccount; Map<String, IAccount> accounts = new HashMap<>(); if (set.isEmpty()) { return Mono.error(LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException("SharedTokenCacheCredential " + "authentication unavailable. No accounts were found in the cache."))); } for (IAccount cached : set) { if (username == null || username.equals(cached.username())) { if (!accounts.containsKey(cached.homeAccountId())) { accounts.put(cached.homeAccountId(), cached); } } } if (accounts.isEmpty()) { return Mono.error(new RuntimeException(String.format("SharedTokenCacheCredential " + "authentication unavailable. No account matching the specified username: %s was " + "found in the cache.", username))); } else if (accounts.size() > 1) { if (username == null) { return Mono.error(new RuntimeException("SharedTokenCacheCredential authentication " + "unavailable. Multiple accounts were found in the cache. Use username and " + "tenant id to disambiguate.")); } else { return Mono.error(new RuntimeException(String.format("SharedTokenCacheCredential " + "authentication unavailable. Multiple accounts matching the specified username: " + "%s were found in the cache.", username))); } } else { requestedAccount = accounts.values().iterator().next(); } return authenticateWithPublicClientCache(request, requestedAccount); })); } /** * Asynchronously acquire a token from the Azure Arc Managed Service Identity endpoint. 
* * @param identityEndpoint the Identity endpoint to acquire token from * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToArcManagedIdentityEndpoint(String identityEndpoint, TokenRequestContext request) { return Mono.fromCallable(() -> { HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); payload.append("resource="); payload.append(URLEncoder.encode(ScopeUtil.scopesToResource(request.getScopes()), StandardCharsets.UTF_8.name())); payload.append("&api-version="); payload.append(URLEncoder.encode("2019-11-01", StandardCharsets.UTF_8.name())); URL url = new URL(String.format("%s?%s", identityEndpoint, payload)); String secretKey = null; try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()).useDelimiter("\\A"); } catch (IOException e) { if (connection == null) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Failed to initialize " + "Http URL connection to the endpoint.", null, e)); } int status = connection.getResponseCode(); if (status != 401) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException(String.format("Expected a 401" + " Unauthorized response from Azure Arc Managed Identity Endpoint, received: %d", status), null, e)); } String realm = connection.getHeaderField("WWW-Authenticate"); if (realm == null) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Did not receive a value" + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint", null)); } int separatorIndex = realm.indexOf("="); if (separatorIndex == -1) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Did not receive a correct value" + " for WWW-Authenticate header in the response from Azure Arc Managed Identity Endpoint", null)); } String secretKeyPath = realm.substring(separatorIndex + 1); secretKey = new String(Files.readAllBytes(Paths.get(secretKeyPath)), StandardCharsets.UTF_8); } finally { if (connection != null) { connection.disconnect(); } } if (secretKey == null) { throw LOGGER.logExceptionAsError(new ClientAuthenticationException("Did not receive a secret value" + " in the response from Azure Arc Managed Identity Endpoint", null)); } try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Authorization", String.format("Basic %s", secretKey)); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner scanner = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = scanner.hasNext() ? scanner.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } finally { if (connection != null) { connection.disconnect(); } } }); } /** * Asynchronously acquire a token from the Azure Arc Managed Service Identity endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateWithExchangeToken(TokenRequestContext request) { return clientAssertionAccessor.getValue() .flatMap(assertionToken -> Mono.fromCallable(() -> { String authorityUrl = TRAILING_FORWARD_SLASHES.matcher(options.getAuthorityHost()).replaceAll("") + "/" + tenantId + "/oauth2/v2.0/token"; StringBuilder urlParametersBuilder = new StringBuilder(); urlParametersBuilder.append("client_assertion="); urlParametersBuilder.append(assertionToken); urlParametersBuilder.append("&client_assertion_type=urn:ietf:params:oauth:client-assertion-type" + ":jwt-bearer"); urlParametersBuilder.append("&client_id="); urlParametersBuilder.append(clientId); urlParametersBuilder.append("&grant_type=client_credentials"); urlParametersBuilder.append("&scope="); urlParametersBuilder.append(URLEncoder.encode(request.getScopes().get(0), StandardCharsets.UTF_8.name())); String urlParams = urlParametersBuilder.toString(); byte[] postData = urlParams.getBytes(StandardCharsets.UTF_8); int postDataLength = postData.length; HttpURLConnection connection = null; URL url = new URL(authorityUrl); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("POST"); connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded"); connection.setRequestProperty("Content-Length", Integer.toString(postDataLength)); connection.setDoOutput(true); try (DataOutputStream outputStream = new DataOutputStream(connection.getOutputStream())) { outputStream.write(postData); } connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } finally { if (connection != null) { connection.disconnect(); } } })); } /** * Asynchronously acquire a token from the Azure Service Fabric Managed Service Identity endpoint. 
* * @param identityEndpoint the Identity endpoint to acquire token from * @param identityHeader the identity header to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToServiceFabricManagedIdentityEndpoint(String identityEndpoint, String identityHeader, String thumbprint, TokenRequestContext request) { return Mono.fromCallable(() -> { HttpsURLConnection connection = null; String endpoint = identityEndpoint; String headerValue = identityHeader; String endpointVersion = SERVICE_FABRIC_MANAGED_IDENTITY_API_VERSION; String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); payload.append("resource="); payload.append(URLEncoder.encode(resource, StandardCharsets.UTF_8.name())); payload.append("&api-version="); payload.append(URLEncoder.encode(endpointVersion, StandardCharsets.UTF_8.name())); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, StandardCharsets.UTF_8.name())); } if (resourceId != null) { payload.append("&mi_res_id="); payload.append(URLEncoder.encode(resourceId, StandardCharsets.UTF_8.name())); } try { URL url = new URL(String.format("%s?%s", endpoint, payload)); connection = (HttpsURLConnection) url.openConnection(); IdentitySslUtil.addTrustedCertificateThumbprint(connection, thumbprint, LOGGER); connection.setRequestMethod("GET"); if (headerValue != null) { connection.setRequestProperty("Secret", headerValue); } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } finally { if (connection != null) { connection.disconnect(); } } }); } /** * Asynchronously acquire a token from the App Service Managed Service Identity endpoint. * * Specifying identity parameters will use the 2019-08-01 endpoint version. * Specifying MSI parameters will use the 2017-09-01 endpoint version. 
* * @param identityEndpoint the Identity endpoint to acquire token from * @param identityHeader the identity header to acquire token with * @param msiEndpoint the MSI endpoint to acquire token from * @param msiSecret the MSI secret to acquire token with * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToManagedIdentityEndpoint(String identityEndpoint, String identityHeader, String msiEndpoint, String msiSecret, TokenRequestContext request) { return Mono.fromCallable(() -> { String endpoint; String headerValue; String endpointVersion; if (identityEndpoint != null) { endpoint = identityEndpoint; headerValue = identityHeader; endpointVersion = IDENTITY_ENDPOINT_VERSION; } else { endpoint = msiEndpoint; headerValue = msiSecret; endpointVersion = MSI_ENDPOINT_VERSION; } String resource = ScopeUtil.scopesToResource(request.getScopes()); HttpURLConnection connection = null; StringBuilder payload = new StringBuilder(); payload.append("resource="); payload.append(URLEncoder.encode(resource, StandardCharsets.UTF_8.name())); payload.append("&api-version="); payload.append(URLEncoder.encode(endpointVersion, StandardCharsets.UTF_8.name())); if (clientId != null) { if (endpointVersion.equals(IDENTITY_ENDPOINT_VERSION)) { payload.append("&client_id="); } else { payload.append("&clientid="); } payload.append(URLEncoder.encode(clientId, StandardCharsets.UTF_8.name())); } if (resourceId != null) { payload.append("&mi_res_id="); payload.append(URLEncoder.encode(resourceId, StandardCharsets.UTF_8.name())); } try { URL url = new URL(String.format("%s?%s", endpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); if (headerValue != null) { if (IDENTITY_ENDPOINT_VERSION.equals(endpointVersion)) { connection.setRequestProperty("X-IDENTITY-HEADER", headerValue); } else { connection.setRequestProperty("Secret", headerValue); } } connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } finally { if (connection != null) { connection.disconnect(); } } }); } /** * Asynchronously acquire a token from the Virtual Machine IMDS endpoint. 
* * @param request the details of the token request * @return a Publisher that emits an AccessToken */ public Mono<AccessToken> authenticateToIMDSEndpoint(TokenRequestContext request) { String resource = ScopeUtil.scopesToResource(request.getScopes()); StringBuilder payload = new StringBuilder(); final int imdsUpgradeTimeInMs = 70 * 1000; try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", StandardCharsets.UTF_8.name())); payload.append("&resource="); payload.append(URLEncoder.encode(resource, StandardCharsets.UTF_8.name())); if (clientId != null) { payload.append("&client_id="); payload.append(URLEncoder.encode(clientId, StandardCharsets.UTF_8.name())); } if (resourceId != null) { payload.append("&mi_res_id="); payload.append(URLEncoder.encode(resourceId, StandardCharsets.UTF_8.name())); } } catch (IOException exception) { return Mono.error(exception); } String endpoint = TRAILING_FORWARD_SLASHES.matcher(options.getImdsAuthorityHost()).replaceAll("") + IdentityConstants.DEFAULT_IMDS_TOKENPATH; return checkIMDSAvailable(endpoint).flatMap(available -> Mono.fromCallable(() -> { int retry = 1; while (retry <= options.getMaxRetry()) { URL url = null; HttpURLConnection connection = null; try { url = new URL(String.format("%s?%s", endpoint, payload)); connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setRequestProperty("Metadata", "true"); connection.connect(); Scanner s = new Scanner(connection.getInputStream(), StandardCharsets.UTF_8.name()) .useDelimiter("\\A"); String result = s.hasNext() ? s.next() : ""; return SERIALIZER_ADAPTER.deserialize(result, MSIToken.class, SerializerEncoding.JSON); } catch (IOException exception) { if (connection == null) { throw LOGGER.logExceptionAsError(new RuntimeException( String.format("Could not connect to the url: %s.", url), exception)); } int responseCode; try { responseCode = connection.getResponseCode(); } catch (Exception e) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "ManagedIdentityCredential authentication unavailable. " + "Connection to IMDS endpoint cannot be established, " + e.getMessage() + ".", e)); } if (responseCode == 400) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "ManagedIdentityCredential authentication unavailable. " + "Connection to IMDS endpoint cannot be established.", null)); } if (responseCode == 410 || responseCode == 429 || responseCode == 404 || (responseCode >= 500 && responseCode <= 599)) { int retryTimeoutInMs = options.getRetryTimeout() .apply(Duration.ofSeconds(RANDOM.nextInt(retry))).getNano() / 1000; retryTimeoutInMs = (responseCode == 410 && retryTimeoutInMs < imdsUpgradeTimeInMs) ? 
imdsUpgradeTimeInMs : retryTimeoutInMs; retry++; if (retry > options.getMaxRetry()) { break; } else { sleep(retryTimeoutInMs); } } else { throw LOGGER.logExceptionAsError(new RuntimeException( "Couldn't acquire access token from IMDS, verify your objectId, " + "clientId or msiResourceId", exception)); } } finally { if (connection != null) { connection.disconnect(); } } } throw LOGGER.logExceptionAsError(new RuntimeException( String.format("MSI: Failed to acquire tokens after retrying %s times", options.getMaxRetry()))); })); } private Mono<Boolean> checkIMDSAvailable(String endpoint) { StringBuilder payload = new StringBuilder(); try { payload.append("api-version="); payload.append(URLEncoder.encode("2018-02-01", StandardCharsets.UTF_8.name())); } catch (IOException exception) { return Mono.error(exception); } return Mono.fromCallable(() -> { HttpURLConnection connection = null; URL url = new URL(String.format("%s?%s", endpoint, payload)); try { connection = (HttpURLConnection) url.openConnection(); connection.setRequestMethod("GET"); connection.setConnectTimeout(500); connection.connect(); } catch (Exception e) { throw LoggingUtil.logCredentialUnavailableException(LOGGER, options, new CredentialUnavailableException( "ManagedIdentityCredential authentication unavailable. " + "Connection to IMDS endpoint cannot be established, " + e.getMessage() + ".", e)); } finally { if (connection != null) { connection.disconnect(); } } return true; }); } private static void sleep(int millis) { try { Thread.sleep(millis); } catch (InterruptedException ex) { throw new IllegalStateException(ex); } } private static Proxy proxyOptionsToJavaNetProxy(ProxyOptions options) { switch (options.getType()) { case SOCKS4: case SOCKS5: return new Proxy(Type.SOCKS, options.getAddress()); case HTTP: default: return new Proxy(Type.HTTP, options.getAddress()); } } private String getSafeWorkingDirectory() { if (isWindowsPlatform()) { if (CoreUtils.isNullOrEmpty(DEFAULT_WINDOWS_SYSTEM_ROOT)) { return null; } return DEFAULT_WINDOWS_SYSTEM_ROOT + "\\system32"; } else { return DEFAULT_MAC_LINUX_PATH; } } private boolean isWindowsPlatform() { return System.getProperty("os.name").contains("Windows"); } private String redactInfo(String input) { return ACCESS_TOKEN_PATTERN.matcher(input).replaceAll("****"); } void openUrl(String url) throws IOException { Runtime rt = Runtime.getRuntime(); String os = System.getProperty("os.name").toLowerCase(Locale.ROOT); if (os.contains("win")) { rt.exec("rundll32 url.dll,FileProtocolHandler " + url); } else if (os.contains("mac")) { rt.exec("open " + url); } else if (os.contains("nix") || os.contains("nux")) { rt.exec("xdg-open " + url); } else { LOGGER.error("Browser could not be opened - please open {} in a browser on this device.", url); } } private CompletableFuture<IAuthenticationResult> getFailedCompletableFuture(Exception e) { CompletableFuture<IAuthenticationResult> completableFuture = new CompletableFuture<>(); completableFuture.completeExceptionally(e); return completableFuture; } private void initializeHttpPipelineAdapter() { HttpPipeline httpPipeline = options.getHttpPipeline(); if (httpPipeline != null) { httpPipelineAdapter = new HttpPipelineAdapter(httpPipeline, options); } else { HttpClient httpClient = options.getHttpClient(); if (httpClient != null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(httpClient), options); } else if (options.getProxyOptions() == null) { httpPipelineAdapter = new HttpPipelineAdapter(setupPipeline(HttpClient.createDefault()), options); 
} } } /** * Get the configured tenant id. * * @return the tenant id. */ public String getTenantId() { return tenantId; } /** * Get the configured client id. * * @return the client id. */ public String getClientId() { return clientId; } /** * Get the configured identity client options. * * @return the client options. */ public IdentityClientOptions getIdentityClientOptions() { return options; } private boolean isADFSTenant() { return this.tenantId.equals(ADFS_TENANT); } private byte[] getCertificateBytes() throws IOException { if (certificatePath != null) { return Files.readAllBytes(Paths.get(certificatePath)); } else if (certificate != null) { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int read = certificate.read(buffer, 0, buffer.length); while (read != -1) { outputStream.write(buffer, 0, read); read = certificate.read(buffer, 0, buffer.length); } return outputStream.toByteArray(); } else { return new byte[0]; } } private InputStream getCertificateInputStream() throws IOException { if (certificatePath != null) { return new BufferedInputStream(new FileInputStream(certificatePath)); } else { return certificate; } } }
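The managed identity code above repeats one low-level pattern: URL-encode the query parameters, issue a GET through HttpURLConnection with the "Metadata: true" request header, and drain the whole response by giving a Scanner the "\A" delimiter. The standalone sketch below shows only that idiom; the endpoint address, API version, and resource URL are illustrative placeholders rather than values the client actually resolves, and no Azure-specific types are used.

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

public final class ImdsProbeSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder values for illustration only; the real client resolves these from configuration.
        String endpoint = "http://169.254.169.254/metadata/identity/oauth2/token";
        String resource = "https://management.azure.com/";

        // Build the query string the same way the client does: URL-encode every value.
        StringBuilder payload = new StringBuilder();
        payload.append("api-version=").append(URLEncoder.encode("2018-02-01", StandardCharsets.UTF_8.name()));
        payload.append("&resource=").append(URLEncoder.encode(resource, StandardCharsets.UTF_8.name()));

        URL url = new URL(String.format("%s?%s", endpoint, payload));
        HttpURLConnection connection = null;
        try {
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Metadata", "true");
            connection.connect();

            // A Scanner whose delimiter is "\A" (start of input) returns the whole stream as one token.
            try (InputStream body = connection.getInputStream();
                 Scanner scanner = new Scanner(body, StandardCharsets.UTF_8.name()).useDelimiter("\\A")) {
                String result = scanner.hasNext() ? scanner.next() : "";
                System.out.println(result);
            }
        } finally {
            if (connection != null) {
                connection.disconnect();
            }
        }
    }
}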
Does `headerName` need to be passed as an array? #Resolved
String tryGetRedirectHeader(HttpHeaders headers, String headerName) { String headerValue = headers.getValue(headerName); if (CoreUtils.isNullOrEmpty(headerValue)) { LOGGER.error("Redirect url was null for header name: {}, request redirect was terminated.", new Object[]{headerName}); return null; } else { return headerValue; } }
LOGGER.error("Redirect url was null for header name: {}, request redirect was terminated.", new Object[]{headerName});
String tryGetRedirectHeader(HttpHeaders headers, String headerName) { String headerValue = headers.getValue(headerName); if (CoreUtils.isNullOrEmpty(headerValue)) { LOGGER.error("Redirect url was null for header name: {}, request redirect was terminated.", headerName); return null; } else { return headerValue; } }
class ContainerRegistryRedirectPolicy implements HttpPipelinePolicy { private static final ClientLogger LOGGER = new ClientLogger(com.azure.core.http.policy.DefaultRedirectStrategy.class); private static final int MAX_REDIRECT_ATTEMPTS; private static final String REDIRECT_LOCATION_HEADER_NAME; private static final int PERMANENT_REDIRECT_STATUS_CODE; private static final int TEMPORARY_REDIRECT_STATUS_CODE; private static final Set<HttpMethod> REDIRECT_ALLOWED_METHODS; private static final String AUTHORIZATION; static { REDIRECT_ALLOWED_METHODS = new HashSet<>(Arrays.asList(HttpMethod.GET, HttpMethod.HEAD)); PERMANENT_REDIRECT_STATUS_CODE = 308; TEMPORARY_REDIRECT_STATUS_CODE = 307; REDIRECT_LOCATION_HEADER_NAME = "Location"; MAX_REDIRECT_ATTEMPTS = 3; AUTHORIZATION = "Authorization"; } @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { return this.attemptRedirect(context, next, context.getHttpRequest(), 1, new HashSet<>()); } /** * Function to process through the HTTP Response received in the pipeline * and redirect sending the request with new redirect url. */ private Mono<HttpResponse> attemptRedirect(HttpPipelineCallContext context, HttpPipelineNextPolicy next, HttpRequest originalHttpRequest, int redirectAttempt, Set<String> attemptedRedirectUrls) { context.setHttpRequest(originalHttpRequest.copy()); return next.clone().process().flatMap((httpResponse) -> { if (this.shouldAttemptRedirect(context, httpResponse, redirectAttempt, attemptedRedirectUrls)) { HttpRequest redirectRequestCopy = this.createRedirectRequest(httpResponse); return httpResponse.getBody().ignoreElements() .then(this.attemptRedirect(context, next, redirectRequestCopy, redirectAttempt + 1, attemptedRedirectUrls)) .flatMap(newResponse -> { String digest = httpResponse.getHeaders().getValue(DOCKER_DIGEST_HEADER_NAME); if (digest != null) { newResponse.getHeaders().add(DOCKER_DIGEST_HEADER_NAME, digest); } return Mono.just(newResponse); }); } else { return Mono.just(httpResponse); } }); } public boolean shouldAttemptRedirect(HttpPipelineCallContext context, HttpResponse httpResponse, int tryCount, Set<String> attemptedRedirectUrls) { if (this.isValidRedirectStatusCode(httpResponse.getStatusCode()) && this.isValidRedirectCount(tryCount) && this.isAllowedRedirectMethod(httpResponse.getRequest().getHttpMethod())) { String redirectUrl = this.tryGetRedirectHeader(httpResponse.getHeaders(), REDIRECT_LOCATION_HEADER_NAME); if (redirectUrl != null && !this.alreadyAttemptedRedirectUrl(redirectUrl, attemptedRedirectUrls)) { LOGGER.verbose("[Redirecting] Try count: {}, Attempted Redirect URLs: {}", new Object[]{tryCount, attemptedRedirectUrls.toString()}); attemptedRedirectUrls.add(redirectUrl); return true; } else { return false; } } else { return false; } } private HttpRequest createRedirectRequest(HttpResponse httpResponse) { String responseLocation = this.tryGetRedirectHeader(httpResponse.getHeaders(), REDIRECT_LOCATION_HEADER_NAME); HttpRequest request = httpResponse.getRequest(); request.setUrl(responseLocation); request.getHeaders().remove(AUTHORIZATION); return httpResponse.getRequest().setUrl(responseLocation); } private boolean alreadyAttemptedRedirectUrl(String redirectUrl, Set<String> attemptedRedirectUrls) { if (attemptedRedirectUrls.contains(redirectUrl)) { LOGGER.error("Request was redirected more than once to: {}", new Object[]{redirectUrl}); return true; } else { return false; } } private boolean isValidRedirectCount(int tryCount) { if (tryCount >= 
MAX_REDIRECT_ATTEMPTS) { LOGGER.error("Request has been redirected more than {} times.", new Object[]{MAX_REDIRECT_ATTEMPTS}); return false; } else { return true; } } private boolean isAllowedRedirectMethod(HttpMethod httpMethod) { if (REDIRECT_ALLOWED_METHODS.contains(httpMethod)) { return true; } else { LOGGER.error("Request was redirected from an invalid redirect allowed method: {}", new Object[]{httpMethod}); return false; } } private boolean isValidRedirectStatusCode(int statusCode) { return statusCode == PERMANENT_REDIRECT_STATUS_CODE || statusCode == TEMPORARY_REDIRECT_STATUS_CODE; } }
class ContainerRegistryRedirectPolicy implements HttpPipelinePolicy { private static final ClientLogger LOGGER = new ClientLogger(com.azure.core.http.policy.DefaultRedirectStrategy.class); private static final int MAX_REDIRECT_ATTEMPTS; private static final String REDIRECT_LOCATION_HEADER_NAME; private static final int PERMANENT_REDIRECT_STATUS_CODE; private static final int TEMPORARY_REDIRECT_STATUS_CODE; private static final Set<HttpMethod> REDIRECT_ALLOWED_METHODS; private static final String AUTHORIZATION; static { REDIRECT_ALLOWED_METHODS = new HashSet<>(Arrays.asList(HttpMethod.GET, HttpMethod.HEAD)); PERMANENT_REDIRECT_STATUS_CODE = 308; TEMPORARY_REDIRECT_STATUS_CODE = 307; REDIRECT_LOCATION_HEADER_NAME = "Location"; MAX_REDIRECT_ATTEMPTS = 3; AUTHORIZATION = "Authorization"; } @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { return this.attemptRedirect(context, next, context.getHttpRequest(), 1, new HashSet<>()); } /** * Function to process through the HTTP Response received in the pipeline * and redirect sending the request with new redirect url. */ private Mono<HttpResponse> attemptRedirect(HttpPipelineCallContext context, HttpPipelineNextPolicy next, HttpRequest originalHttpRequest, int redirectAttempt, Set<String> attemptedRedirectUrls) { context.setHttpRequest(originalHttpRequest.copy()); return next.clone().process().flatMap((httpResponse) -> { if (this.shouldAttemptRedirect(context, httpResponse, redirectAttempt, attemptedRedirectUrls)) { HttpRequest redirectRequestCopy = this.createRedirectRequest(httpResponse); return httpResponse.getBody().ignoreElements() .then(this.attemptRedirect(context, next, redirectRequestCopy, redirectAttempt + 1, attemptedRedirectUrls)) .flatMap(newResponse -> { String digest = httpResponse.getHeaders().getValue(DOCKER_DIGEST_HEADER_NAME); if (digest != null) { newResponse.getHeaders().add(DOCKER_DIGEST_HEADER_NAME, digest); } return Mono.just(newResponse); }); } else { return Mono.just(httpResponse); } }); } public boolean shouldAttemptRedirect(HttpPipelineCallContext context, HttpResponse httpResponse, int tryCount, Set<String> attemptedRedirectUrls) { if (this.isValidRedirectStatusCode(httpResponse.getStatusCode()) && this.isValidRedirectCount(tryCount) && this.isAllowedRedirectMethod(httpResponse.getRequest().getHttpMethod())) { String redirectUrl = this.tryGetRedirectHeader(httpResponse.getHeaders(), REDIRECT_LOCATION_HEADER_NAME); if (redirectUrl != null && !this.alreadyAttemptedRedirectUrl(redirectUrl, attemptedRedirectUrls)) { LOGGER.verbose("[Redirecting] Try count: {}, Attempted Redirect URLs: {}", tryCount, String.join(",", attemptedRedirectUrls)); attemptedRedirectUrls.add(redirectUrl); return true; } else { return false; } } else { return false; } } private HttpRequest createRedirectRequest(HttpResponse httpResponse) { String responseLocation = this.tryGetRedirectHeader(httpResponse.getHeaders(), REDIRECT_LOCATION_HEADER_NAME); HttpRequest request = httpResponse.getRequest(); request.setUrl(responseLocation); request.getHeaders().remove(AUTHORIZATION); return httpResponse.getRequest().setUrl(responseLocation); } private boolean alreadyAttemptedRedirectUrl(String redirectUrl, Set<String> attemptedRedirectUrls) { if (attemptedRedirectUrls.contains(redirectUrl)) { LOGGER.error("Request was redirected more than once to: {}", new Object[]{redirectUrl}); return true; } else { return false; } } private boolean isValidRedirectCount(int tryCount) { if (tryCount >= 
MAX_REDIRECT_ATTEMPTS) { LOGGER.error("Request has been redirected more than {} times.", new Object[]{MAX_REDIRECT_ATTEMPTS}); return false; } else { return true; } } private boolean isAllowedRedirectMethod(HttpMethod httpMethod) { if (REDIRECT_ALLOWED_METHODS.contains(httpMethod)) { return true; } else { LOGGER.error("Request was redirected from an invalid redirect allowed method: {}", new Object[]{httpMethod}); return false; } } private boolean isValidRedirectStatusCode(int statusCode) { return statusCode == PERMANENT_REDIRECT_STATUS_CODE || statusCode == TEMPORARY_REDIRECT_STATUS_CODE; } }
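The comment above asks whether headerName must be wrapped in an array before being handed to the logger. A minimal sketch, assuming only a varargs-style error(String format, Object... args) method (a local stand-in, not the actual ClientLogger API), shows why the explicit new Object[]{...} is unnecessary: the compiler packs a lone argument into the varargs array automatically, and an Object[] passed directly is simply used as that array.

import java.text.MessageFormat;

public final class VarargsLoggingSketch {
    // Stand-in for a logger method declared with varargs, e.g. error(String, Object...).
    // This is NOT the Azure ClientLogger API, only an illustration of the calling convention.
    static void error(String format, Object... args) {
        // Rewrite the {} placeholders as indexed ones so MessageFormat can render them.
        String indexed = format;
        for (int i = 0; i < args.length; i++) {
            indexed = indexed.replaceFirst("\\{\\}", "{" + i + "}");
        }
        System.err.println(MessageFormat.format(indexed, args));
    }

    public static void main(String[] args) {
        String headerName = "Location";

        // Equivalent calls: the compiler packs the lone argument into an Object[] for the first call,
        // and the explicitly built Object[] in the second call is used as the varargs array directly.
        error("Redirect url was null for header name: {}, request redirect was terminated.", headerName);
        error("Redirect url was null for header name: {}, request redirect was terminated.",
            new Object[]{headerName});
    }
}

Both calls log the same message, so dropping the array allocation, as the revised method body does, only removes noise.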
nit: Don't call toString on a log argument #Resolved
public boolean shouldAttemptRedirect(HttpPipelineCallContext context, HttpResponse httpResponse, int tryCount, Set<String> attemptedRedirectUrls) { if (this.isValidRedirectStatusCode(httpResponse.getStatusCode()) && this.isValidRedirectCount(tryCount) && this.isAllowedRedirectMethod(httpResponse.getRequest().getHttpMethod())) { String redirectUrl = this.tryGetRedirectHeader(httpResponse.getHeaders(), REDIRECT_LOCATION_HEADER_NAME); if (redirectUrl != null && !this.alreadyAttemptedRedirectUrl(redirectUrl, attemptedRedirectUrls)) { LOGGER.verbose("[Redirecting] Try count: {}, Attempted Redirect URLs: {}", new Object[]{tryCount, attemptedRedirectUrls.toString()}); attemptedRedirectUrls.add(redirectUrl); return true; } else { return false; } } else { return false; } }
LOGGER.verbose("[Redirecting] Try count: {}, Attempted Redirect URLs: {}", new Object[]{tryCount, attemptedRedirectUrls.toString()});
public boolean shouldAttemptRedirect(HttpPipelineCallContext context, HttpResponse httpResponse, int tryCount, Set<String> attemptedRedirectUrls) { if (this.isValidRedirectStatusCode(httpResponse.getStatusCode()) && this.isValidRedirectCount(tryCount) && this.isAllowedRedirectMethod(httpResponse.getRequest().getHttpMethod())) { String redirectUrl = this.tryGetRedirectHeader(httpResponse.getHeaders(), REDIRECT_LOCATION_HEADER_NAME); if (redirectUrl != null && !this.alreadyAttemptedRedirectUrl(redirectUrl, attemptedRedirectUrls)) { LOGGER.verbose("[Redirecting] Try count: {}, Attempted Redirect URLs: {}", tryCount, String.join(",", attemptedRedirectUrls)); attemptedRedirectUrls.add(redirectUrl); return true; } else { return false; } } else { return false; } }
class ContainerRegistryRedirectPolicy implements HttpPipelinePolicy { private static final ClientLogger LOGGER = new ClientLogger(com.azure.core.http.policy.DefaultRedirectStrategy.class); private static final int MAX_REDIRECT_ATTEMPTS; private static final String REDIRECT_LOCATION_HEADER_NAME; private static final int PERMANENT_REDIRECT_STATUS_CODE; private static final int TEMPORARY_REDIRECT_STATUS_CODE; private static final Set<HttpMethod> REDIRECT_ALLOWED_METHODS; private static final String AUTHORIZATION; static { REDIRECT_ALLOWED_METHODS = new HashSet<>(Arrays.asList(HttpMethod.GET, HttpMethod.HEAD)); PERMANENT_REDIRECT_STATUS_CODE = 308; TEMPORARY_REDIRECT_STATUS_CODE = 307; REDIRECT_LOCATION_HEADER_NAME = "Location"; MAX_REDIRECT_ATTEMPTS = 3; AUTHORIZATION = "Authorization"; } @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { return this.attemptRedirect(context, next, context.getHttpRequest(), 1, new HashSet<>()); } /** * Function to process through the HTTP Response received in the pipeline * and redirect sending the request with new redirect url. */ private Mono<HttpResponse> attemptRedirect(HttpPipelineCallContext context, HttpPipelineNextPolicy next, HttpRequest originalHttpRequest, int redirectAttempt, Set<String> attemptedRedirectUrls) { context.setHttpRequest(originalHttpRequest.copy()); return next.clone().process().flatMap((httpResponse) -> { if (this.shouldAttemptRedirect(context, httpResponse, redirectAttempt, attemptedRedirectUrls)) { HttpRequest redirectRequestCopy = this.createRedirectRequest(httpResponse); return httpResponse.getBody().ignoreElements() .then(this.attemptRedirect(context, next, redirectRequestCopy, redirectAttempt + 1, attemptedRedirectUrls)) .flatMap(newResponse -> { String digest = httpResponse.getHeaders().getValue(DOCKER_DIGEST_HEADER_NAME); if (digest != null) { newResponse.getHeaders().add(DOCKER_DIGEST_HEADER_NAME, digest); } return Mono.just(newResponse); }); } else { return Mono.just(httpResponse); } }); } private HttpRequest createRedirectRequest(HttpResponse httpResponse) { String responseLocation = this.tryGetRedirectHeader(httpResponse.getHeaders(), REDIRECT_LOCATION_HEADER_NAME); HttpRequest request = httpResponse.getRequest(); request.setUrl(responseLocation); request.getHeaders().remove(AUTHORIZATION); return httpResponse.getRequest().setUrl(responseLocation); } private boolean alreadyAttemptedRedirectUrl(String redirectUrl, Set<String> attemptedRedirectUrls) { if (attemptedRedirectUrls.contains(redirectUrl)) { LOGGER.error("Request was redirected more than once to: {}", new Object[]{redirectUrl}); return true; } else { return false; } } private boolean isValidRedirectCount(int tryCount) { if (tryCount >= MAX_REDIRECT_ATTEMPTS) { LOGGER.error("Request has been redirected more than {} times.", new Object[]{MAX_REDIRECT_ATTEMPTS}); return false; } else { return true; } } private boolean isAllowedRedirectMethod(HttpMethod httpMethod) { if (REDIRECT_ALLOWED_METHODS.contains(httpMethod)) { return true; } else { LOGGER.error("Request was redirected from an invalid redirect allowed method: {}", new Object[]{httpMethod}); return false; } } private boolean isValidRedirectStatusCode(int statusCode) { return statusCode == PERMANENT_REDIRECT_STATUS_CODE || statusCode == TEMPORARY_REDIRECT_STATUS_CODE; } String tryGetRedirectHeader(HttpHeaders headers, String headerName) { String headerValue = headers.getValue(headerName); if (CoreUtils.isNullOrEmpty(headerValue)) { LOGGER.error("Redirect 
url was null for header name: {}, request redirect was terminated.", new Object[]{headerName}); return null; } else { return headerValue; } } }
class ContainerRegistryRedirectPolicy implements HttpPipelinePolicy { private static final ClientLogger LOGGER = new ClientLogger(com.azure.core.http.policy.DefaultRedirectStrategy.class); private static final int MAX_REDIRECT_ATTEMPTS; private static final String REDIRECT_LOCATION_HEADER_NAME; private static final int PERMANENT_REDIRECT_STATUS_CODE; private static final int TEMPORARY_REDIRECT_STATUS_CODE; private static final Set<HttpMethod> REDIRECT_ALLOWED_METHODS; private static final String AUTHORIZATION; static { REDIRECT_ALLOWED_METHODS = new HashSet<>(Arrays.asList(HttpMethod.GET, HttpMethod.HEAD)); PERMANENT_REDIRECT_STATUS_CODE = 308; TEMPORARY_REDIRECT_STATUS_CODE = 307; REDIRECT_LOCATION_HEADER_NAME = "Location"; MAX_REDIRECT_ATTEMPTS = 3; AUTHORIZATION = "Authorization"; } @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { return this.attemptRedirect(context, next, context.getHttpRequest(), 1, new HashSet<>()); } /** * Function to process through the HTTP Response received in the pipeline * and redirect sending the request with new redirect url. */ private Mono<HttpResponse> attemptRedirect(HttpPipelineCallContext context, HttpPipelineNextPolicy next, HttpRequest originalHttpRequest, int redirectAttempt, Set<String> attemptedRedirectUrls) { context.setHttpRequest(originalHttpRequest.copy()); return next.clone().process().flatMap((httpResponse) -> { if (this.shouldAttemptRedirect(context, httpResponse, redirectAttempt, attemptedRedirectUrls)) { HttpRequest redirectRequestCopy = this.createRedirectRequest(httpResponse); return httpResponse.getBody().ignoreElements() .then(this.attemptRedirect(context, next, redirectRequestCopy, redirectAttempt + 1, attemptedRedirectUrls)) .flatMap(newResponse -> { String digest = httpResponse.getHeaders().getValue(DOCKER_DIGEST_HEADER_NAME); if (digest != null) { newResponse.getHeaders().add(DOCKER_DIGEST_HEADER_NAME, digest); } return Mono.just(newResponse); }); } else { return Mono.just(httpResponse); } }); } private HttpRequest createRedirectRequest(HttpResponse httpResponse) { String responseLocation = this.tryGetRedirectHeader(httpResponse.getHeaders(), REDIRECT_LOCATION_HEADER_NAME); HttpRequest request = httpResponse.getRequest(); request.setUrl(responseLocation); request.getHeaders().remove(AUTHORIZATION); return httpResponse.getRequest().setUrl(responseLocation); } private boolean alreadyAttemptedRedirectUrl(String redirectUrl, Set<String> attemptedRedirectUrls) { if (attemptedRedirectUrls.contains(redirectUrl)) { LOGGER.error("Request was redirected more than once to: {}", new Object[]{redirectUrl}); return true; } else { return false; } } private boolean isValidRedirectCount(int tryCount) { if (tryCount >= MAX_REDIRECT_ATTEMPTS) { LOGGER.error("Request has been redirected more than {} times.", new Object[]{MAX_REDIRECT_ATTEMPTS}); return false; } else { return true; } } private boolean isAllowedRedirectMethod(HttpMethod httpMethod) { if (REDIRECT_ALLOWED_METHODS.contains(httpMethod)) { return true; } else { LOGGER.error("Request was redirected from an invalid redirect allowed method: {}", new Object[]{httpMethod}); return false; } } private boolean isValidRedirectStatusCode(int statusCode) { return statusCode == PERMANENT_REDIRECT_STATUS_CODE || statusCode == TEMPORARY_REDIRECT_STATUS_CODE; } String tryGetRedirectHeader(HttpHeaders headers, String headerName) { String headerValue = headers.getValue(headerName); if (CoreUtils.isNullOrEmpty(headerValue)) { LOGGER.error("Redirect 
url was null for header name: {}, request redirect was terminated.", headerName); return null; } else { return headerValue; } } }
```suggestion
return this.headers;
```
public HttpHeaders getHeaders() { return this.headers ; }
return this.headers ;
public HttpHeaders getHeaders() { return this.headers; }
class MockHttpResponse extends HttpResponse { private static final SerializerAdapter SERIALIZER = new JacksonAdapter(); private final int statusCode; private final HttpHeaders headers; private final byte[] bodyBytes; public MockHttpResponse(HttpRequest request, int statusCode, HttpHeaders headers, byte[] bodyBytes) { super(request); this.statusCode = statusCode; this.headers = headers; this.bodyBytes = bodyBytes; } public MockHttpResponse(HttpRequest request, int statusCode) { this(request, statusCode, new HttpHeaders(), new byte[0]); } public MockHttpResponse(HttpRequest request, int statusCode, HttpHeaders headers) { this(request, statusCode, headers, new byte[0]); } public MockHttpResponse(HttpRequest request, int statusCode, HttpHeaders headers, Object serializable) { this(request, statusCode, headers, serialize(serializable)); } public MockHttpResponse(HttpRequest request, int statusCode, Object serializable) { this(request, statusCode, new HttpHeaders(), serialize(serializable)); } private static byte[] serialize(Object serializable) { try { ByteArrayOutputStream stream = new ByteArrayOutputStream(); SERIALIZER.serialize(serializable, SerializerEncoding.JSON, stream); return stream.toByteArray(); } catch (IOException e) { e.printStackTrace(); } return null; } @Override public int getStatusCode() { return statusCode; } @Override public String getHeaderValue(String name) { return headers.getValue(name); } @Override @Override public Mono<byte[]> getBodyAsByteArray() { if (bodyBytes == null) { return Mono.empty(); } else { return Mono.just(bodyBytes); } } @Override public Flux<ByteBuffer> getBody() { if (bodyBytes == null) { return Flux.empty(); } else { return Flux.just(ByteBuffer.wrap(bodyBytes)); } } @Override public Mono<String> getBodyAsString() { return getBodyAsString(StandardCharsets.UTF_8); } @Override public Mono<String> getBodyAsString(Charset charset) { if (bodyBytes == null) { return Mono.empty(); } else { return Mono.just(new String(bodyBytes, charset)); } } }
class MockHttpResponse extends HttpResponse { private static final SerializerAdapter SERIALIZER = new JacksonAdapter(); private final int statusCode; private final HttpHeaders headers; private final byte[] bodyBytes; public MockHttpResponse(HttpRequest request, int statusCode, HttpHeaders headers, byte[] bodyBytes) { super(request); this.statusCode = statusCode; this.headers = headers; this.bodyBytes = bodyBytes; } public MockHttpResponse(HttpRequest request, int statusCode) { this(request, statusCode, new HttpHeaders(), new byte[0]); } public MockHttpResponse(HttpRequest request, int statusCode, HttpHeaders headers) { this(request, statusCode, headers, new byte[0]); } public MockHttpResponse(HttpRequest request, int statusCode, HttpHeaders headers, Object serializable) { this(request, statusCode, headers, serialize(serializable)); } public MockHttpResponse(HttpRequest request, int statusCode, Object serializable) { this(request, statusCode, new HttpHeaders(), serialize(serializable)); } private static byte[] serialize(Object serializable) { try { ByteArrayOutputStream stream = new ByteArrayOutputStream(); SERIALIZER.serialize(serializable, SerializerEncoding.JSON, stream); return stream.toByteArray(); } catch (IOException e) { e.printStackTrace(); } return null; } @Override public int getStatusCode() { return statusCode; } @Override public String getHeaderValue(String name) { return headers.getValue(name); } @Override @Override public Mono<byte[]> getBodyAsByteArray() { if (bodyBytes == null) { return Mono.empty(); } else { return Mono.just(bodyBytes); } } @Override public Flux<ByteBuffer> getBody() { if (bodyBytes == null) { return Flux.empty(); } else { return Flux.just(ByteBuffer.wrap(bodyBytes)); } } @Override public Mono<String> getBodyAsString() { return getBodyAsString(StandardCharsets.UTF_8); } @Override public Mono<String> getBodyAsString(Charset charset) { if (bodyBytes == null) { return Mono.empty(); } else { return Mono.just(new String(bodyBytes, charset)); } } }
fwiw we should start using `HttpResponse.close` for this.
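A minimal sketch of what this comment suggests, not the change that was merged: release the 401 challenge response with `HttpResponse.close()` instead of draining it through `getBody().ignoreElements()` before replaying the request. The class and method names below are illustrative only.

```java
import com.azure.core.http.HttpPipelineNextPolicy;
import com.azure.core.http.HttpResponse;
import reactor.core.publisher.Mono;

// Illustrative sketch only (CloseBeforeRetrySketch/retryOnChallenge are made-up names).
final class CloseBeforeRetrySketch {
    Mono<HttpResponse> retryOnChallenge(HttpPipelineNextPolicy nextPolicy,
                                        HttpResponse challengeResponse,
                                        boolean retry) {
        if (retry) {
            // HttpResponse implements Closeable; close() releases the connection and any
            // unread body of the discarded 401 response before the request is re-sent.
            challengeResponse.close();
            return nextPolicy.process();
        }
        return Mono.just(challengeResponse);
    }
}
```

Draining via `getBody().ignoreElements()` still works; the appeal of `close()` here is that it states the intent (discard this response) in a single call.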
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { if ("http".equals(context.getHttpRequest().getUrl().getProtocol())) { return Mono.error(new RuntimeException("token credentials require a URL using the HTTPS protocol scheme")); } HttpPipelineNextPolicy nextPolicy = next.clone(); return authorizeRequest(context) .then(Mono.defer(() -> next.process())) .flatMap(httpResponse -> { String authHeader = httpResponse.getHeaderValue(WWW_AUTHENTICATE); if (httpResponse.getStatusCode() == 401 && authHeader != null) { return authorizeRequestOnChallenge(context, httpResponse).flatMap(retry -> { if (retry) { return httpResponse.getBody().ignoreElements() .then(nextPolicy.process()); } else { return Mono.just(httpResponse); } }); } return Mono.just(httpResponse); }); }
.then(nextPolicy.process());
public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { if ("http".equals(context.getHttpRequest().getUrl().getProtocol())) { return Mono.error(new RuntimeException("token credentials require a URL using the HTTPS protocol scheme")); } HttpPipelineNextPolicy nextPolicy = next.clone(); return authorizeRequest(context) .then(Mono.defer(() -> next.process())) .flatMap(httpResponse -> { String authHeader = httpResponse.getHeaderValue(WWW_AUTHENTICATE); if (httpResponse.getStatusCode() == 401 && authHeader != null) { return authorizeRequestOnChallenge(context, httpResponse).flatMap(retry -> { if (retry) { return httpResponse.getBody().ignoreElements() .then(nextPolicy.process()); } else { return Mono.just(httpResponse); } }); } return Mono.just(httpResponse); }); }
class BearerTokenAuthenticationPolicy implements HttpPipelinePolicy { private static final String AUTHORIZATION_HEADER = "Authorization"; private static final String BEARER = "Bearer"; private final String[] scopes; private final AccessTokenCache cache; /** * Creates BearerTokenAuthenticationPolicy. * * @param credential the token credential to authenticate the request * @param scopes the scopes of authentication the credential should get token for */ public BearerTokenAuthenticationPolicy(TokenCredential credential, String... scopes) { Objects.requireNonNull(credential); this.scopes = scopes; this.cache = new AccessTokenCache(credential); } /** * Executed before sending the initial request and authenticates the request. * * @param context The request context. * @return A {@link Mono} containing {@link Void} */ public Mono<Void> authorizeRequest(HttpPipelineCallContext context) { if (this.scopes == null) { return Mono.empty(); } return setAuthorizationHeaderHelper(context, new TokenRequestContext().addScopes(this.scopes), false); } /** * Handles the authentication challenge in the event a 401 response with a WWW-Authenticate authentication * challenge header is received after the initial request and returns appropriate {@link TokenRequestContext} to * be used for re-authentication. * * @param context The request context. * @param response The Http Response containing the authentication challenge header. * @return A {@link Mono} containing {@link TokenRequestContext} */ public Mono<Boolean> authorizeRequestOnChallenge(HttpPipelineCallContext context, HttpResponse response) { return Mono.just(false); } @Override /** * Authorizes the request with the bearer token acquired using the specified {@code tokenRequestContext} * * @param context the HTTP pipeline context. * @param tokenRequestContext the token request context to be used for token acquisition. * @return a {@link Mono} containing {@link Void} */ public Mono<Void> setAuthorizationHeader(HttpPipelineCallContext context, TokenRequestContext tokenRequestContext) { return setAuthorizationHeaderHelper(context, tokenRequestContext, true); } private Mono<Void> setAuthorizationHeaderHelper(HttpPipelineCallContext context, TokenRequestContext tokenRequestContext, boolean checkToForceFetchToken) { return cache.getToken(tokenRequestContext, checkToForceFetchToken) .flatMap(token -> { context.getHttpRequest().getHeaders().set(AUTHORIZATION_HEADER, BEARER + " " + token.getToken()); return Mono.empty(); }); } }
class BearerTokenAuthenticationPolicy implements HttpPipelinePolicy { private static final String AUTHORIZATION_HEADER = "Authorization"; private static final String BEARER = "Bearer"; private final String[] scopes; private final AccessTokenCache cache; /** * Creates BearerTokenAuthenticationPolicy. * * @param credential the token credential to authenticate the request * @param scopes the scopes of authentication the credential should get token for */ public BearerTokenAuthenticationPolicy(TokenCredential credential, String... scopes) { Objects.requireNonNull(credential); this.scopes = scopes; this.cache = new AccessTokenCache(credential); } /** * Executed before sending the initial request and authenticates the request. * * @param context The request context. * @return A {@link Mono} containing {@link Void} */ public Mono<Void> authorizeRequest(HttpPipelineCallContext context) { if (this.scopes == null) { return Mono.empty(); } return setAuthorizationHeaderHelper(context, new TokenRequestContext().addScopes(this.scopes), false); } /** * Handles the authentication challenge in the event a 401 response with a WWW-Authenticate authentication * challenge header is received after the initial request and returns appropriate {@link TokenRequestContext} to * be used for re-authentication. * * @param context The request context. * @param response The Http Response containing the authentication challenge header. * @return A {@link Mono} containing {@link TokenRequestContext} */ public Mono<Boolean> authorizeRequestOnChallenge(HttpPipelineCallContext context, HttpResponse response) { return Mono.just(false); } @Override /** * Authorizes the request with the bearer token acquired using the specified {@code tokenRequestContext} * * @param context the HTTP pipeline context. * @param tokenRequestContext the token request context to be used for token acquisition. * @return a {@link Mono} containing {@link Void} */ public Mono<Void> setAuthorizationHeader(HttpPipelineCallContext context, TokenRequestContext tokenRequestContext) { return setAuthorizationHeaderHelper(context, tokenRequestContext, true); } private Mono<Void> setAuthorizationHeaderHelper(HttpPipelineCallContext context, TokenRequestContext tokenRequestContext, boolean checkToForceFetchToken) { return cache.getToken(tokenRequestContext, checkToForceFetchToken) .flatMap(token -> { context.getHttpRequest().getHeaders().set(AUTHORIZATION_HEADER, BEARER + " " + token.getToken()); return Mono.empty(); }); } }
Assuming every model needs to write this, is there a way to do it in a central place, before the model's fromJson is called?
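One possible shape of the centralization this comment asks about: a shared helper that performs the token-validation prologue once and hands the reader off to a model-specific field loop. This is a sketch, not an existing azure-core API; `JsonReaderUtils`, `readObject`, and `ObjectReader` are hypothetical names, and imports are omitted because the JsonReader/JsonToken package differs between the experimental versions in play.

```java
// Hypothetical shared helper (not an existing API); JsonReader/JsonToken are the same
// types used by the fromJson methods below.
final class JsonReaderUtils {
    @FunctionalInterface
    interface ObjectReader<T> {
        T read(JsonReader reader);
    }

    static <T> T readObject(JsonReader jsonReader, ObjectReader<T> fieldReader) {
        JsonToken token = jsonReader.currentToken();
        if (token == null) {
            token = jsonReader.nextToken();
        }
        if (token == JsonToken.NULL) {
            return null;
        }
        if (token != JsonToken.START_OBJECT) {
            throw new IllegalStateException("Unexpected token to begin deserialization: " + token);
        }
        // Each model only supplies its field-by-field loop.
        return fieldReader.read(jsonReader);
    }
}
```

With something like this, `ResponseInnerError.fromJson` would reduce to `JsonReaderUtils.readObject(jsonReader, ResponseInnerError::readFields)`, where `readFields` is a hypothetical method containing only the while-loop.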
public static ResponseInnerError fromJson(JsonReader jsonReader) { JsonToken token = jsonReader.currentToken(); if (token == null) { token = jsonReader.nextToken(); } if (token == JsonToken.NULL) { return null; } else if (token != JsonToken.START_OBJECT) { throw new IllegalStateException("Unexpected token to begin deserialization: " + token); } ResponseInnerError innerError = new ResponseInnerError(); while ((token = jsonReader.nextToken()) != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); if ("code".equals(fieldName)) { jsonReader.nextToken(); innerError.setCode(jsonReader.getStringValue()); } else if ("innererror".equals(fieldName)) { token = jsonReader.nextToken(); if (token != JsonToken.NULL) { innerError.setInnerError(ResponseInnerError.fromJson(jsonReader)); } } } return innerError; }
}
public static ResponseInnerError fromJson(JsonReader jsonReader) { JsonToken token = jsonReader.currentToken(); if (token == null) { token = jsonReader.nextToken(); } if (token == JsonToken.NULL) { return null; } else if (token != JsonToken.START_OBJECT) { throw new IllegalStateException("Unexpected token to begin deserialization: " + token); } ResponseInnerError innerError = new ResponseInnerError(); while ((token = jsonReader.nextToken()) != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); if ("code".equals(fieldName)) { jsonReader.nextToken(); innerError.setCode(jsonReader.getStringValue()); } else if ("innererror".equals(fieldName)) { token = jsonReader.nextToken(); if (token != JsonToken.NULL) { innerError.setInnerError(ResponseInnerError.fromJson(jsonReader)); } } } return innerError; }
class ResponseInnerError implements JsonCapable<ResponseInnerError> { @JsonProperty(value = "code") private String code; @JsonProperty(value = "innererror") private ResponseInnerError innerError; /** * Returns the error code of the inner error. * * @return the error code of this inner error. */ public String getCode() { return code; } /** * Sets the error code of the inner error. * * @param code the error code of this inner error. * @return the updated {@link ResponseInnerError} instance. */ public ResponseInnerError setCode(String code) { this.code = code; return this; } /** * Returns the nested inner error for this error. * * @return the nested inner error for this error. */ public ResponseInnerError getInnerError() { return innerError; } /** * Sets the nested inner error for this error. * * @param innerError the nested inner error for this error. * @return the updated {@link ResponseInnerError} instance. */ public ResponseInnerError setInnerError(ResponseInnerError innerError) { this.innerError = innerError; return this; } @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject(); jsonWriter.writeStringField("code", code) .writeFieldName("innererror"); if (innerError != null) { innerError.toJson(jsonWriter); } else { jsonWriter.writeNull(); } return jsonWriter.writeEndObject() .flush(); } /** * Creates an instance of {@link ResponseInnerError} by reading the {@link JsonReader}. * * @param jsonReader The {@link JsonReader} that will be read. * @return An instance of {@link ResponseInnerError} if the {@link JsonReader} is pointing to * {@link ResponseInnerError} JSON content, or null if it is pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to the correct {@link JsonToken} when * passed. */ }
class ResponseInnerError implements JsonCapable<ResponseInnerError> { @JsonProperty(value = "code") private String code; @JsonProperty(value = "innererror") private ResponseInnerError innerError; /** * Returns the error code of the inner error. * * @return the error code of this inner error. */ public String getCode() { return code; } /** * Sets the error code of the inner error. * * @param code the error code of this inner error. * @return the updated {@link ResponseInnerError} instance. */ public ResponseInnerError setCode(String code) { this.code = code; return this; } /** * Returns the nested inner error for this error. * * @return the nested inner error for this error. */ public ResponseInnerError getInnerError() { return innerError; } /** * Sets the nested inner error for this error. * * @param innerError the nested inner error for this error. * @return the updated {@link ResponseInnerError} instance. */ public ResponseInnerError setInnerError(ResponseInnerError innerError) { this.innerError = innerError; return this; } @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject(); jsonWriter.writeStringField("code", code) .writeFieldName("innererror"); if (innerError != null) { innerError.toJson(jsonWriter); } else { jsonWriter.writeNull(); } return jsonWriter.writeEndObject() .flush(); } /** * Creates an instance of {@link ResponseInnerError} by reading the {@link JsonReader}. * * @param jsonReader The {@link JsonReader} that will be read. * @return An instance of {@link ResponseInnerError} if the {@link JsonReader} is pointing to * {@link ResponseInnerError} JSON content, or null if it is pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to the correct {@link JsonToken} when * passed. */ }
should we add some verbose logging?
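A sketch of where verbose logging could go if the answer to this question is yes; it is not part of the committed code. It assumes a `ClientLogger LOGGER` declared on `ResponseInnerError`, mirroring the loggers declared elsewhere in this section, and otherwise follows the fromJson shown below.

```java
public static ResponseInnerError fromJson(JsonReader jsonReader) {
    JsonToken token = jsonReader.currentToken();
    if (token == null) {
        token = jsonReader.nextToken();
    }
    if (token == JsonToken.NULL) {
        // Verbose trace: the payload was an explicit JSON null.
        LOGGER.verbose("ResponseInnerError JSON was null; returning null.");
        return null;
    } else if (token != JsonToken.START_OBJECT) {
        throw new IllegalStateException("Unexpected token to begin deserialization: " + token);
    }
    ResponseInnerError innerError = new ResponseInnerError();
    while ((token = jsonReader.nextToken()) != JsonToken.END_OBJECT) {
        String fieldName = jsonReader.getFieldName();
        if ("code".equals(fieldName)) {
            jsonReader.nextToken();
            innerError.setCode(jsonReader.getStringValue());
        } else if ("innererror".equals(fieldName)) {
            token = jsonReader.nextToken();
            if (token != JsonToken.NULL) {
                innerError.setInnerError(ResponseInnerError.fromJson(jsonReader));
            }
        } else {
            // Verbose trace for properties the deserializer currently ignores silently.
            LOGGER.verbose("Skipping unknown field '{}' while reading ResponseInnerError.", fieldName);
        }
    }
    return innerError;
}
```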
public static ResponseInnerError fromJson(JsonReader jsonReader) { JsonToken token = jsonReader.currentToken(); if (token == null) { token = jsonReader.nextToken(); } if (token == JsonToken.NULL) { return null; } else if (token != JsonToken.START_OBJECT) { throw new IllegalStateException("Unexpected token to begin deserialization: " + token); } ResponseInnerError innerError = new ResponseInnerError(); while ((token = jsonReader.nextToken()) != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); if ("code".equals(fieldName)) { jsonReader.nextToken(); innerError.setCode(jsonReader.getStringValue()); } else if ("innererror".equals(fieldName)) { token = jsonReader.nextToken(); if (token != JsonToken.NULL) { innerError.setInnerError(ResponseInnerError.fromJson(jsonReader)); } } } return innerError; }
public static ResponseInnerError fromJson(JsonReader jsonReader) { JsonToken token = jsonReader.currentToken(); if (token == null) { token = jsonReader.nextToken(); } if (token == JsonToken.NULL) { return null; } else if (token != JsonToken.START_OBJECT) { throw new IllegalStateException("Unexpected token to begin deserialization: " + token); } ResponseInnerError innerError = new ResponseInnerError(); while ((token = jsonReader.nextToken()) != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); if ("code".equals(fieldName)) { jsonReader.nextToken(); innerError.setCode(jsonReader.getStringValue()); } else if ("innererror".equals(fieldName)) { token = jsonReader.nextToken(); if (token != JsonToken.NULL) { innerError.setInnerError(ResponseInnerError.fromJson(jsonReader)); } } } return innerError; }
class ResponseInnerError implements JsonCapable<ResponseInnerError> { @JsonProperty(value = "code") private String code; @JsonProperty(value = "innererror") private ResponseInnerError innerError; /** * Returns the error code of the inner error. * * @return the error code of this inner error. */ public String getCode() { return code; } /** * Sets the error code of the inner error. * * @param code the error code of this inner error. * @return the updated {@link ResponseInnerError} instance. */ public ResponseInnerError setCode(String code) { this.code = code; return this; } /** * Returns the nested inner error for this error. * * @return the nested inner error for this error. */ public ResponseInnerError getInnerError() { return innerError; } /** * Sets the nested inner error for this error. * * @param innerError the nested inner error for this error. * @return the updated {@link ResponseInnerError} instance. */ public ResponseInnerError setInnerError(ResponseInnerError innerError) { this.innerError = innerError; return this; } @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject(); jsonWriter.writeStringField("code", code) .writeFieldName("innererror"); if (innerError != null) { innerError.toJson(jsonWriter); } else { jsonWriter.writeNull(); } return jsonWriter.writeEndObject() .flush(); } /** * Creates an instance of {@link ResponseInnerError} by reading the {@link JsonReader}. * * @param jsonReader The {@link JsonReader} that will be read. * @return An instance of {@link ResponseInnerError} if the {@link JsonReader} is pointing to * {@link ResponseInnerError} JSON content, or null if it is pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to the correct {@link JsonToken} when * passed. */ }
class ResponseInnerError implements JsonCapable<ResponseInnerError> { @JsonProperty(value = "code") private String code; @JsonProperty(value = "innererror") private ResponseInnerError innerError; /** * Returns the error code of the inner error. * * @return the error code of this inner error. */ public String getCode() { return code; } /** * Sets the error code of the inner error. * * @param code the error code of this inner error. * @return the updated {@link ResponseInnerError} instance. */ public ResponseInnerError setCode(String code) { this.code = code; return this; } /** * Returns the nested inner error for this error. * * @return the nested inner error for this error. */ public ResponseInnerError getInnerError() { return innerError; } /** * Sets the nested inner error for this error. * * @param innerError the nested inner error for this error. * @return the updated {@link ResponseInnerError} instance. */ public ResponseInnerError setInnerError(ResponseInnerError innerError) { this.innerError = innerError; return this; } @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject(); jsonWriter.writeStringField("code", code) .writeFieldName("innererror"); if (innerError != null) { innerError.toJson(jsonWriter); } else { jsonWriter.writeNull(); } return jsonWriter.writeEndObject() .flush(); } /** * Creates an instance of {@link ResponseInnerError} by reading the {@link JsonReader}. * * @param jsonReader The {@link JsonReader} that will be read. * @return An instance of {@link ResponseInnerError} if the {@link JsonReader} is pointing to * {@link ResponseInnerError} JSON content, or null if it is pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to the correct {@link JsonToken} when * passed. */ }
```suggestion
 * @param customerProvidedKey the {@link CustomerProvidedKey} for the path,
```
public DataLakePathAsyncClient getCustomerProvidedKeyAsyncClient(CustomerProvidedKey customerProvidedKey) { CpkInfo finalCustomerProvidedKey = null; if (customerProvidedKey != null) { finalCustomerProvidedKey = new CpkInfo() .setEncryptionKey(customerProvidedKey.getKey()) .setEncryptionKeySha256(customerProvidedKey.getKeySha256()) .setEncryptionAlgorithm(customerProvidedKey.getEncryptionAlgorithm()); } return new DataLakePathAsyncClient(getHttpPipeline(), getAccountUrl(), getServiceVersion(), getAccountName(), getFileSystemName(), getObjectPath(), this.pathResourceType, this.blockBlobAsyncClient, finalCustomerProvidedKey); }
.setEncryptionKey(customerProvidedKey.getKey())
public DataLakePathAsyncClient getCustomerProvidedKeyAsyncClient(CustomerProvidedKey customerProvidedKey) { CpkInfo finalCustomerProvidedKey = null; if (customerProvidedKey != null) { finalCustomerProvidedKey = new CpkInfo() .setEncryptionKey(customerProvidedKey.getKey()) .setEncryptionKeySha256(customerProvidedKey.getKeySha256()) .setEncryptionAlgorithm(customerProvidedKey.getEncryptionAlgorithm()); } return new DataLakePathAsyncClient(getHttpPipeline(), getAccountUrl(), getServiceVersion(), getAccountName(), getFileSystemName(), getObjectPath(), this.pathResourceType, this.blockBlobAsyncClient, getSasToken(), finalCustomerProvidedKey); }
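A brief usage sketch for the method above; the helper name and key value are placeholders, and it assumes `CustomerProvidedKey` comes from the Data Lake models package, mirroring the types already used in this record.

```java
// Hypothetical helper; shows the call shape only.
DataLakePathAsyncClient withCustomerProvidedKey(DataLakePathAsyncClient pathClient, String base64Key) {
    // Single-String constructor matches the usage shown in getCustomerProvidedKey() above.
    CustomerProvidedKey cpk = new CustomerProvidedKey(base64Key);
    // The original client is left unchanged; the returned client applies the key to its operations.
    return pathClient.getCustomerProvidedKeyAsyncClient(cpk);
}
```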
class DataLakePathAsyncClient { private static final ClientLogger LOGGER = new ClientLogger(DataLakePathAsyncClient.class); final AzureDataLakeStorageRestAPIImpl dataLakeStorage; final AzureDataLakeStorageRestAPIImpl fileSystemDataLakeStorage; /** * This {@link AzureDataLakeStorageRestAPIImpl} is pointing to blob endpoint instead of dfs * in order to expose APIs that are on blob endpoint but are only functional for HNS enabled accounts. */ final AzureDataLakeStorageRestAPIImpl blobDataLakeStorage; private final String accountName; private final String fileSystemName; final String pathName; private final DataLakeServiceVersion serviceVersion; private final CpkInfo customerProvidedKey; final PathResourceType pathResourceType; final BlockBlobAsyncClient blockBlobAsyncClient; /** * Package-private constructor for use by {@link DataLakePathClientBuilder}. * * @param pipeline The pipeline used to send and receive service requests. * @param url The endpoint where to send service requests. * @param serviceVersion The version of the service to receive requests. * @param accountName The storage account name. * @param fileSystemName The file system name. * @param pathName The path name. * @param blockBlobAsyncClient The underlying {@link BlobContainerAsyncClient} */ DataLakePathAsyncClient(HttpPipeline pipeline, String url, DataLakeServiceVersion serviceVersion, String accountName, String fileSystemName, String pathName, PathResourceType pathResourceType, BlockBlobAsyncClient blockBlobAsyncClient, CpkInfo customerProvidedKey) { this.accountName = accountName; this.fileSystemName = fileSystemName; this.pathName = Utility.urlDecode(pathName); this.pathResourceType = pathResourceType; this.blockBlobAsyncClient = blockBlobAsyncClient; this.dataLakeStorage = new AzureDataLakeStorageRestAPIImplBuilder() .pipeline(pipeline) .url(url) .fileSystem(fileSystemName) .path(this.pathName) .version(serviceVersion.getVersion()) .buildClient(); this.serviceVersion = serviceVersion; String blobUrl = DataLakeImplUtils.endpointToDesiredEndpoint(url, "blob", "dfs"); this.blobDataLakeStorage = new AzureDataLakeStorageRestAPIImplBuilder() .pipeline(pipeline) .url(blobUrl) .fileSystem(fileSystemName) .path(this.pathName) .version(serviceVersion.getVersion()) .buildClient(); this.fileSystemDataLakeStorage = new AzureDataLakeStorageRestAPIImplBuilder() .pipeline(pipeline) .url(url) .fileSystem(fileSystemName) .version(serviceVersion.getVersion()) .buildClient(); this.customerProvidedKey = customerProvidedKey; } /** * Converts the metadata into a string of format "key1=value1, key2=value2" and Base64 encodes the values. * * @param metadata The metadata. * * @return The metadata represented as a String. */ static String buildMetadataString(Map<String, String> metadata) { if (!CoreUtils.isNullOrEmpty(metadata)) { StringBuilder sb = new StringBuilder(); for (final Map.Entry<String, String> entry : metadata.entrySet()) { if (Objects.isNull(entry.getKey()) || entry.getKey().isEmpty()) { throw new IllegalArgumentException("The key for one of the metadata key-value pairs is null, " + "empty, or whitespace."); } else if (Objects.isNull(entry.getValue()) || entry.getValue().isEmpty()) { throw new IllegalArgumentException("The value for one of the metadata key-value pairs is null, " + "empty, or whitespace."); } /* The service has an internal base64 decode when metadata is copied from ADLS to Storage, so getMetadata will work as normal. Doing this encoding for the customers preserves the existing behavior of metadata. 
*/ sb.append(entry.getKey()).append('=') .append(new String(Base64.getEncoder().encode(entry.getValue().getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8)).append(','); } sb.deleteCharAt(sb.length() - 1); return sb.toString(); } else { return null; } } /** * Gets the URL of the storage account. * * @return the URL. */ String getAccountUrl() { return dataLakeStorage.getUrl(); } /** * Gets the URL of the object represented by this client on the Data Lake service. * * @return the URL. */ String getPathUrl() { return dataLakeStorage.getUrl() + "/" + fileSystemName + "/" + Utility.urlEncode(pathName); } /** * Gets the associated account name. * * @return Account name associated with this storage resource. */ public String getAccountName() { return accountName; } /** * Gets the name of the File System in which this object lives. * * @return The name of the File System. */ public String getFileSystemName() { return fileSystemName; } /** * Gets the full path of this object. * * @return The path of the object. */ String getObjectPath() { return pathName; } /** * Gets the name of this object, not including its full path. * * @return The name of the object. */ String getObjectName() { String[] pathParts = getObjectPath().split("/"); return pathParts[pathParts.length - 1]; } /** * Gets the {@link HttpPipeline} powering this client. * * @return The pipeline. */ public HttpPipeline getHttpPipeline() { return dataLakeStorage.getHttpPipeline(); } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public DataLakeServiceVersion getServiceVersion() { return serviceVersion; } /** * Gets the {@link CpkInfo} used to encrypt this path's content on the server. * * @return the customer provided key used for encryption. */ public CustomerProvidedKey getCustomerProvidedKey() { return new CustomerProvidedKey(customerProvidedKey.getEncryptionKey()); } CpkInfo getCpkInfo() { return this.customerProvidedKey; } /** * Creates a new {@link DataLakePathAsyncClient} with the specified {@code customerProvidedKey}. * * @param customerProvidedKey the {@link CustomerProvidedKey} for the blob, * pass {@code null} to use no customer provided key. * @return a {@link DataLakePathAsyncClient} with the specified {@code customerProvidedKey}. */ /** * Creates a resource. By default, this method will not overwrite an existing path. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.create --> * <pre> * client.create& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.create --> * * <p>For more information see the * <a href="https: * Docs</a></p> * * @return A reactive response containing information about the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> create() { return create(false); } /** * Creates a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.create * <pre> * boolean overwrite = true; * client.create& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.create * * <p>For more information see the * <a href="https: * Docs</a></p> * * @param overwrite Whether to overwrite, should data exist on the file. * * @return A reactive response containing information about the created resource. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> create(boolean overwrite) { DataLakeRequestConditions requestConditions = new DataLakeRequestConditions(); if (!overwrite) { requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD); } return createWithResponse(null, null, null, null, requestConditions).flatMap(FluxUtil::toMono); } /** * Creates a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.createWithResponse * <pre> * PathHttpHeaders httpHeaders = new PathHttpHeaders& * .setContentLanguage& * .setContentType& * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * .setLeaseId& * String permissions = &quot;permissions&quot;; * String umask = &quot;umask&quot;; * * client.createWithResponse& * requestConditions& * .subscribe& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.createWithResponse * * <p>For more information see the * <a href="https: * Docs</a></p> * * @param permissions POSIX access permissions for the resource owner, the resource owning group, and others. * @param umask Restricts permissions of the resource to be created. * @param headers {@link PathHttpHeaders} * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any * metadata key or value, it must be removed or encoded. * @param requestConditions {@link DataLakeRequestConditions} * @return A {@link Mono} containing a {@link Response} whose {@link Response * PathItem}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions) { try { return withContext(context -> createWithResponse(permissions, umask, pathResourceType, headers, metadata, requestConditions, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathResourceType resourceType, PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions, Context context) { requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions; LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId()); ModifiedAccessConditions mac = new ModifiedAccessConditions() .setIfMatch(requestConditions.getIfMatch()) .setIfNoneMatch(requestConditions.getIfNoneMatch()) .setIfModifiedSince(requestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince()); context = context == null ? 
Context.NONE : context; return this.dataLakeStorage.getPaths().createWithResponseAsync(null, null, resourceType, null, null, null, null, buildMetadataString(metadata), permissions, umask, headers, lac, mac, null, customerProvidedKey, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(), response.getDeserializedHeaders().getLastModified(), response.getDeserializedHeaders().isXMsRequestServerEncrypted() != null, response.getDeserializedHeaders().getXMsEncryptionKeySha256()))); } /** * Package-private delete method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient} * * @param recursive Whether to delete all paths beneath the directory. * @param requestConditions {@link DataLakeRequestConditions} * @param context Additional context that is passed through the Http pipeline during the service call. * @return A {@link Mono} containing status code and HTTP headers */ Mono<Response<Void>> deleteWithResponse(Boolean recursive, DataLakeRequestConditions requestConditions, Context context) { requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions; LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId()); ModifiedAccessConditions mac = new ModifiedAccessConditions() .setIfMatch(requestConditions.getIfMatch()) .setIfNoneMatch(requestConditions.getIfNoneMatch()) .setIfModifiedSince(requestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince()); context = context == null ? Context.NONE : context; return this.dataLakeStorage.getPaths().deleteWithResponseAsync(null, null, recursive, null, lac, mac, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, null)); } /** * Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old * values must be preserved, they must be downloaded and included in the call to this method. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata * <pre> * client.setMetadata& * .subscribe& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata * * <p>For more information, see the * <a href="https: * * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any * metadata key or value, it must be removed or encoded. * @return A reactive response signalling completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> setMetadata(Map<String, String> metadata) { return setMetadataWithResponse(metadata, null).flatMap(FluxUtil::toMono); } /** * Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old * values must be preserved, they must be downloaded and included in the call to this method. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * * client.setMetadataWithResponse& * .subscribe& * response.getStatusCode& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata * * <p>For more information, see the * <a href="https: * * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any * metadata key or value, it must be removed or encoded. * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response signalling completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> setMetadataWithResponse(Map<String, String> metadata, DataLakeRequestConditions requestConditions) { return this.blockBlobAsyncClient.setMetadataWithResponse(metadata, Transforms.toBlobRequestConditions(requestConditions)) .onErrorMap(DataLakeImplUtils::transformBlobStorageException); } /** * Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased. * In order to preserve existing values, they must be passed alongside the header being changed. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeaders * <pre> * client.setHttpHeaders& * .setContentLanguage& * .setContentType& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeaders * * <p>For more information, see the * <a href="https: * * @param headers {@link PathHttpHeaders} * @return A reactive response signalling completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> setHttpHeaders(PathHttpHeaders headers) { return setHttpHeadersWithResponse(headers, null).flatMap(FluxUtil::toMono); } /** * Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased. * In order to preserve existing values, they must be passed alongside the header being changed. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeadersWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * * client.setHttpHeadersWithResponse& * .setContentLanguage& * .setContentType& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeadersWithResponse * * <p>For more information, see the * <a href="https: * * @param headers {@link PathHttpHeaders} * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response signalling completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> setHttpHeadersWithResponse(PathHttpHeaders headers, DataLakeRequestConditions requestConditions) { return this.blockBlobAsyncClient.setHttpHeadersWithResponse(Transforms.toBlobHttpHeaders(headers), Transforms.toBlobRequestConditions(requestConditions)) .onErrorMap(DataLakeImplUtils::transformBlobStorageException); } /** * Returns the resource's metadata and properties. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.getProperties --> * <pre> * client.getProperties& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.getProperties --> * * <p>For more information, see the * <a href="https: * * @return A reactive response containing the resource's properties and metadata. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathProperties> getProperties() { return getPropertiesWithResponse(null).flatMap(FluxUtil::toMono); } /** * Returns the resource's metadata and properties. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.getPropertiesWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * * client.getPropertiesWithResponse& * response -&gt; System.out.printf& * response.getValue& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.getPropertiesWithResponse * * <p>For more information, see the * <a href="https: * * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response containing the resource's properties and metadata. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathProperties>> getPropertiesWithResponse(DataLakeRequestConditions requestConditions) { return blockBlobAsyncClient.getPropertiesWithResponse(Transforms.toBlobRequestConditions(requestConditions)) .onErrorMap(DataLakeImplUtils::transformBlobStorageException) .map(response -> new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue()))); } /** * Determines if the path this client represents exists in the cloud. * <p>Note that this method does not guarantee that the path type (file/directory) matches expectations.</p> * <p>For example, a DataLakeFileClient representing a path to a datalake directory will return true, and vice * versa.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.exists --> * <pre> * client.exists& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.exists --> * * @return true if the path exists, false if it doesn't */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Boolean> exists() { return existsWithResponse().flatMap(FluxUtil::toMono); } /** * Determines if the path this client represents exists in the cloud. * <p>Note that this method does not guarantee that the path type (file/directory) matches expectations.</p> * <p>For example, a DataLakeFileClient representing a path to a datalake directory will return true, and vice * versa.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.existsWithResponse --> * <pre> * client.existsWithResponse& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.existsWithResponse --> * * @return true if the path exists, false if it doesn't */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Boolean>> existsWithResponse() { return blockBlobAsyncClient.existsWithResponse().onErrorMap(DataLakeImplUtils::transformBlobStorageException); } /** * Changes the access control list, group and/or owner for a resource. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlList * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * String group = &quot;group&quot;; * String owner = &quot;owner&quot;; * * client.setAccessControlList& * response -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlList * * <p>For more information, see the * <a href="https: * * @param accessControlList A list of {@link PathAccessControlEntry} objects. * @param group The group of the resource. * @param owner The owner of the resource. * @return A reactive response containing the resource info. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> setAccessControlList(List<PathAccessControlEntry> accessControlList, String group, String owner) { return setAccessControlListWithResponse(accessControlList, group, owner, null).flatMap(FluxUtil::toMono); } /** * Changes the access control list, group and/or owner for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlListWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * String group = &quot;group&quot;; * String owner = &quot;owner&quot;; * * client.setAccessControlListWithResponse& * response -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlListWithResponse * * <p>For more information, see the * <a href="https: * * @param accessControlList A list of {@link PathAccessControlEntry} objects. * @param group The group of the resource. * @param owner The owner of the resource. * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response containing the resource info. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathInfo>> setAccessControlListWithResponse(List<PathAccessControlEntry> accessControlList, String group, String owner, DataLakeRequestConditions requestConditions) { try { return withContext(context -> setAccessControlWithResponse(accessControlList, null, group, owner, requestConditions, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } /** * Changes the permissions, group and/or owner for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissions * <pre> * PathPermissions permissions = new PathPermissions& * .setGroup& * .setOwner& * .setOther& * String group = &quot;group&quot;; * String owner = &quot;owner&quot;; * * client.setPermissions& * response -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissions * * <p>For more information, see the * <a href="https: * * @param permissions {@link PathPermissions} * @param group The group of the resource. * @param owner The owner of the resource. * @return A reactive response containing the resource info. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> setPermissions(PathPermissions permissions, String group, String owner) { return setPermissionsWithResponse(permissions, group, owner, null).flatMap(FluxUtil::toMono); } /** * Changes the permissions, group and/or owner for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissionsWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * PathPermissions permissions = new PathPermissions& * .setGroup& * .setOwner& * .setOther& * String group = &quot;group&quot;; * String owner = &quot;owner&quot;; * * client.setPermissionsWithResponse& * response -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissionsWithResponse * * <p>For more information, see the * <a href="https: * * @param permissions {@link PathPermissions} * @param group The group of the resource. * @param owner The owner of the resource. * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response containing the resource info. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathInfo>> setPermissionsWithResponse(PathPermissions permissions, String group, String owner, DataLakeRequestConditions requestConditions) { try { return withContext(context -> setAccessControlWithResponse(null, permissions, group, owner, requestConditions, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<PathInfo>> setAccessControlWithResponse(List<PathAccessControlEntry> accessControlList, PathPermissions permissions, String group, String owner, DataLakeRequestConditions requestConditions, Context context) { requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions; LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId()); ModifiedAccessConditions mac = new ModifiedAccessConditions() .setIfMatch(requestConditions.getIfMatch()) .setIfNoneMatch(requestConditions.getIfNoneMatch()) .setIfModifiedSince(requestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince()); String permissionsString = permissions == null ? null : permissions.toString(); String accessControlListString = accessControlList == null ? null : PathAccessControlEntry.serializeList(accessControlList); context = context == null ? Context.NONE : context; return this.dataLakeStorage.getPaths().setAccessControlWithResponseAsync(null, owner, group, permissionsString, accessControlListString, null, lac, mac, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(), response.getDeserializedHeaders().getLastModified()))); } /** * Recursively sets the access control on a path and all subpaths. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlRecursive * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * client.setAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlRecursive * * <p>For more information, see the * <a href="https: * * @param accessControlList The POSIX access control list for the file or directory. * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<AccessControlChangeResult> setAccessControlRecursive(List<PathAccessControlEntry> accessControlList) { return setAccessControlRecursiveWithResponse(new PathSetAccessControlRecursiveOptions(accessControlList)) .flatMap(FluxUtil::toMono); } /** * Recursively sets the access control on a path and all subpaths. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlRecursiveWithResponse * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * Integer batchSize = 2; * Integer maxBatches = 10; * boolean continueOnFailure = false; * String continuationToken = null; * Consumer&lt;Response&lt;AccessControlChanges&gt;&gt; progressHandler = * response -&gt; System.out.println& * * PathSetAccessControlRecursiveOptions options = * new PathSetAccessControlRecursiveOptions& * .setBatchSize& * .setMaxBatches& * .setContinueOnFailure& * .setContinuationToken& * .setProgressHandler& * * client.setAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlRecursiveWithResponse * * <p>For more information, see the * <a href="https: * * @param options {@link PathSetAccessControlRecursiveOptions} * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<AccessControlChangeResult>> setAccessControlRecursiveWithResponse( PathSetAccessControlRecursiveOptions options) { try { StorageImplUtils.assertNotNull("options", options); return withContext(context -> setAccessControlRecursiveWithResponse( PathAccessControlEntry.serializeList(options.getAccessControlList()), options.getProgressHandler(), PathSetAccessControlRecursiveMode.SET, options.getBatchSize(), options.getMaxBatches(), options.isContinueOnFailure(), options.getContinuationToken(), context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } /** * Recursively updates the access control on a path and all subpaths. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.updateAccessControlRecursive * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * client.updateAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.updateAccessControlRecursive * * <p>For more information, see the * <a href="https: * * @param accessControlList The POSIX access control list for the file or directory. * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<AccessControlChangeResult> updateAccessControlRecursive( List<PathAccessControlEntry> accessControlList) { return updateAccessControlRecursiveWithResponse(new PathUpdateAccessControlRecursiveOptions(accessControlList)) .flatMap(FluxUtil::toMono); } /** * Recursively updates the access control on a path and all subpaths. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.updateAccessControlRecursiveWithResponse * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * Integer batchSize = 2; * Integer maxBatches = 10; * boolean continueOnFailure = false; * String continuationToken = null; * Consumer&lt;Response&lt;AccessControlChanges&gt;&gt; progressHandler = * response -&gt; System.out.println& * * PathUpdateAccessControlRecursiveOptions options = * new PathUpdateAccessControlRecursiveOptions& * .setBatchSize& * .setMaxBatches& * .setContinueOnFailure& * .setContinuationToken& * .setProgressHandler& * * client.updateAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.updateAccessControlRecursiveWithResponse * * <p>For more information, see the * <a href="https: * * @param options {@link PathUpdateAccessControlRecursiveOptions} * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<AccessControlChangeResult>> updateAccessControlRecursiveWithResponse( PathUpdateAccessControlRecursiveOptions options) { try { StorageImplUtils.assertNotNull("options", options); return withContext(context -> setAccessControlRecursiveWithResponse( PathAccessControlEntry.serializeList(options.getAccessControlList()), options.getProgressHandler(), PathSetAccessControlRecursiveMode.MODIFY, options.getBatchSize(), options.getMaxBatches(), options.isContinueOnFailure(), options.getContinuationToken(), context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } /** * Recursively removes the access control on a path and all subpaths. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.removeAccessControlRecursive * <pre> * PathRemoveAccessControlEntry pathAccessControlEntry = new PathRemoveAccessControlEntry& * .setEntityId& * List&lt;PathRemoveAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * client.removeAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.removeAccessControlRecursive * * <p>For more information, see the * <a href="https: * * @param accessControlList The POSIX access control list for the file or directory. * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<AccessControlChangeResult> removeAccessControlRecursive( List<PathRemoveAccessControlEntry> accessControlList) { return removeAccessControlRecursiveWithResponse(new PathRemoveAccessControlRecursiveOptions(accessControlList)) .flatMap(FluxUtil::toMono); } /** * Recursively removes the access control on a path and all subpaths. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.removeAccessControlRecursiveWithResponse * <pre> * PathRemoveAccessControlEntry pathAccessControlEntry = new PathRemoveAccessControlEntry& * .setEntityId& * List&lt;PathRemoveAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * Integer batchSize = 2; * Integer maxBatches = 10; * boolean continueOnFailure = false; * String continuationToken = null; * Consumer&lt;Response&lt;AccessControlChanges&gt;&gt; progressHandler = * response -&gt; System.out.println& * * PathRemoveAccessControlRecursiveOptions options = * new PathRemoveAccessControlRecursiveOptions& * .setBatchSize& * .setMaxBatches& * .setContinueOnFailure& * .setContinuationToken& * .setProgressHandler& * * client.removeAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.removeAccessControlRecursiveWithResponse * * <p>For more information, see the * <a href="https: * * @param options {@link PathRemoveAccessControlRecursiveOptions} * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<AccessControlChangeResult>> removeAccessControlRecursiveWithResponse( PathRemoveAccessControlRecursiveOptions options) { try { StorageImplUtils.assertNotNull("options", options); return withContext(context -> setAccessControlRecursiveWithResponse( PathRemoveAccessControlEntry.serializeList(options.getAccessControlList()), options.getProgressHandler(), PathSetAccessControlRecursiveMode.REMOVE, options.getBatchSize(), options.getMaxBatches(), options.isContinueOnFailure(), options.getContinuationToken(), context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<AccessControlChangeResult>> setAccessControlRecursiveWithResponse( String accessControlList, Consumer<Response<AccessControlChanges>> progressHandler, PathSetAccessControlRecursiveMode mode, Integer batchSize, Integer maxBatches, Boolean continueOnFailure, String continuationToken, Context context) { StorageImplUtils.assertNotNull("accessControlList", accessControlList); context = context == null ? Context.NONE : context; Context contextFinal = context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE); AtomicInteger directoriesSuccessfulCount = new AtomicInteger(0); AtomicInteger filesSuccessfulCount = new AtomicInteger(0); AtomicInteger failureCount = new AtomicInteger(0); AtomicInteger batchesCount = new AtomicInteger(0); return this.dataLakeStorage.getPaths().setAccessControlRecursiveWithResponseAsync(mode, null, continuationToken, continueOnFailure, batchSize, accessControlList, null, contextFinal) .onErrorMap(e -> { if (e instanceof DataLakeStorageException) { return LOGGER.logExceptionAsError(ModelHelper.changeAclRequestFailed((DataLakeStorageException) e, continuationToken)); } else if (e instanceof Exception) { return LOGGER.logExceptionAsError(ModelHelper.changeAclFailed((Exception) e, continuationToken)); } return e; }) .flatMap(response -> setAccessControlRecursiveWithResponseHelper(response, maxBatches, directoriesSuccessfulCount, filesSuccessfulCount, failureCount, batchesCount, progressHandler, accessControlList, mode, batchSize, continueOnFailure, continuationToken, null, contextFinal)); } Mono<Response<AccessControlChangeResult>> setAccessControlRecursiveWithResponseHelper( PathsSetAccessControlRecursiveResponse response, Integer maxBatches, AtomicInteger directoriesSuccessfulCount, AtomicInteger filesSuccessfulCount, AtomicInteger failureCount, AtomicInteger batchesCount, Consumer<Response<AccessControlChanges>> progressHandler, String accessControlStr, PathSetAccessControlRecursiveMode mode, Integer batchSize, Boolean continueOnFailure, String lastToken, List<AccessControlChangeFailure> batchFailures, Context context) { batchesCount.incrementAndGet(); directoriesSuccessfulCount.addAndGet(response.getValue().getDirectoriesSuccessful()); filesSuccessfulCount.addAndGet(response.getValue().getFilesSuccessful()); failureCount.addAndGet(response.getValue().getFailureCount()); if (failureCount.get() > 0 && batchFailures == null) { batchFailures = response.getValue().getFailedEntries() .stream() .map(aclFailedEntry -> new AccessControlChangeFailure() .setDirectory(aclFailedEntry.getType().equals("DIRECTORY")) .setName(aclFailedEntry.getName()) .setErrorMessage(aclFailedEntry.getErrorMessage()) ).collect(Collectors.toList()); } List<AccessControlChangeFailure> finalBatchFailures = batchFailures; /* Determine which token we should report/return/use next. 
If there was a token present on the response (still processing and either no errors or forceFlag set), use that one. If there were no failures or force flag set and still nothing present, we are at the end, so use that. If there were failures and no force flag set, use the last token (no token is returned in this case). */ String newToken = response.getDeserializedHeaders().getXMsContinuation(); String effectiveNextToken; if (newToken != null && !newToken.isEmpty()) { effectiveNextToken = newToken; } else { if (failureCount.get() == 0 || (continueOnFailure == null || continueOnFailure)) { effectiveNextToken = newToken; } else { effectiveNextToken = lastToken; } } if (progressHandler != null) { AccessControlChanges changes = new AccessControlChanges(); changes.setContinuationToken(effectiveNextToken); changes.setBatchFailures( response.getValue().getFailedEntries() .stream() .map(aclFailedEntry -> new AccessControlChangeFailure() .setDirectory(aclFailedEntry.getType().equals("DIRECTORY")) .setName(aclFailedEntry.getName()) .setErrorMessage(aclFailedEntry.getErrorMessage()) ).collect(Collectors.toList()) ); changes.setBatchCounters(new AccessControlChangeCounters() .setChangedDirectoriesCount(response.getValue().getDirectoriesSuccessful()) .setChangedFilesCount(response.getValue().getFilesSuccessful()) .setFailedChangesCount(response.getValue().getFailureCount())); changes.setAggregateCounters(new AccessControlChangeCounters() .setChangedDirectoriesCount(directoriesSuccessfulCount.get()) .setChangedFilesCount(filesSuccessfulCount.get()) .setFailedChangesCount(failureCount.get())); progressHandler.accept( new ResponseBase<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), changes, response.getDeserializedHeaders())); } /* Determine if we are finished either because there is no new continuation (failure or finished) token or we have hit maxBatches. */ if ((newToken == null || newToken.isEmpty()) || (maxBatches != null && batchesCount.get() >= maxBatches)) { AccessControlChangeResult result = new AccessControlChangeResult() .setBatchFailures(batchFailures) .setContinuationToken(effectiveNextToken) .setCounters(new AccessControlChangeCounters() .setChangedDirectoriesCount(directoriesSuccessfulCount.get()) .setChangedFilesCount(filesSuccessfulCount.get()) .setFailedChangesCount(failureCount.get())); return Mono.just(new ResponseBase<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), result, response.getDeserializedHeaders() )); } return this.dataLakeStorage.getPaths().setAccessControlRecursiveWithResponseAsync(mode, null, effectiveNextToken, continueOnFailure, batchSize, accessControlStr, null, context) .onErrorMap(e -> { if (e instanceof DataLakeStorageException) { return LOGGER.logExceptionAsError(ModelHelper.changeAclRequestFailed((DataLakeStorageException) e, effectiveNextToken)); } else if (e instanceof Exception) { return LOGGER.logExceptionAsError(ModelHelper.changeAclFailed((Exception) e, effectiveNextToken)); } return e; }) .flatMap(response2 -> setAccessControlRecursiveWithResponseHelper(response2, maxBatches, directoriesSuccessfulCount, filesSuccessfulCount, failureCount, batchesCount, progressHandler, accessControlStr, mode, batchSize, continueOnFailure, effectiveNextToken, finalBatchFailures, context)); } /** * Returns the access control for a resource. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControl --> * <pre> * client.getAccessControl& * response -&gt; System.out.printf& * PathAccessControlEntry.serializeList& * response.getOwner& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControl --> * * <p>For more information, see the * <a href="https: * * @return A reactive response containing the resource access control. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathAccessControl> getAccessControl() { return getAccessControlWithResponse(false, null).flatMap(FluxUtil::toMono); } /** * Returns the access control for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControlWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * boolean userPrincipalNameReturned = false; * * client.getAccessControlWithResponse& * response -&gt; System.out.printf& * PathAccessControlEntry.serializeList& * response.getValue& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControlWithResponse * * <p>For more information, see the * <a href="https: * * @param userPrincipalNameReturned When true, user identity values returned as User Principal Names. When false, * user identity values returned as Azure Active Directory Object IDs. Default value is false. * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response containing the resource access control. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned, DataLakeRequestConditions requestConditions) { try { return withContext(context -> getAccessControlWithResponse(userPrincipalNameReturned, requestConditions, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned, DataLakeRequestConditions requestConditions, Context context) { requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions; LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId()); ModifiedAccessConditions mac = new ModifiedAccessConditions() .setIfMatch(requestConditions.getIfMatch()) .setIfNoneMatch(requestConditions.getIfNoneMatch()) .setIfModifiedSince(requestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince()); context = context == null ? Context.NONE : context; return this.dataLakeStorage.getPaths().getPropertiesWithResponseAsync(null, null, PathGetPropertiesAction.GET_ACCESS_CONTROL, userPrincipalNameReturned, lac, mac, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, new PathAccessControl( PathAccessControlEntry.parseList(response.getDeserializedHeaders().getXMsAcl()), PathPermissions.parseSymbolic(response.getDeserializedHeaders().getXMsPermissions()), response.getDeserializedHeaders().getXMsGroup(), response.getDeserializedHeaders().getXMsOwner()))); } /** * Package-private rename method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient} * * @param destinationFileSystem The file system of the destination within the account. * {@code null} for the current file system. 
* @param destinationPath The path of the destination relative to the file system name * @param sourceRequestConditions {@link DataLakeRequestConditions} against the source. * @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination. * @param context Additional context that is passed through the Http pipeline during the service call. * @return A {@link Mono} containing a {@link Response} whose {@link Response * DataLakePathAsyncClient} used to interact with the path created. */ Mono<Response<DataLakePathAsyncClient>> renameWithResponse(String destinationFileSystem, String destinationPath, DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions, Context context) { destinationRequestConditions = destinationRequestConditions == null ? new DataLakeRequestConditions() : destinationRequestConditions; sourceRequestConditions = sourceRequestConditions == null ? new DataLakeRequestConditions() : sourceRequestConditions; SourceModifiedAccessConditions sourceConditions = new SourceModifiedAccessConditions() .setSourceIfModifiedSince(sourceRequestConditions.getIfModifiedSince()) .setSourceIfUnmodifiedSince(sourceRequestConditions.getIfUnmodifiedSince()) .setSourceIfMatch(sourceRequestConditions.getIfMatch()) .setSourceIfNoneMatch(sourceRequestConditions.getIfNoneMatch()); LeaseAccessConditions destLac = new LeaseAccessConditions() .setLeaseId(destinationRequestConditions.getLeaseId()); ModifiedAccessConditions destMac = new ModifiedAccessConditions() .setIfMatch(destinationRequestConditions.getIfMatch()) .setIfNoneMatch(destinationRequestConditions.getIfNoneMatch()) .setIfModifiedSince(destinationRequestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(destinationRequestConditions.getIfUnmodifiedSince()); DataLakePathAsyncClient dataLakePathAsyncClient = getPathAsyncClient(destinationFileSystem, destinationPath); String renameSource = "/" + this.fileSystemName + "/" + Utility.urlEncode(pathName); return dataLakePathAsyncClient.dataLakeStorage.getPaths().createWithResponseAsync( null /* request id */, null /* timeout */, null /* pathResourceType */, null /* continuation */, PathRenameMode.LEGACY, renameSource, sourceRequestConditions.getLeaseId(), null /* metadata */, null /* permissions */, null /* umask */, null /* pathHttpHeaders */, destLac, destMac, sourceConditions, customerProvidedKey, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, dataLakePathAsyncClient)); } /** * Takes in a destination and creates a DataLakePathAsyncClient with a new path * @param destinationFileSystem The destination file system * @param destinationPath The destination path * @return A DataLakePathAsyncClient */ DataLakePathAsyncClient getPathAsyncClient(String destinationFileSystem, String destinationPath) { if (destinationFileSystem == null) { destinationFileSystem = getFileSystemName(); } if (CoreUtils.isNullOrEmpty(destinationPath)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'destinationPath' can not be set to null")); } return new DataLakePathAsyncClient(getHttpPipeline(), getAccountUrl(), serviceVersion, accountName, destinationFileSystem, destinationPath, pathResourceType, prepareBuilderReplacePath(destinationFileSystem, destinationPath).buildBlockBlobAsyncClient(), customerProvidedKey); } /** * Takes in a destination path and creates a SpecializedBlobClientBuilder with a new path name * @param destinationFileSystem The destination 
file system * @param destinationPath The destination path * @return An updated SpecializedBlobClientBuilder */ SpecializedBlobClientBuilder prepareBuilderReplacePath(String destinationFileSystem, String destinationPath) { if (destinationFileSystem == null) { destinationFileSystem = getFileSystemName(); } String newBlobEndpoint = BlobUrlParts.parse(DataLakeImplUtils.endpointToDesiredEndpoint(getPathUrl(), "blob", "dfs")).setBlobName(destinationPath).setContainerName(destinationFileSystem).toUrl().toString(); return new SpecializedBlobClientBuilder() .pipeline(getHttpPipeline()) .endpoint(newBlobEndpoint) .serviceVersion(TransformUtils.toBlobServiceVersion(getServiceVersion())); } BlockBlobAsyncClient getBlockBlobAsyncClient() { return this.blockBlobAsyncClient; } /** * Generates a user delegation SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}. * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a user delegation SAS. * </p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas * <pre> * OffsetDateTime myExpiryTime = OffsetDateTime.now& * PathSasPermission myPermission = new PathSasPermission& * * DataLakeServiceSasSignatureValues myValues = new DataLakeServiceSasSignatureValues& * .setStartTime& * * client.generateUserDelegationSas& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas * * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues} * @param userDelegationKey A {@link UserDelegationKey} object used to sign the SAS values. * See {@link DataLakeServiceAsyncClient * on how to get a user delegation key. * * @return A {@code String} representing the SAS query parameters. */ public String generateUserDelegationSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues, UserDelegationKey userDelegationKey) { return generateUserDelegationSas(dataLakeServiceSasSignatureValues, userDelegationKey, getAccountName(), Context.NONE); } /** * Generates a user delegation SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}. * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a user delegation SAS. * </p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas * <pre> * OffsetDateTime myExpiryTime = OffsetDateTime.now& * PathSasPermission myPermission = new PathSasPermission& * * DataLakeServiceSasSignatureValues myValues = new DataLakeServiceSasSignatureValues& * .setStartTime& * * client.generateUserDelegationSas& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas * * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues} * @param userDelegationKey A {@link UserDelegationKey} object used to sign the SAS values. * See {@link DataLakeServiceAsyncClient * on how to get a user delegation key. * @param accountName The account name. * @param context Additional context that is passed through the code when generating a SAS. * * @return A {@code String} representing the SAS query parameters. 
*/ public String generateUserDelegationSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues, UserDelegationKey userDelegationKey, String accountName, Context context) { return new DataLakeSasImplUtil(dataLakeServiceSasSignatureValues, getFileSystemName(), getObjectPath(), PathResourceType.DIRECTORY.equals(this.pathResourceType)) .generateUserDelegationSas(userDelegationKey, accountName, context); } /** * Generates a service SAS for the path using the specified {@link DataLakeServiceSasSignatureValues} * <p>Note : The client must be authenticated via {@link StorageSharedKeyCredential} * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a service SAS.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas * <pre> * OffsetDateTime expiryTime = OffsetDateTime.now& * PathSasPermission permission = new PathSasPermission& * * DataLakeServiceSasSignatureValues values = new DataLakeServiceSasSignatureValues& * .setStartTime& * * client.generateSas& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas * * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues} * * @return A {@code String} representing the SAS query parameters. */ public String generateSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues) { return generateSas(dataLakeServiceSasSignatureValues, Context.NONE); } /** * Generates a service SAS for the path using the specified {@link DataLakeServiceSasSignatureValues} * <p>Note : The client must be authenticated via {@link StorageSharedKeyCredential} * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a service SAS.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas * <pre> * OffsetDateTime expiryTime = OffsetDateTime.now& * PathSasPermission permission = new PathSasPermission& * * DataLakeServiceSasSignatureValues values = new DataLakeServiceSasSignatureValues& * .setStartTime& * * & * client.generateSas& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas * * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues} * @param context Additional context that is passed through the code when generating a SAS. * * @return A {@code String} representing the SAS query parameters. */ public String generateSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues, Context context) { return new DataLakeSasImplUtil(dataLakeServiceSasSignatureValues, getFileSystemName(), getObjectPath(), PathResourceType.DIRECTORY.equals(this.pathResourceType)) .generateSas(SasImplUtils.extractSharedKeyCredential(getHttpPipeline()), context); } }
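The recursive access-control methods in this class (setAccessControlRecursive, updateAccessControlRecursive, removeAccessControlRecursive) all funnel into the same batching helper shown above: the service processes entries in batches, returns a continuation token while work remains, and the helper keeps re-issuing requests until the token is empty or maxBatches is reached. The sketch below shows one way a caller might drive that API with a batch size, a progress handler, and a resume-from-token branch. It is a minimal illustration assembled from the public signatures visible in this class, not an official sample; the endpoint, SAS token, file system name, directory name, and entity id are placeholders, and the resume branch assumes DataLakeAclChangeFailedException exposes the last continuation token via getContinuationToken().

import com.azure.core.http.rest.Response;
import com.azure.storage.file.datalake.DataLakeDirectoryAsyncClient;
import com.azure.storage.file.datalake.DataLakePathClientBuilder;
import com.azure.storage.file.datalake.models.AccessControlChangeResult;
import com.azure.storage.file.datalake.models.AccessControlChanges;
import com.azure.storage.file.datalake.models.AccessControlType;
import com.azure.storage.file.datalake.models.DataLakeAclChangeFailedException;
import com.azure.storage.file.datalake.models.PathAccessControlEntry;
import com.azure.storage.file.datalake.models.RolePermissions;
import com.azure.storage.file.datalake.options.PathSetAccessControlRecursiveOptions;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

public final class RecursiveAclSketch {
    public static void main(String[] args) {
        // Placeholder endpoint, credentials, and path: substitute real values before running.
        DataLakeDirectoryAsyncClient directoryClient = new DataLakePathClientBuilder()
            .endpoint("https://<account>.dfs.core.windows.net")
            .sasToken("<sas-token>")
            .fileSystemName("my-file-system")
            .pathName("my-directory")
            .buildDirectoryAsyncClient();

        // One POSIX ACL entry granting rwx to a hypothetical AAD object id.
        PathAccessControlEntry entry = new PathAccessControlEntry()
            .setAccessControlType(AccessControlType.USER)
            .setEntityId("<object-id>")
            .setPermissions(new RolePermissions()
                .setReadPermission(true)
                .setWritePermission(true)
                .setExecutePermission(true));
        List<PathAccessControlEntry> acl = new ArrayList<>();
        acl.add(entry);

        // Progress handler: invoked once per batch with that batch's counters and token.
        Consumer<Response<AccessControlChanges>> progressHandler = batch ->
            System.out.printf("Batch done. Continuation token: %s%n",
                batch.getValue().getContinuationToken());

        PathSetAccessControlRecursiveOptions options =
            new PathSetAccessControlRecursiveOptions(acl)
                .setBatchSize(2)               // entries per service call
                .setMaxBatches(10)             // stop (and return a token) after 10 batches
                .setContinueOnFailure(false)   // stop on the first per-entry failure
                .setProgressHandler(progressHandler);

        // In a short-lived program you would typically block() instead of subscribe().
        directoryClient.setAccessControlRecursiveWithResponse(options).subscribe(
            response -> {
                AccessControlChangeResult result = response.getValue();
                System.out.printf("Changed %d directories and %d files, %d failures.%n",
                    result.getCounters().getChangedDirectoriesCount(),
                    result.getCounters().getChangedFilesCount(),
                    result.getCounters().getFailedChangesCount());
            },
            error -> {
                // Assumed resume path: the wrapping exception carries the token to retry from.
                if (error instanceof DataLakeAclChangeFailedException) {
                    String token = ((DataLakeAclChangeFailedException) error).getContinuationToken();
                    System.out.println("Request failed; resume later with token: " + token);
                }
            });
    }
}

A resumed run would pass the saved token back via options.setContinuationToken(token); the helper above then picks up from that point rather than re-walking paths already processed.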
class DataLakePathAsyncClient { private static final ClientLogger LOGGER = new ClientLogger(DataLakePathAsyncClient.class); final AzureDataLakeStorageRestAPIImpl dataLakeStorage; final AzureDataLakeStorageRestAPIImpl fileSystemDataLakeStorage; /** * This {@link AzureDataLakeStorageRestAPIImpl} is pointing to blob endpoint instead of dfs * in order to expose APIs that are on blob endpoint but are only functional for HNS enabled accounts. */ final AzureDataLakeStorageRestAPIImpl blobDataLakeStorage; private final String accountName; private final String fileSystemName; final String pathName; private final DataLakeServiceVersion serviceVersion; private final CpkInfo customerProvidedKey; final PathResourceType pathResourceType; final BlockBlobAsyncClient blockBlobAsyncClient; private final AzureSasCredential sasToken; /** * Package-private constructor for use by {@link DataLakePathClientBuilder}. * * @param pipeline The pipeline used to send and receive service requests. * @param url The endpoint where to send service requests. * @param serviceVersion The version of the service to receive requests. * @param accountName The storage account name. * @param fileSystemName The file system name. * @param pathName The path name. * @param blockBlobAsyncClient The underlying {@link BlobContainerAsyncClient} */ DataLakePathAsyncClient(HttpPipeline pipeline, String url, DataLakeServiceVersion serviceVersion, String accountName, String fileSystemName, String pathName, PathResourceType pathResourceType, BlockBlobAsyncClient blockBlobAsyncClient, AzureSasCredential sasToken, CpkInfo customerProvidedKey) { this.accountName = accountName; this.fileSystemName = fileSystemName; this.pathName = Utility.urlDecode(pathName); this.pathResourceType = pathResourceType; this.blockBlobAsyncClient = blockBlobAsyncClient; this.sasToken = sasToken; this.dataLakeStorage = new AzureDataLakeStorageRestAPIImplBuilder() .pipeline(pipeline) .url(url) .fileSystem(fileSystemName) .path(this.pathName) .version(serviceVersion.getVersion()) .buildClient(); this.serviceVersion = serviceVersion; String blobUrl = DataLakeImplUtils.endpointToDesiredEndpoint(url, "blob", "dfs"); this.blobDataLakeStorage = new AzureDataLakeStorageRestAPIImplBuilder() .pipeline(pipeline) .url(blobUrl) .fileSystem(fileSystemName) .path(this.pathName) .version(serviceVersion.getVersion()) .buildClient(); this.fileSystemDataLakeStorage = new AzureDataLakeStorageRestAPIImplBuilder() .pipeline(pipeline) .url(url) .fileSystem(fileSystemName) .version(serviceVersion.getVersion()) .buildClient(); this.customerProvidedKey = customerProvidedKey; } /** * Converts the metadata into a string of format "key1=value1, key2=value2" and Base64 encodes the values. * * @param metadata The metadata. * * @return The metadata represented as a String. */ static String buildMetadataString(Map<String, String> metadata) { if (!CoreUtils.isNullOrEmpty(metadata)) { StringBuilder sb = new StringBuilder(); for (final Map.Entry<String, String> entry : metadata.entrySet()) { if (Objects.isNull(entry.getKey()) || entry.getKey().isEmpty()) { throw new IllegalArgumentException("The key for one of the metadata key-value pairs is null, " + "empty, or whitespace."); } else if (Objects.isNull(entry.getValue()) || entry.getValue().isEmpty()) { throw new IllegalArgumentException("The value for one of the metadata key-value pairs is null, " + "empty, or whitespace."); } /* The service has an internal base64 decode when metadata is copied from ADLS to Storage, so getMetadata will work as normal. 
Doing this encoding for the customers preserves the existing behavior of metadata. */ sb.append(entry.getKey()).append('=') .append(new String(Base64.getEncoder().encode(entry.getValue().getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8)).append(','); } sb.deleteCharAt(sb.length() - 1); return sb.toString(); } else { return null; } } /** * Gets the URL of the storage account. * * @return the URL. */ String getAccountUrl() { return dataLakeStorage.getUrl(); } /** * Gets the URL of the object represented by this client on the Data Lake service. * * @return the URL. */ String getPathUrl() { return dataLakeStorage.getUrl() + "/" + fileSystemName + "/" + Utility.urlEncode(pathName); } /** * Gets the associated account name. * * @return Account name associated with this storage resource. */ public String getAccountName() { return accountName; } /** * Gets the name of the File System in which this object lives. * * @return The name of the File System. */ public String getFileSystemName() { return fileSystemName; } /** * Gets the full path of this object. * * @return The path of the object. */ String getObjectPath() { return pathName; } /** * Gets the name of this object, not including its full path. * * @return The name of the object. */ String getObjectName() { String[] pathParts = getObjectPath().split("/"); return pathParts[pathParts.length - 1]; } /** * Gets the {@link HttpPipeline} powering this client. * * @return The pipeline. */ public HttpPipeline getHttpPipeline() { return dataLakeStorage.getHttpPipeline(); } /** * Gets the service version the client is using. * * @return the service version the client is using. */ public DataLakeServiceVersion getServiceVersion() { return serviceVersion; } AzureSasCredential getSasToken() { return this.sasToken; } /** * Gets the {@link CpkInfo} used to encrypt this path's content on the server. * * @return the customer provided key used for encryption. */ public CustomerProvidedKey getCustomerProvidedKey() { return new CustomerProvidedKey(customerProvidedKey.getEncryptionKey()); } CpkInfo getCpkInfo() { return this.customerProvidedKey; } /** * Creates a new {@link DataLakePathAsyncClient} with the specified {@code customerProvidedKey}. * * @param customerProvidedKey the {@link CustomerProvidedKey} for the path, * pass {@code null} to use no customer provided key. * @return a {@link DataLakePathAsyncClient} with the specified {@code customerProvidedKey}. */ /** * Creates a resource. By default, this method will not overwrite an existing path. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.create --> * <pre> * client.create& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.create --> * * <p>For more information see the * <a href="https: * Docs</a></p> * * @return A reactive response containing information about the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> create() { return create(false); } /** * Creates a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.create * <pre> * boolean overwrite = true; * client.create& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.create * * <p>For more information see the * <a href="https: * Docs</a></p> * * @param overwrite Whether to overwrite, should data exist on the file. 
* * @return A reactive response containing information about the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> create(boolean overwrite) { DataLakeRequestConditions requestConditions = new DataLakeRequestConditions(); if (!overwrite) { requestConditions.setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD); } return createWithResponse(null, null, null, null, requestConditions).flatMap(FluxUtil::toMono); } /** * Creates a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.createWithResponse * <pre> * PathHttpHeaders httpHeaders = new PathHttpHeaders& * .setContentLanguage& * .setContentType& * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * .setLeaseId& * String permissions = &quot;permissions&quot;; * String umask = &quot;umask&quot;; * * client.createWithResponse& * requestConditions& * .subscribe& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.createWithResponse * * <p>For more information see the * <a href="https: * Docs</a></p> * * @param permissions POSIX access permissions for the resource owner, the resource owning group, and others. * @param umask Restricts permissions of the resource to be created. * @param headers {@link PathHttpHeaders} * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any * metadata key or value, it must be removed or encoded. * @param requestConditions {@link DataLakeRequestConditions} * @return A {@link Mono} containing a {@link Response} whose {@link Response * PathItem}. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions) { try { return withContext(context -> createWithResponse(permissions, umask, pathResourceType, headers, metadata, requestConditions, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<PathInfo>> createWithResponse(String permissions, String umask, PathResourceType resourceType, PathHttpHeaders headers, Map<String, String> metadata, DataLakeRequestConditions requestConditions, Context context) { requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions; LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId()); ModifiedAccessConditions mac = new ModifiedAccessConditions() .setIfMatch(requestConditions.getIfMatch()) .setIfNoneMatch(requestConditions.getIfNoneMatch()) .setIfModifiedSince(requestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince()); context = context == null ? Context.NONE : context; return this.dataLakeStorage.getPaths().createWithResponseAsync(null, null, resourceType, null, null, null, null, buildMetadataString(metadata), permissions, umask, headers, lac, mac, null, customerProvidedKey, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(), response.getDeserializedHeaders().getLastModified(), response.getDeserializedHeaders().isXMsRequestServerEncrypted() != null, response.getDeserializedHeaders().getXMsEncryptionKeySha256()))); } /** * Creates a resource if it does not exist. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.createIfNotExists --> * <pre> * client.createIfNotExists& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.createIfNotExists --> * * <p>For more information see the * <a href="https: * Docs</a></p> * * @return A reactive response signaling completion. {@link PathInfo} contains information about the created resource. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> createIfNotExists() { return createIfNotExistsWithResponse(new DataLakePathCreateOptions()).flatMap(FluxUtil::toMono); } /** * Creates a resource if it does not exist. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.createIfNotExistsWithResponse * <pre> * PathHttpHeaders headers = new PathHttpHeaders& * .setContentLanguage& * .setContentType& * String permissions = &quot;permissions&quot;; * String umask = &quot;umask&quot;; * Map&lt;String, String&gt; metadata = Collections.singletonMap& * DataLakePathCreateOptions options = new DataLakePathCreateOptions& * .setPermissions& * * client.createIfNotExistsWithResponse& * if & * System.out.println& * & * System.out.println& * & * & * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.createIfNotExistsWithResponse * * <p>For more information see the * <a href="https: * Docs</a></p> * * @param options {@link DataLakePathCreateOptions} * @return A {@link Mono} containing {@link Response} signaling completion, whose {@link Response * contains a {@link PathInfo} containing information about the resource. If {@link Response}'s status code is * 201, a new resource was successfully created. If status code is 409, a resource already existed at this location. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathInfo>> createIfNotExistsWithResponse(DataLakePathCreateOptions options) { try { return withContext(context -> createIfNotExistsWithResponse(options, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<PathInfo>> createIfNotExistsWithResponse(DataLakePathCreateOptions options, Context context) { try { options = options == null ? new DataLakePathCreateOptions() : options; options.setRequestConditions(new DataLakeRequestConditions() .setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD)); return createWithResponse(options.getPermissions(), options.getUmask(), pathResourceType, options.getPathHttpHeaders(), options.getMetadata(), options.getRequestConditions(), context) .onErrorResume(t -> t instanceof DataLakeStorageException && ((DataLakeStorageException) t).getStatusCode() == 409, t -> { HttpResponse response = ((DataLakeStorageException) t).getResponse(); return Mono.just(new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null)); }); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } /** * Package-private delete method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient} * * @param recursive Whether to delete all paths beneath the directory. * @param requestConditions {@link DataLakeRequestConditions} * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return A {@link Mono} containing status code and HTTP headers */ Mono<Response<Void>> deleteWithResponse(Boolean recursive, DataLakeRequestConditions requestConditions, Context context) { requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions; LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId()); ModifiedAccessConditions mac = new ModifiedAccessConditions() .setIfMatch(requestConditions.getIfMatch()) .setIfNoneMatch(requestConditions.getIfNoneMatch()) .setIfModifiedSince(requestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince()); context = context == null ? Context.NONE : context; return this.dataLakeStorage.getPaths().deleteWithResponseAsync(null, null, recursive, null, lac, mac, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, null)); } /** * Deletes paths under the resource if it exists. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.deleteIfExists --> * <pre> * client.deleteIfExists& * response -&gt; System.out.printf& * error -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.deleteIfExists --> * * <p>For more information see the * <a href="https: * Docs</a></p> * * @return a reactive response signaling completion. {@code true} indicates that the resource under the path was * successfully deleted, {@code false} indicates the resource did not exist. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Boolean> deleteIfExists() { return deleteIfExistsWithResponse(new DataLakePathDeleteOptions()).map(response -> response.getStatusCode() != 404); } /** * Deletes all paths under the specified resource if exists. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.deleteIfExistsWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * .setLeaseId& * * DataLakePathDeleteOptions options = new DataLakePathDeleteOptions& * .setRequestConditions& * * client.deleteIfExistsWithResponse& * if & * System.out.println& * & * System.out.println& * & * & * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.deleteIfExistsWithResponse * * <p>For more information see the * <a href="https: * Docs</a></p> * * @param options {@link DataLakePathDeleteOptions} * * @return A reactive response signaling completion. If {@link Response}'s status code is 200, the resource was * successfully deleted. If status code is 404, the resource does not exist. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> deleteIfExistsWithResponse(DataLakePathDeleteOptions options) { try { return withContext(context -> deleteIfExistsWithResponse(options, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<Void>> deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Context context) { try { options = options == null ? 
new DataLakePathDeleteOptions() : options; return deleteWithResponse(options.getIsRecursive(), options.getRequestConditions(), context) .onErrorResume(t -> t instanceof DataLakeStorageException && ((DataLakeStorageException) t).getStatusCode() == 404, t -> { HttpResponse response = ((DataLakeStorageException) t).getResponse(); return Mono.just(new SimpleResponse<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), null)); }); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } /** * Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old * values must be preserved, they must be downloaded and included in the call to this method. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata * <pre> * client.setMetadata& * .subscribe& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata * * <p>For more information, see the * <a href="https: * * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any * metadata key or value, it must be removed or encoded. * @return A reactive response signalling completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> setMetadata(Map<String, String> metadata) { return setMetadataWithResponse(metadata, null).flatMap(FluxUtil::toMono); } /** * Changes a resource's metadata. The specified metadata in this method will replace existing metadata. If old * values must be preserved, they must be downloaded and included in the call to this method. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * * client.setMetadataWithResponse& * .subscribe& * response.getStatusCode& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setMetadata * * <p>For more information, see the * <a href="https: * * @param metadata Metadata to associate with the resource. If there is leading or trailing whitespace in any * metadata key or value, it must be removed or encoded. * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response signalling completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> setMetadataWithResponse(Map<String, String> metadata, DataLakeRequestConditions requestConditions) { return this.blockBlobAsyncClient.setMetadataWithResponse(metadata, Transforms.toBlobRequestConditions(requestConditions)) .onErrorMap(DataLakeImplUtils::transformBlobStorageException); } /** * Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased. * In order to preserve existing values, they must be passed alongside the header being changed. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeaders * <pre> * client.setHttpHeaders& * .setContentLanguage& * .setContentType& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeaders * * <p>For more information, see the * <a href="https: * * @param headers {@link PathHttpHeaders} * @return A reactive response signalling completion. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Void> setHttpHeaders(PathHttpHeaders headers) { return setHttpHeadersWithResponse(headers, null).flatMap(FluxUtil::toMono); } /** * Changes a resource's HTTP header properties. If only one HTTP header is updated, the others will all be erased. * In order to preserve existing values, they must be passed alongside the header being changed. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeadersWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * * client.setHttpHeadersWithResponse& * .setContentLanguage& * .setContentType& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setHttpHeadersWithResponse * * <p>For more information, see the * <a href="https: * * @param headers {@link PathHttpHeaders} * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response signalling completion. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Void>> setHttpHeadersWithResponse(PathHttpHeaders headers, DataLakeRequestConditions requestConditions) { return this.blockBlobAsyncClient.setHttpHeadersWithResponse(Transforms.toBlobHttpHeaders(headers), Transforms.toBlobRequestConditions(requestConditions)) .onErrorMap(DataLakeImplUtils::transformBlobStorageException); } /** * Returns the resource's metadata and properties. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.getProperties --> * <pre> * client.getProperties& * System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.getProperties --> * * <p>For more information, see the * <a href="https: * * @return A reactive response containing the resource's properties and metadata. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathProperties> getProperties() { return getPropertiesWithResponse(null).flatMap(FluxUtil::toMono); } /** * Returns the resource's metadata and properties. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.getPropertiesWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * * client.getPropertiesWithResponse& * response -&gt; System.out.printf& * response.getValue& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.getPropertiesWithResponse * * <p>For more information, see the * <a href="https: * * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response containing the resource's properties and metadata. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathProperties>> getPropertiesWithResponse(DataLakeRequestConditions requestConditions) { return blockBlobAsyncClient.getPropertiesWithResponse(Transforms.toBlobRequestConditions(requestConditions)) .onErrorMap(DataLakeImplUtils::transformBlobStorageException) .map(response -> new SimpleResponse<>(response, Transforms.toPathProperties(response.getValue()))); } /** * Determines if the path this client represents exists in the cloud. 
* <p>Note that this method does not guarantee that the path type (file/directory) matches expectations.</p> * <p>For example, a DataLakeFileClient representing a path to a datalake directory will return true, and vice * versa.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.exists --> * <pre> * client.exists& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.exists --> * * @return true if the path exists, false if it doesn't */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Boolean> exists() { return existsWithResponse().flatMap(FluxUtil::toMono); } /** * Determines if the path this client represents exists in the cloud. * <p>Note that this method does not guarantee that the path type (file/directory) matches expectations.</p> * <p>For example, a DataLakeFileClient representing a path to a datalake directory will return true, and vice * versa.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.existsWithResponse --> * <pre> * client.existsWithResponse& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.existsWithResponse --> * * @return true if the path exists, false if it doesn't */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<Boolean>> existsWithResponse() { return blockBlobAsyncClient.existsWithResponse().onErrorMap(DataLakeImplUtils::transformBlobStorageException); } /** * Changes the access control list, group and/or owner for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlList * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * String group = &quot;group&quot;; * String owner = &quot;owner&quot;; * * client.setAccessControlList& * response -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlList * * <p>For more information, see the * <a href="https: * * @param accessControlList A list of {@link PathAccessControlEntry} objects. * @param group The group of the resource. * @param owner The owner of the resource. * @return A reactive response containing the resource info. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> setAccessControlList(List<PathAccessControlEntry> accessControlList, String group, String owner) { return setAccessControlListWithResponse(accessControlList, group, owner, null).flatMap(FluxUtil::toMono); } /** * Changes the access control list, group and/or owner for a resource. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlListWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * String group = &quot;group&quot;; * String owner = &quot;owner&quot;; * * client.setAccessControlListWithResponse& * response -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlListWithResponse * * <p>For more information, see the * <a href="https: * * @param accessControlList A list of {@link PathAccessControlEntry} objects. * @param group The group of the resource. * @param owner The owner of the resource. * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response containing the resource info. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathInfo>> setAccessControlListWithResponse(List<PathAccessControlEntry> accessControlList, String group, String owner, DataLakeRequestConditions requestConditions) { try { return withContext(context -> setAccessControlWithResponse(accessControlList, null, group, owner, requestConditions, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } /** * Changes the permissions, group and/or owner for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissions * <pre> * PathPermissions permissions = new PathPermissions& * .setGroup& * .setOwner& * .setOther& * String group = &quot;group&quot;; * String owner = &quot;owner&quot;; * * client.setPermissions& * response -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissions * * <p>For more information, see the * <a href="https: * * @param permissions {@link PathPermissions} * @param group The group of the resource. * @param owner The owner of the resource. * @return A reactive response containing the resource info. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathInfo> setPermissions(PathPermissions permissions, String group, String owner) { return setPermissionsWithResponse(permissions, group, owner, null).flatMap(FluxUtil::toMono); } /** * Changes the permissions, group and/or owner for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissionsWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * PathPermissions permissions = new PathPermissions& * .setGroup& * .setOwner& * .setOther& * String group = &quot;group&quot;; * String owner = &quot;owner&quot;; * * client.setPermissionsWithResponse& * response -&gt; System.out.printf& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setPermissionsWithResponse * * <p>For more information, see the * <a href="https: * * @param permissions {@link PathPermissions} * @param group The group of the resource. * @param owner The owner of the resource. * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response containing the resource info. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathInfo>> setPermissionsWithResponse(PathPermissions permissions, String group, String owner, DataLakeRequestConditions requestConditions) { try { return withContext(context -> setAccessControlWithResponse(null, permissions, group, owner, requestConditions, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<PathInfo>> setAccessControlWithResponse(List<PathAccessControlEntry> accessControlList, PathPermissions permissions, String group, String owner, DataLakeRequestConditions requestConditions, Context context) { requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions; LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId()); ModifiedAccessConditions mac = new ModifiedAccessConditions() .setIfMatch(requestConditions.getIfMatch()) .setIfNoneMatch(requestConditions.getIfNoneMatch()) .setIfModifiedSince(requestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince()); String permissionsString = permissions == null ? null : permissions.toString(); String accessControlListString = accessControlList == null ? null : PathAccessControlEntry.serializeList(accessControlList); context = context == null ? Context.NONE : context; return this.dataLakeStorage.getPaths().setAccessControlWithResponseAsync(null, owner, group, permissionsString, accessControlListString, null, lac, mac, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, new PathInfo(response.getDeserializedHeaders().getETag(), response.getDeserializedHeaders().getLastModified()))); } /** * Recursively sets the access control on a path and all subpaths. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlRecursive * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * client.setAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlRecursive * * <p>For more information, see the * <a href="https: * * @param accessControlList The POSIX access control list for the file or directory. * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<AccessControlChangeResult> setAccessControlRecursive(List<PathAccessControlEntry> accessControlList) { return setAccessControlRecursiveWithResponse(new PathSetAccessControlRecursiveOptions(accessControlList)) .flatMap(FluxUtil::toMono); } /** * Recursively sets the access control on a path and all subpaths. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlRecursiveWithResponse * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * Integer batchSize = 2; * Integer maxBatches = 10; * boolean continueOnFailure = false; * String continuationToken = null; * Consumer&lt;Response&lt;AccessControlChanges&gt;&gt; progressHandler = * response -&gt; System.out.println& * * PathSetAccessControlRecursiveOptions options = * new PathSetAccessControlRecursiveOptions& * .setBatchSize& * .setMaxBatches& * .setContinueOnFailure& * .setContinuationToken& * .setProgressHandler& * * client.setAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.setAccessControlRecursiveWithResponse * * <p>For more information, see the * <a href="https: * * @param options {@link PathSetAccessControlRecursiveOptions} * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<AccessControlChangeResult>> setAccessControlRecursiveWithResponse( PathSetAccessControlRecursiveOptions options) { try { StorageImplUtils.assertNotNull("options", options); return withContext(context -> setAccessControlRecursiveWithResponse( PathAccessControlEntry.serializeList(options.getAccessControlList()), options.getProgressHandler(), PathSetAccessControlRecursiveMode.SET, options.getBatchSize(), options.getMaxBatches(), options.isContinueOnFailure(), options.getContinuationToken(), context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } /** * Recursively updates the access control on a path and all subpaths. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.updateAccessControlRecursive * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * client.updateAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.updateAccessControlRecursive * * <p>For more information, see the * <a href="https: * * @param accessControlList The POSIX access control list for the file or directory. * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<AccessControlChangeResult> updateAccessControlRecursive( List<PathAccessControlEntry> accessControlList) { return updateAccessControlRecursiveWithResponse(new PathUpdateAccessControlRecursiveOptions(accessControlList)) .flatMap(FluxUtil::toMono); } /** * Recursively updates the access control on a path and all subpaths. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.updateAccessControlRecursiveWithResponse * <pre> * PathAccessControlEntry pathAccessControlEntry = new PathAccessControlEntry& * .setEntityId& * .setPermissions& * List&lt;PathAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * Integer batchSize = 2; * Integer maxBatches = 10; * boolean continueOnFailure = false; * String continuationToken = null; * Consumer&lt;Response&lt;AccessControlChanges&gt;&gt; progressHandler = * response -&gt; System.out.println& * * PathUpdateAccessControlRecursiveOptions options = * new PathUpdateAccessControlRecursiveOptions& * .setBatchSize& * .setMaxBatches& * .setContinueOnFailure& * .setContinuationToken& * .setProgressHandler& * * client.updateAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.updateAccessControlRecursiveWithResponse * * <p>For more information, see the * <a href="https: * * @param options {@link PathUpdateAccessControlRecursiveOptions} * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<AccessControlChangeResult>> updateAccessControlRecursiveWithResponse( PathUpdateAccessControlRecursiveOptions options) { try { StorageImplUtils.assertNotNull("options", options); return withContext(context -> setAccessControlRecursiveWithResponse( PathAccessControlEntry.serializeList(options.getAccessControlList()), options.getProgressHandler(), PathSetAccessControlRecursiveMode.MODIFY, options.getBatchSize(), options.getMaxBatches(), options.isContinueOnFailure(), options.getContinuationToken(), context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } /** * Recursively removes the access control on a path and all subpaths. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.removeAccessControlRecursive * <pre> * PathRemoveAccessControlEntry pathAccessControlEntry = new PathRemoveAccessControlEntry& * .setEntityId& * List&lt;PathRemoveAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * client.removeAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.removeAccessControlRecursive * * <p>For more information, see the * <a href="https: * * @param accessControlList The POSIX access control list for the file or directory. * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<AccessControlChangeResult> removeAccessControlRecursive( List<PathRemoveAccessControlEntry> accessControlList) { return removeAccessControlRecursiveWithResponse(new PathRemoveAccessControlRecursiveOptions(accessControlList)) .flatMap(FluxUtil::toMono); } /** * Recursively removes the access control on a path and all subpaths. 
* * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.removeAccessControlRecursiveWithResponse * <pre> * PathRemoveAccessControlEntry pathAccessControlEntry = new PathRemoveAccessControlEntry& * .setEntityId& * List&lt;PathRemoveAccessControlEntry&gt; pathAccessControlEntries = new ArrayList&lt;&gt;& * pathAccessControlEntries.add& * * Integer batchSize = 2; * Integer maxBatches = 10; * boolean continueOnFailure = false; * String continuationToken = null; * Consumer&lt;Response&lt;AccessControlChanges&gt;&gt; progressHandler = * response -&gt; System.out.println& * * PathRemoveAccessControlRecursiveOptions options = * new PathRemoveAccessControlRecursiveOptions& * .setBatchSize& * .setMaxBatches& * .setContinueOnFailure& * .setContinuationToken& * .setProgressHandler& * * client.removeAccessControlRecursive& * response -&gt; System.out.printf& * response.getCounters& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.removeAccessControlRecursiveWithResponse * * <p>For more information, see the * <a href="https: * * @param options {@link PathRemoveAccessControlRecursiveOptions} * @return A reactive response containing the result of the operation. * * @throws DataLakeAclChangeFailedException if a request to storage throws a * {@link DataLakeStorageException} or a {@link Exception} to wrap the exception with the continuation token. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<AccessControlChangeResult>> removeAccessControlRecursiveWithResponse( PathRemoveAccessControlRecursiveOptions options) { try { StorageImplUtils.assertNotNull("options", options); return withContext(context -> setAccessControlRecursiveWithResponse( PathRemoveAccessControlEntry.serializeList(options.getAccessControlList()), options.getProgressHandler(), PathSetAccessControlRecursiveMode.REMOVE, options.getBatchSize(), options.getMaxBatches(), options.isContinueOnFailure(), options.getContinuationToken(), context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<AccessControlChangeResult>> setAccessControlRecursiveWithResponse( String accessControlList, Consumer<Response<AccessControlChanges>> progressHandler, PathSetAccessControlRecursiveMode mode, Integer batchSize, Integer maxBatches, Boolean continueOnFailure, String continuationToken, Context context) { StorageImplUtils.assertNotNull("accessControlList", accessControlList); context = context == null ? 
Context.NONE : context; Context contextFinal = context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE); AtomicInteger directoriesSuccessfulCount = new AtomicInteger(0); AtomicInteger filesSuccessfulCount = new AtomicInteger(0); AtomicInteger failureCount = new AtomicInteger(0); AtomicInteger batchesCount = new AtomicInteger(0); return this.dataLakeStorage.getPaths().setAccessControlRecursiveWithResponseAsync(mode, null, continuationToken, continueOnFailure, batchSize, accessControlList, null, contextFinal) .onErrorMap(e -> { if (e instanceof DataLakeStorageException) { return LOGGER.logExceptionAsError(ModelHelper.changeAclRequestFailed((DataLakeStorageException) e, continuationToken)); } else if (e instanceof Exception) { return LOGGER.logExceptionAsError(ModelHelper.changeAclFailed((Exception) e, continuationToken)); } return e; }) .flatMap(response -> setAccessControlRecursiveWithResponseHelper(response, maxBatches, directoriesSuccessfulCount, filesSuccessfulCount, failureCount, batchesCount, progressHandler, accessControlList, mode, batchSize, continueOnFailure, continuationToken, null, contextFinal)); } Mono<Response<AccessControlChangeResult>> setAccessControlRecursiveWithResponseHelper( PathsSetAccessControlRecursiveResponse response, Integer maxBatches, AtomicInteger directoriesSuccessfulCount, AtomicInteger filesSuccessfulCount, AtomicInteger failureCount, AtomicInteger batchesCount, Consumer<Response<AccessControlChanges>> progressHandler, String accessControlStr, PathSetAccessControlRecursiveMode mode, Integer batchSize, Boolean continueOnFailure, String lastToken, List<AccessControlChangeFailure> batchFailures, Context context) { batchesCount.incrementAndGet(); directoriesSuccessfulCount.addAndGet(response.getValue().getDirectoriesSuccessful()); filesSuccessfulCount.addAndGet(response.getValue().getFilesSuccessful()); failureCount.addAndGet(response.getValue().getFailureCount()); if (failureCount.get() > 0 && batchFailures == null) { batchFailures = response.getValue().getFailedEntries() .stream() .map(aclFailedEntry -> new AccessControlChangeFailure() .setDirectory(aclFailedEntry.getType().equals("DIRECTORY")) .setName(aclFailedEntry.getName()) .setErrorMessage(aclFailedEntry.getErrorMessage()) ).collect(Collectors.toList()); } List<AccessControlChangeFailure> finalBatchFailures = batchFailures; /* Determine which token we should report/return/use next. If there was a token present on the response (still processing and either no errors or forceFlag set), use that one. If there were no failures or force flag set and still nothing present, we are at the end, so use that. If there were failures and no force flag set, use the last token (no token is returned in this case). 
*/ String newToken = response.getDeserializedHeaders().getXMsContinuation(); String effectiveNextToken; if (newToken != null && !newToken.isEmpty()) { effectiveNextToken = newToken; } else { if (failureCount.get() == 0 || (continueOnFailure == null || continueOnFailure)) { effectiveNextToken = newToken; } else { effectiveNextToken = lastToken; } } if (progressHandler != null) { AccessControlChanges changes = new AccessControlChanges(); changes.setContinuationToken(effectiveNextToken); changes.setBatchFailures( response.getValue().getFailedEntries() .stream() .map(aclFailedEntry -> new AccessControlChangeFailure() .setDirectory(aclFailedEntry.getType().equals("DIRECTORY")) .setName(aclFailedEntry.getName()) .setErrorMessage(aclFailedEntry.getErrorMessage()) ).collect(Collectors.toList()) ); changes.setBatchCounters(new AccessControlChangeCounters() .setChangedDirectoriesCount(response.getValue().getDirectoriesSuccessful()) .setChangedFilesCount(response.getValue().getFilesSuccessful()) .setFailedChangesCount(response.getValue().getFailureCount())); changes.setAggregateCounters(new AccessControlChangeCounters() .setChangedDirectoriesCount(directoriesSuccessfulCount.get()) .setChangedFilesCount(filesSuccessfulCount.get()) .setFailedChangesCount(failureCount.get())); progressHandler.accept( new ResponseBase<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), changes, response.getDeserializedHeaders())); } /* Determine if we are finished either because there is no new continuation (failure or finished) token or we have hit maxBatches. */ if ((newToken == null || newToken.isEmpty()) || (maxBatches != null && batchesCount.get() >= maxBatches)) { AccessControlChangeResult result = new AccessControlChangeResult() .setBatchFailures(batchFailures) .setContinuationToken(effectiveNextToken) .setCounters(new AccessControlChangeCounters() .setChangedDirectoriesCount(directoriesSuccessfulCount.get()) .setChangedFilesCount(filesSuccessfulCount.get()) .setFailedChangesCount(failureCount.get())); return Mono.just(new ResponseBase<>(response.getRequest(), response.getStatusCode(), response.getHeaders(), result, response.getDeserializedHeaders() )); } return this.dataLakeStorage.getPaths().setAccessControlRecursiveWithResponseAsync(mode, null, effectiveNextToken, continueOnFailure, batchSize, accessControlStr, null, context) .onErrorMap(e -> { if (e instanceof DataLakeStorageException) { return LOGGER.logExceptionAsError(ModelHelper.changeAclRequestFailed((DataLakeStorageException) e, effectiveNextToken)); } else if (e instanceof Exception) { return LOGGER.logExceptionAsError(ModelHelper.changeAclFailed((Exception) e, effectiveNextToken)); } return e; }) .flatMap(response2 -> setAccessControlRecursiveWithResponseHelper(response2, maxBatches, directoriesSuccessfulCount, filesSuccessfulCount, failureCount, batchesCount, progressHandler, accessControlStr, mode, batchSize, continueOnFailure, effectiveNextToken, finalBatchFailures, context)); } /** * Returns the access control for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControl --> * <pre> * client.getAccessControl& * response -&gt; System.out.printf& * PathAccessControlEntry.serializeList& * response.getOwner& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControl --> * * <p>For more information, see the * <a href="https: * * @return A reactive response containing the resource access control. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<PathAccessControl> getAccessControl() { return getAccessControlWithResponse(false, null).flatMap(FluxUtil::toMono); } /** * Returns the access control for a resource. * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControlWithResponse * <pre> * DataLakeRequestConditions requestConditions = new DataLakeRequestConditions& * boolean userPrincipalNameReturned = false; * * client.getAccessControlWithResponse& * response -&gt; System.out.printf& * PathAccessControlEntry.serializeList& * response.getValue& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.getAccessControlWithResponse * * <p>For more information, see the * <a href="https: * * @param userPrincipalNameReturned When true, user identity values returned as User Principal Names. When false, * user identity values returned as Azure Active Directory Object IDs. Default value is false. * @param requestConditions {@link DataLakeRequestConditions} * @return A reactive response containing the resource access control. */ @ServiceMethod(returns = ReturnType.SINGLE) public Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned, DataLakeRequestConditions requestConditions) { try { return withContext(context -> getAccessControlWithResponse(userPrincipalNameReturned, requestConditions, context)); } catch (RuntimeException ex) { return monoError(LOGGER, ex); } } Mono<Response<PathAccessControl>> getAccessControlWithResponse(boolean userPrincipalNameReturned, DataLakeRequestConditions requestConditions, Context context) { requestConditions = requestConditions == null ? new DataLakeRequestConditions() : requestConditions; LeaseAccessConditions lac = new LeaseAccessConditions().setLeaseId(requestConditions.getLeaseId()); ModifiedAccessConditions mac = new ModifiedAccessConditions() .setIfMatch(requestConditions.getIfMatch()) .setIfNoneMatch(requestConditions.getIfNoneMatch()) .setIfModifiedSince(requestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(requestConditions.getIfUnmodifiedSince()); context = context == null ? Context.NONE : context; return this.dataLakeStorage.getPaths().getPropertiesWithResponseAsync(null, null, PathGetPropertiesAction.GET_ACCESS_CONTROL, userPrincipalNameReturned, lac, mac, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, new PathAccessControl( PathAccessControlEntry.parseList(response.getDeserializedHeaders().getXMsAcl()), PathPermissions.parseSymbolic(response.getDeserializedHeaders().getXMsPermissions()), response.getDeserializedHeaders().getXMsGroup(), response.getDeserializedHeaders().getXMsOwner()))); } /** * Package-private rename method for use by {@link DataLakeFileAsyncClient} and {@link DataLakeDirectoryAsyncClient} * * @param destinationFileSystem The file system of the destination within the account. * {@code null} for the current file system. * @param destinationPath The path of the destination relative to the file system name * @param sourceRequestConditions {@link DataLakeRequestConditions} against the source. * @param destinationRequestConditions {@link DataLakeRequestConditions} against the destination. * @param context Additional context that is passed through the Http pipeline during the service call. 
* @return A {@link Mono} containing a {@link Response} whose {@link Response * DataLakePathAsyncClient} used to interact with the path created. */ Mono<Response<DataLakePathAsyncClient>> renameWithResponse(String destinationFileSystem, String destinationPath, DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions, Context context) { context = context == null ? Context.NONE : context; destinationRequestConditions = destinationRequestConditions == null ? new DataLakeRequestConditions() : destinationRequestConditions; sourceRequestConditions = sourceRequestConditions == null ? new DataLakeRequestConditions() : sourceRequestConditions; SourceModifiedAccessConditions sourceConditions = new SourceModifiedAccessConditions() .setSourceIfModifiedSince(sourceRequestConditions.getIfModifiedSince()) .setSourceIfUnmodifiedSince(sourceRequestConditions.getIfUnmodifiedSince()) .setSourceIfMatch(sourceRequestConditions.getIfMatch()) .setSourceIfNoneMatch(sourceRequestConditions.getIfNoneMatch()); LeaseAccessConditions destLac = new LeaseAccessConditions() .setLeaseId(destinationRequestConditions.getLeaseId()); ModifiedAccessConditions destMac = new ModifiedAccessConditions() .setIfMatch(destinationRequestConditions.getIfMatch()) .setIfNoneMatch(destinationRequestConditions.getIfNoneMatch()) .setIfModifiedSince(destinationRequestConditions.getIfModifiedSince()) .setIfUnmodifiedSince(destinationRequestConditions.getIfUnmodifiedSince()); DataLakePathAsyncClient dataLakePathAsyncClient = getPathAsyncClient(destinationFileSystem, destinationPath); String renameSource = "/" + this.fileSystemName + "/" + Utility.urlEncode(pathName); renameSource = this.sasToken != null ? renameSource + "?" + this.sasToken.getSignature() : renameSource; return dataLakePathAsyncClient.dataLakeStorage.getPaths().createWithResponseAsync( null /* request id */, null /* timeout */, null /* pathResourceType */, null /* continuation */, PathRenameMode.LEGACY, renameSource, sourceRequestConditions.getLeaseId(), null /* metadata */, null /* permissions */, null /* umask */, null /* pathHttpHeaders */, destLac, destMac, sourceConditions, customerProvidedKey, context.addData(AZ_TRACING_NAMESPACE_KEY, STORAGE_TRACING_NAMESPACE_VALUE)) .map(response -> new SimpleResponse<>(response, dataLakePathAsyncClient)); } /** * Takes in a destination and creates a DataLakePathAsyncClient with a new path * @param destinationFileSystem The destination file system * @param destinationPath The destination path * @return A DataLakePathAsyncClient */ DataLakePathAsyncClient getPathAsyncClient(String destinationFileSystem, String destinationPath) { if (destinationFileSystem == null) { destinationFileSystem = getFileSystemName(); } if (CoreUtils.isNullOrEmpty(destinationPath)) { throw LOGGER.logExceptionAsError(new IllegalArgumentException("'destinationPath' can not be set to null")); } return new DataLakePathAsyncClient(getHttpPipeline(), getAccountUrl(), serviceVersion, accountName, destinationFileSystem, destinationPath, pathResourceType, prepareBuilderReplacePath(destinationFileSystem, destinationPath).buildBlockBlobAsyncClient(), sasToken, customerProvidedKey); } /** * Takes in a destination path and creates a SpecializedBlobClientBuilder with a new path name * @param destinationFileSystem The destination file system * @param destinationPath The destination path * @return An updated SpecializedBlobClientBuilder */ SpecializedBlobClientBuilder prepareBuilderReplacePath(String destinationFileSystem, String 
destinationPath) { if (destinationFileSystem == null) { destinationFileSystem = getFileSystemName(); } String newBlobEndpoint = BlobUrlParts.parse(DataLakeImplUtils.endpointToDesiredEndpoint(getPathUrl(), "blob", "dfs")).setBlobName(destinationPath).setContainerName(destinationFileSystem).toUrl().toString(); return new SpecializedBlobClientBuilder() .pipeline(getHttpPipeline()) .endpoint(newBlobEndpoint) .serviceVersion(TransformUtils.toBlobServiceVersion(getServiceVersion())); } BlockBlobAsyncClient getBlockBlobAsyncClient() { return this.blockBlobAsyncClient; } /** * Generates a user delegation SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}. * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a user delegation SAS. * </p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas * <pre> * OffsetDateTime myExpiryTime = OffsetDateTime.now& * PathSasPermission myPermission = new PathSasPermission& * * DataLakeServiceSasSignatureValues myValues = new DataLakeServiceSasSignatureValues& * .setStartTime& * * client.generateUserDelegationSas& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas * * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues} * @param userDelegationKey A {@link UserDelegationKey} object used to sign the SAS values. * See {@link DataLakeServiceAsyncClient * on how to get a user delegation key. * * @return A {@code String} representing the SAS query parameters. */ public String generateUserDelegationSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues, UserDelegationKey userDelegationKey) { return generateUserDelegationSas(dataLakeServiceSasSignatureValues, userDelegationKey, getAccountName(), Context.NONE); } /** * Generates a user delegation SAS for the path using the specified {@link DataLakeServiceSasSignatureValues}. * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a user delegation SAS. * </p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas * <pre> * OffsetDateTime myExpiryTime = OffsetDateTime.now& * PathSasPermission myPermission = new PathSasPermission& * * DataLakeServiceSasSignatureValues myValues = new DataLakeServiceSasSignatureValues& * .setStartTime& * * client.generateUserDelegationSas& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.generateUserDelegationSas * * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues} * @param userDelegationKey A {@link UserDelegationKey} object used to sign the SAS values. * See {@link DataLakeServiceAsyncClient * on how to get a user delegation key. * @param accountName The account name. * @param context Additional context that is passed through the code when generating a SAS. * * @return A {@code String} representing the SAS query parameters. 
*/ public String generateUserDelegationSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues, UserDelegationKey userDelegationKey, String accountName, Context context) { return new DataLakeSasImplUtil(dataLakeServiceSasSignatureValues, getFileSystemName(), getObjectPath(), PathResourceType.DIRECTORY.equals(this.pathResourceType)) .generateUserDelegationSas(userDelegationKey, accountName, context); } /** * Generates a service SAS for the path using the specified {@link DataLakeServiceSasSignatureValues} * <p>Note : The client must be authenticated via {@link StorageSharedKeyCredential} * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a service SAS.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas * <pre> * OffsetDateTime expiryTime = OffsetDateTime.now& * PathSasPermission permission = new PathSasPermission& * * DataLakeServiceSasSignatureValues values = new DataLakeServiceSasSignatureValues& * .setStartTime& * * client.generateSas& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas * * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues} * * @return A {@code String} representing the SAS query parameters. */ public String generateSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues) { return generateSas(dataLakeServiceSasSignatureValues, Context.NONE); } /** * Generates a service SAS for the path using the specified {@link DataLakeServiceSasSignatureValues} * <p>Note : The client must be authenticated via {@link StorageSharedKeyCredential} * <p>See {@link DataLakeServiceSasSignatureValues} for more information on how to construct a service SAS.</p> * * <p><strong>Code Samples</strong></p> * * <!-- src_embed com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas * <pre> * OffsetDateTime expiryTime = OffsetDateTime.now& * PathSasPermission permission = new PathSasPermission& * * DataLakeServiceSasSignatureValues values = new DataLakeServiceSasSignatureValues& * .setStartTime& * * & * client.generateSas& * </pre> * <!-- end com.azure.storage.file.datalake.DataLakePathAsyncClient.generateSas * * @param dataLakeServiceSasSignatureValues {@link DataLakeServiceSasSignatureValues} * @param context Additional context that is passed through the code when generating a SAS. * * @return A {@code String} representing the SAS query parameters. */ public String generateSas(DataLakeServiceSasSignatureValues dataLakeServiceSasSignatureValues, Context context) { return new DataLakeSasImplUtil(dataLakeServiceSasSignatureValues, getFileSystemName(), getObjectPath(), PathResourceType.DIRECTORY.equals(this.pathResourceType)) .generateSas(SasImplUtils.extractSharedKeyCredential(getHttpPipeline()), context); } }
Why can't we remove the abstract modifier, make the class final, and add a private ctor?
private JsonNodeUtils() { throw new IllegalStateException("Utility class"); }
}
private JsonNodeUtils() { }
class JsonNodeUtils { static final TypeReference<Set<String>> STRING_SET = new TypeReference<Set<String>>() { }; static final TypeReference<Map<String, Object>> STRING_OBJECT_MAP = new TypeReference<Map<String, Object>>() { }; static String findStringValue(JsonNode jsonNode, String fieldName) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isTextual()) ? value.asText() : null; } static <T> T findValue(JsonNode jsonNode, String fieldName, TypeReference<T> valueTypeReference, ObjectMapper mapper) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isContainerNode()) ? mapper.convertValue(value, valueTypeReference) : null; } static JsonNode findObjectNode(JsonNode jsonNode, String fieldName) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isObject()) ? value : null; } }
class JsonNodeUtils { static final TypeReference<Set<String>> STRING_SET = new TypeReference<Set<String>>() { }; static final TypeReference<Map<String, Object>> STRING_OBJECT_MAP = new TypeReference<Map<String, Object>>() { }; static String findStringValue(JsonNode jsonNode, String fieldName) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isTextual()) ? value.asText() : null; } static <T> T findValue(JsonNode jsonNode, String fieldName, TypeReference<T> valueTypeReference, ObjectMapper mapper) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isContainerNode()) ? mapper.convertValue(value, valueTypeReference) : null; } static JsonNode findObjectNode(JsonNode jsonNode, String fieldName) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isObject()) ? value : null; } }
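The fix the reviewer is steering toward in this row is the standard utility-class shape: declare the class final and give it an empty private constructor, so the throw adds nothing because nothing outside the class can reach the constructor anyway. A minimal, hypothetical sketch of that pattern (the class and method names below are illustrative, not taken from the SDK):

    // Utility class: final so it cannot be subclassed, private ctor so it cannot be instantiated.
    final class TextUtils {
        private TextUtils() {
        }

        static boolean isBlank(String value) {
            return value == null || value.trim().isEmpty();
        }
    }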
A private ctor alone is enough; why add the exception?
private SerializerUtils() { throw new IllegalStateException("Utility class"); }
throw new IllegalStateException("Utility class");
private SerializerUtils() { }
class SerializerUtils { private static final ObjectMapper OBJECT_MAPPER; private static final TypeReference<Map<String, OAuth2AuthorizedClient>> TYPE_REFERENCE = new TypeReference<Map<String, OAuth2AuthorizedClient>>() { }; static { OBJECT_MAPPER = new ObjectMapper(); OBJECT_MAPPER.registerModule(new OAuth2ClientJackson2Module()); OBJECT_MAPPER.registerModule(new AadOAuth2ClientJackson2Module()); OBJECT_MAPPER.registerModule(new CoreJackson2Module()); OBJECT_MAPPER.registerModule(new JavaTimeModule()); } public static String serializeOAuth2AuthorizedClientMap(Map<String, OAuth2AuthorizedClient> authorizedClients) { String result; try { result = OBJECT_MAPPER.writeValueAsString(authorizedClients); } catch (JsonProcessingException e) { throw new IllegalStateException(e); } return result; } public static Map<String, OAuth2AuthorizedClient> deserializeOAuth2AuthorizedClientMap(String authorizedClientsString) { if (authorizedClientsString == null) { return new HashMap<>(); } Map<String, OAuth2AuthorizedClient> authorizedClients; try { authorizedClients = OBJECT_MAPPER.readValue(authorizedClientsString, TYPE_REFERENCE); } catch (JsonProcessingException e) { throw new IllegalStateException(e); } return authorizedClients; } }
class SerializerUtils { private static final ObjectMapper OBJECT_MAPPER; private static final TypeReference<Map<String, OAuth2AuthorizedClient>> TYPE_REFERENCE = new TypeReference<Map<String, OAuth2AuthorizedClient>>() { }; static { OBJECT_MAPPER = new ObjectMapper(); OBJECT_MAPPER.registerModule(new OAuth2ClientJackson2Module()); OBJECT_MAPPER.registerModule(new AadOAuth2ClientJackson2Module()); OBJECT_MAPPER.registerModule(new CoreJackson2Module()); OBJECT_MAPPER.registerModule(new JavaTimeModule()); } public static String serializeOAuth2AuthorizedClientMap(Map<String, OAuth2AuthorizedClient> authorizedClients) { String result; try { result = OBJECT_MAPPER.writeValueAsString(authorizedClients); } catch (JsonProcessingException e) { throw new IllegalStateException(e); } return result; } public static Map<String, OAuth2AuthorizedClient> deserializeOAuth2AuthorizedClientMap(String authorizedClientsString) { if (authorizedClientsString == null) { return new HashMap<>(); } Map<String, OAuth2AuthorizedClient> authorizedClients; try { authorizedClients = OBJECT_MAPPER.readValue(authorizedClientsString, TYPE_REFERENCE); } catch (JsonProcessingException e) { throw new IllegalStateException(e); } return authorizedClients; } }
No need for this exception?
private AzureStorageUtils() { throw new IllegalStateException("Utility class"); }
throw new IllegalStateException("Utility class");
private AzureStorageUtils() { }
class AzureStorageUtils { /** * Path separator character for resource location. */ public static final String PATH_DELIMITER = "/"; /** * Prefix stands for storage protocol. */ private static final String STORAGE_PROTOCOL_PREFIX = "azure-%s: /** * Whether the given combination of location and storageType represents a valid Azure storage resource. * * @param location the location * @param storageType the storagetype of current resource * @return true - valid Azure storage resource<br> * false - not valid Azure storage resource */ static boolean isAzureStorageResource(String location, StorageType storageType) { Assert.notNull(location, "Location must not be null"); return location.toLowerCase(Locale.ROOT).startsWith(getStorageProtocolPrefix(storageType)); } /** * Get the storage protocal prefix string of storageType. * * @param storageType the storagetype of current resource * @return the exact storage protocal prefix string */ static String getStorageProtocolPrefix(StorageType storageType) { return String.format(STORAGE_PROTOCOL_PREFIX, storageType.getType()); } /** * Get the location's path. * * @param location the location represents the resource * @param storageType the storageType * @return the location's path */ static String stripProtocol(String location, StorageType storageType) { Assert.notNull(location, "Location must not be null"); assertIsAzureStorageLocation(location, storageType); return location.substring(getStorageProtocolPrefix(storageType).length()); } /** * Get the storage container(fileShare) name from the given location. * * @param location the location represents the resource * @param storageType the storageType * @return the container(fileShare) name name of current location */ static String getContainerName(String location, StorageType storageType) { assertIsAzureStorageLocation(location, storageType); int containerEndIndex = assertContainerValid(location, storageType); return location.substring(getStorageProtocolPrefix(storageType).length(), containerEndIndex); } /** * Get the file name from the given location. * * @param location the location represents the resource * @param storageType the storageType * @return the file name of current location */ static String getFilename(String location, StorageType storageType) { assertIsAzureStorageLocation(location, storageType); int containerEndIndex = assertContainerValid(location, storageType); if (location.endsWith(PATH_DELIMITER)) { return location.substring(++containerEndIndex, location.length() - 1); } return location.substring(++containerEndIndex); } /** * Assert the given combination of location and storageType represents a valid Azure storage resource. * * @param location the location * @param storageType the storagetype of current resource */ static void assertIsAzureStorageLocation(String location, StorageType storageType) { if (!AzureStorageUtils.isAzureStorageResource(location, storageType)) { throw new IllegalArgumentException( String.format("The location '%s' is not a valid Azure storage %s location", location, storageType.getType())); } } /** * Assert the given combination of location and storageType contains a valid Azure storage container. 
* * @param location the location * @param storageType the storagetype of current resource * @return the end index of container in the location */ private static int assertContainerValid(String location, StorageType storageType) { String storageProtocolPrefix = AzureStorageUtils.getStorageProtocolPrefix(storageType); int containerEndIndex = location.indexOf(PATH_DELIMITER, storageProtocolPrefix.length()); if (containerEndIndex == -1 || containerEndIndex == storageProtocolPrefix.length()) { throw new IllegalArgumentException( String.format("The location '%s' does not contain a valid container name", location)); } return containerEndIndex; } }
class AzureStorageUtils { /** * Path separator character for resource location. */ public static final String PATH_DELIMITER = "/"; /** * Prefix stands for storage protocol. */ private static final String STORAGE_PROTOCOL_PREFIX = "azure-%s: /** * Whether the given combination of location and storageType represents a valid Azure storage resource. * * @param location the location * @param storageType the storagetype of current resource * @return true - valid Azure storage resource<br> * false - not valid Azure storage resource */ static boolean isAzureStorageResource(String location, StorageType storageType) { Assert.notNull(location, "Location must not be null"); return location.toLowerCase(Locale.ROOT).startsWith(getStorageProtocolPrefix(storageType)); } /** * Get the storage protocal prefix string of storageType. * * @param storageType the storagetype of current resource * @return the exact storage protocal prefix string */ static String getStorageProtocolPrefix(StorageType storageType) { return String.format(STORAGE_PROTOCOL_PREFIX, storageType.getType()); } /** * Get the location's path. * * @param location the location represents the resource * @param storageType the storageType * @return the location's path */ static String stripProtocol(String location, StorageType storageType) { Assert.notNull(location, "Location must not be null"); assertIsAzureStorageLocation(location, storageType); return location.substring(getStorageProtocolPrefix(storageType).length()); } /** * Get the storage container(fileShare) name from the given location. * * @param location the location represents the resource * @param storageType the storageType * @return the container(fileShare) name name of current location */ static String getContainerName(String location, StorageType storageType) { assertIsAzureStorageLocation(location, storageType); int containerEndIndex = assertContainerValid(location, storageType); return location.substring(getStorageProtocolPrefix(storageType).length(), containerEndIndex); } /** * Get the file name from the given location. * * @param location the location represents the resource * @param storageType the storageType * @return the file name of current location */ static String getFilename(String location, StorageType storageType) { assertIsAzureStorageLocation(location, storageType); int containerEndIndex = assertContainerValid(location, storageType); if (location.endsWith(PATH_DELIMITER)) { return location.substring(++containerEndIndex, location.length() - 1); } return location.substring(++containerEndIndex); } /** * Assert the given combination of location and storageType represents a valid Azure storage resource. * * @param location the location * @param storageType the storagetype of current resource */ static void assertIsAzureStorageLocation(String location, StorageType storageType) { if (!AzureStorageUtils.isAzureStorageResource(location, storageType)) { throw new IllegalArgumentException( String.format("The location '%s' is not a valid Azure storage %s location", location, storageType.getType())); } } /** * Assert the given combination of location and storageType contains a valid Azure storage container. 
* * @param location the location * @param storageType the storagetype of current resource * @return the end index of container in the location */ private static int assertContainerValid(String location, StorageType storageType) { String storageProtocolPrefix = AzureStorageUtils.getStorageProtocolPrefix(storageType); int containerEndIndex = location.indexOf(PATH_DELIMITER, storageProtocolPrefix.length()); if (containerEndIndex == -1 || containerEndIndex == storageProtocolPrefix.length()) { throw new IllegalArgumentException( String.format("The location '%s' does not contain a valid container name", location)); } return containerEndIndex; } }
Which Sonar issue is this?
public Stream<StorageItem> listItems(String itemPrefix) { ShareClient shareClient = getShareServiceClient().getShareClient(name); if (Boolean.TRUE.equals(shareClient.exists())) { return shareClient.getRootDirectoryClient().listFilesAndDirectories(itemPrefix, null, null, null) .stream() .filter(file -> !file.isDirectory()) .map(file -> new StorageItem(name, file.getName(), getStorageType())); } else { return Stream.empty(); } }
if (Boolean.TRUE.equals(shareClient.exists())) {
public Stream<StorageItem> listItems(String itemPrefix) { ShareClient shareClient = getShareServiceClient().getShareClient(name); if (shareClient.exists()) { return shareClient.getRootDirectoryClient().listFilesAndDirectories(itemPrefix, null, null, null) .stream() .filter(file -> !file.isDirectory()) .map(file -> new StorageItem(name, file.getName(), getStorageType())); } else { return Stream.empty(); } }
class StorageFileContainerClient implements StorageContainerClient { private final String name; StorageFileContainerClient(String name) { this.name = name; } @Override public String getName() { return name; } @Override }
class StorageFileContainerClient implements StorageContainerClient { private final String name; StorageFileContainerClient(String name) { this.name = name; } @Override public String getName() { return name; } @Override }
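The Sonar rule being asked about here is presumably the one that flags boxed Booleans in boolean expressions: when a method such as shareClient.exists() returns a Boolean that may be null, unboxing it directly in an if condition throws a NullPointerException, while Boolean.TRUE.equals(...) quietly treats null as false. A small standalone sketch of the difference (the exists() method below is hypothetical and simply stands in for any nullable Boolean accessor):

    class BoxedBooleanExample {
        // Hypothetical accessor that can return null when the state is unknown.
        static Boolean exists() {
            return null;
        }

        public static void main(String[] args) {
            // Null-safe form: Boolean.TRUE.equals(null) evaluates to false.
            System.out.println(Boolean.TRUE.equals(exists())); // false

            try {
                // Direct use auto-unboxes the Boolean and throws for null.
                if (exists()) {
                    System.out.println("unreachable");
                }
            } catch (NullPointerException e) {
                System.out.println("NPE from unboxing a null Boolean");
            }
        }
    }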
refer: https://sonarcloud.io/project/issues?directories=sdk%2Fspring%2Fspring-cloud-azure-autoconfigure%2Fsrc%2Fmain%2Fjava%2Fcom%2Fazure%2Fspring%2Fcloud%2Fautoconfigure%2Faad%2Ffilter&resolved=false&types=CODE_SMELL&id=stliu_azure-sdk-for-java&open=AXtAG3Fk00FAjxsg1BZT
private ConfigurableJWTProcessor<SecurityContext> getValidator(JWSAlgorithm jwsAlgorithm) { final ConfigurableJWTProcessor<SecurityContext> jwtProcessor = new DefaultJWTProcessor<>(); final JWSKeySelector<SecurityContext> keySelector = new JWSVerificationKeySelector<>(jwsAlgorithm, keySource); jwtProcessor.setJWSKeySelector(keySelector); jwtProcessor.setJWTClaimsSetVerifier(new DefaultJWTClaimsVerifier<SecurityContext>() { @Override public void verify(JWTClaimsSet claimsSet, SecurityContext ctx) throws BadJWTException { super.verify(claimsSet, ctx); final String issuer = claimsSet.getIssuer(); if (!isAadIssuer(issuer)) { throw new BadJWTException("Invalid token issuer"); } if (Boolean.TRUE.equals(explicitAudienceCheck)) { Optional<String> matchedAudience = claimsSet.getAudience() .stream() .filter(validAudiences::contains) .findFirst(); if (matchedAudience.isPresent()) { LOGGER.debug("Matched audience: [{}]", matchedAudience.get()); } else { throw new BadJWTException("Invalid token audience. Provided value " + claimsSet.getAudience() + "does not match neither client-id nor AppIdUri."); } } } }); return jwtProcessor; }
Optional<String> matchedAudience = claimsSet.getAudience()
private ConfigurableJWTProcessor<SecurityContext> getValidator(JWSAlgorithm jwsAlgorithm) { final ConfigurableJWTProcessor<SecurityContext> jwtProcessor = new DefaultJWTProcessor<>(); final JWSKeySelector<SecurityContext> keySelector = new JWSVerificationKeySelector<>(jwsAlgorithm, keySource); jwtProcessor.setJWSKeySelector(keySelector); jwtProcessor.setJWTClaimsSetVerifier(new DefaultJWTClaimsVerifier<SecurityContext>() { @Override public void verify(JWTClaimsSet claimsSet, SecurityContext ctx) throws BadJWTException { super.verify(claimsSet, ctx); final String issuer = claimsSet.getIssuer(); if (!isAadIssuer(issuer)) { throw new BadJWTException("Invalid token issuer"); } if (explicitAudienceCheck) { Optional<String> matchedAudience = claimsSet.getAudience() .stream() .filter(validAudiences::contains) .findFirst(); if (matchedAudience.isPresent()) { LOGGER.debug("Matched audience: [{}]", matchedAudience.get()); } else { throw new BadJWTException("Invalid token audience. Provided value " + claimsSet.getAudience() + "does not match neither client-id nor AppIdUri."); } } } }); return jwtProcessor; }
class UserPrincipalManager { private static final Logger LOGGER = LoggerFactory.getLogger(UserPrincipalManager.class); private static final String LOGIN_MICROSOFT_ONLINE_ISSUER = "https: private static final String STS_WINDOWS_ISSUER = "https: private static final String STS_CHINA_CLOUD_API_ISSUER = "https: private final JWKSource<SecurityContext> keySource; private final AadAuthenticationProperties aadAuthenticationProperties; private final Boolean explicitAudienceCheck; private final Set<String> validAudiences = new HashSet<>(); /** * ø Creates a new {@link UserPrincipalManager} with a predefined {@link JWKSource}. * <p> * This is helpful in cases the JWK is not a remote JWKSet or for unit testing. * * @param keySource - {@link JWKSource} containing at least one key */ public UserPrincipalManager(JWKSource<SecurityContext> keySource) { this.keySource = keySource; this.explicitAudienceCheck = false; this.aadAuthenticationProperties = null; } /** * Create a new {@link UserPrincipalManager} based of the * {@link AadAuthorizationServerEndpoints * * @param endpoints - used to retrieve the JWKS URL * @param aadAuthenticationProperties - used to retrieve the environment. * @param resourceRetriever - configures the {@link RemoteJWKSet} call. * @param explicitAudienceCheck Whether explicitly check the audience. * @throws IllegalArgumentException If AAD key discovery URI is malformed. */ public UserPrincipalManager(AadAuthorizationServerEndpoints endpoints, AadAuthenticationProperties aadAuthenticationProperties, ResourceRetriever resourceRetriever, boolean explicitAudienceCheck) { this.aadAuthenticationProperties = aadAuthenticationProperties; this.explicitAudienceCheck = explicitAudienceCheck; if (explicitAudienceCheck) { this.validAudiences.add(this.aadAuthenticationProperties.getCredential().getClientId()); this.validAudiences.add(this.aadAuthenticationProperties.getAppIdUri()); } try { String jwkSetEndpoint = endpoints.getJwkSetEndpoint(); keySource = new RemoteJWKSet<>(new URL(jwkSetEndpoint), resourceRetriever); } catch (MalformedURLException e) { throw new IllegalArgumentException("Failed to parse active directory key discovery uri.", e); } } /** * Create a new {@link UserPrincipalManager} based of the * {@link AadAuthorizationServerEndpoints * ()}
class UserPrincipalManager { private static final Logger LOGGER = LoggerFactory.getLogger(UserPrincipalManager.class); private static final String LOGIN_MICROSOFT_ONLINE_ISSUER = "https: private static final String STS_WINDOWS_ISSUER = "https: private static final String STS_CHINA_CLOUD_API_ISSUER = "https: private final JWKSource<SecurityContext> keySource; private final AadAuthenticationProperties aadAuthenticationProperties; private final boolean explicitAudienceCheck; private final Set<String> validAudiences = new HashSet<>(); /** * ø Creates a new {@link UserPrincipalManager} with a predefined {@link JWKSource}. * <p> * This is helpful in cases the JWK is not a remote JWKSet or for unit testing. * * @param keySource - {@link JWKSource} containing at least one key */ public UserPrincipalManager(JWKSource<SecurityContext> keySource) { this.keySource = keySource; this.explicitAudienceCheck = false; this.aadAuthenticationProperties = null; } /** * Create a new {@link UserPrincipalManager} based of the * {@link AadAuthorizationServerEndpoints * * @param endpoints - used to retrieve the JWKS URL * @param aadAuthenticationProperties - used to retrieve the environment. * @param resourceRetriever - configures the {@link RemoteJWKSet} call. * @param explicitAudienceCheck Whether explicitly check the audience. * @throws IllegalArgumentException If AAD key discovery URI is malformed. */ public UserPrincipalManager(AadAuthorizationServerEndpoints endpoints, AadAuthenticationProperties aadAuthenticationProperties, ResourceRetriever resourceRetriever, boolean explicitAudienceCheck) { this.aadAuthenticationProperties = aadAuthenticationProperties; this.explicitAudienceCheck = explicitAudienceCheck; if (explicitAudienceCheck) { this.validAudiences.add(this.aadAuthenticationProperties.getCredential().getClientId()); this.validAudiences.add(this.aadAuthenticationProperties.getAppIdUri()); } try { String jwkSetEndpoint = endpoints.getJwkSetEndpoint(); keySource = new RemoteJWKSet<>(new URL(jwkSetEndpoint), resourceRetriever); } catch (MalformedURLException e) { throw new IllegalArgumentException("Failed to parse active directory key discovery uri.", e); } } /** * Create a new {@link UserPrincipalManager} based of the * {@link AadAuthorizationServerEndpoints * ()}
refer: https://sonarcloud.io/project/issues?directories=sdk%2Fspring%2Fspring-cloud-azure-autoconfigure%2Fsrc%2Fmain%2Fjava%2Fcom%2Fazure%2Fspring%2Fcloud%2Fautoconfigure%2Fkeyvault%2Fenvironment&resolved=false&types=CODE_SMELL&id=stliu_azure-sdk-for-java&open=AYAHSa95i07DX7oNprfr
public String[] getPropertyNames() { if (!caseSensitive) { return properties .keySet() .stream() .flatMap(p -> Stream.of(p, p.replace("-", "."))) .distinct() .toArray(String[]::new); } else { return properties .keySet() .toArray(new String[0]); } }
.flatMap(p -> Stream.of(p, p.replace("-", ".")))
public String[] getPropertyNames() { if (!caseSensitive) { return properties .keySet() .stream() .flatMap(p -> Stream.of(p, p.replace("-", "."))) .distinct() .toArray(String[]::new); } else { return properties .keySet() .toArray(new String[0]); } }
class KeyVaultOperation { private static final Logger LOGGER = LoggerFactory.getLogger(KeyVaultOperation.class); /** * Stores the case-sensitive flag. */ private final boolean caseSensitive; /** * Stores the properties. */ private Map<String, String> properties = new HashMap<>(); /** * Stores the secret client. */ private final SecretClient secretClient; /** * Stores the secret keys. */ private final List<String> secretKeys; /** * Stores the timer object to schedule refresh task. */ private static Timer timer; /** * Constructor. * @param secretClient the Key Vault secret client. * @param refreshDuration the refresh in milliseconds (0 or less disables refresh). * @param secretKeys the secret keys to look for. * @param caseSensitive the case-sensitive flag. */ public KeyVaultOperation(final SecretClient secretClient, final Duration refreshDuration, List<String> secretKeys, boolean caseSensitive) { this.caseSensitive = caseSensitive; this.secretClient = secretClient; this.secretKeys = secretKeys; refreshProperties(); final long refreshInMillis = refreshDuration.toMillis(); if (refreshInMillis > 0) { synchronized (KeyVaultOperation.class) { if (timer != null) { try { timer.cancel(); timer.purge(); } catch (RuntimeException runtimeException) { LOGGER.error("Error of terminating Timer", runtimeException); } } timer = new Timer(true); final TimerTask task = new TimerTask() { @Override public void run() { refreshProperties(); } }; timer.scheduleAtFixedRate(task, refreshInMillis, refreshInMillis); } } } /** * Get the property. * * @param property the property to get. * @return the property value. */ public String getProperty(String property) { return properties.get(toKeyVaultSecretName(property)); } /** * Get the property names. * * @return the property names. */ /** * Refresh the properties by accessing key vault. */ private void refreshProperties() { if (secretKeys == null || secretKeys.isEmpty()) { properties = Optional.of(secretClient) .map(SecretClient::listPropertiesOfSecrets) .map(ContinuablePagedIterable::iterableByPage) .map(i -> StreamSupport.stream(i.spliterator(), false)) .orElseGet(Stream::empty) .map(PagedResponse::getElements) .flatMap(i -> StreamSupport.stream(i.spliterator(), false)) .filter(SecretProperties::isEnabled) .map(p -> secretClient.getSecret(p.getName(), p.getVersion())) .filter(Objects::nonNull) .collect(Collectors.toMap( s -> toKeyVaultSecretName(s.getName()), KeyVaultSecret::getValue )); } else { properties = secretKeys.stream() .map(this::toKeyVaultSecretName) .map(secretClient::getSecret) .filter(Objects::nonNull) .collect(Collectors.toMap( s -> toKeyVaultSecretName(s.getName()), KeyVaultSecret::getValue )); } } /** * For convention, we need to support all relaxed binding format from spring, these may include: * <table> * <tr><td>Spring relaxed binding names</td></tr> * <tr><td>acme.my-project.person.first-name</td></tr> * <tr><td>acme.myProject.person.firstName</td></tr> * <tr><td>acme.my_project.person.first_name</td></tr> * <tr><td>ACME_MYPROJECT_PERSON_FIRSTNAME</td></tr> * </table> * But azure key vault only allows ^[0-9a-zA-Z-]+$ and case-insensitive, so * there must be some conversion between spring names and azure key vault * names. For example, the 4 properties stated above should be converted to * acme-myproject-person-firstname in key vault. * * @param property of secret instance. * @return the value of secret with given name or null. 
*/ private String toKeyVaultSecretName(@NonNull String property) { if (!caseSensitive) { if (property.matches("[a-z0-9A-Z-]+")) { return property.toLowerCase(Locale.US); } else if (property.matches("[A-Z0-9_]+")) { return property.toLowerCase(Locale.US).replace("_", "-"); } else { return property.toLowerCase(Locale.US) .replace("-", "") .replace("_", "") .replace("\\.", "-"); } } else { return property; } } /** * Set the properties. * * @param properties the properties. */ void setProperties(HashMap<String, String> properties) { this.properties = properties; } }
class KeyVaultOperation { private static final Logger LOGGER = LoggerFactory.getLogger(KeyVaultOperation.class); /** * Stores the case-sensitive flag. */ private final boolean caseSensitive; /** * Stores the properties. */ private Map<String, String> properties = new HashMap<>(); /** * Stores the secret client. */ private final SecretClient secretClient; /** * Stores the secret keys. */ private final List<String> secretKeys; /** * Stores the timer object to schedule refresh task. */ private static Timer timer; /** * Constructor. * @param secretClient the Key Vault secret client. * @param refreshDuration the refresh in milliseconds (0 or less disables refresh). * @param secretKeys the secret keys to look for. * @param caseSensitive the case-sensitive flag. */ public KeyVaultOperation(final SecretClient secretClient, final Duration refreshDuration, List<String> secretKeys, boolean caseSensitive) { this.caseSensitive = caseSensitive; this.secretClient = secretClient; this.secretKeys = secretKeys; refreshProperties(); final long refreshInMillis = refreshDuration.toMillis(); if (refreshInMillis > 0) { synchronized (KeyVaultOperation.class) { if (timer != null) { try { timer.cancel(); timer.purge(); } catch (RuntimeException runtimeException) { LOGGER.error("Error of terminating Timer", runtimeException); } } timer = new Timer(true); final TimerTask task = new TimerTask() { @Override public void run() { refreshProperties(); } }; timer.scheduleAtFixedRate(task, refreshInMillis, refreshInMillis); } } } /** * Get the property. * * @param property the property to get. * @return the property value. */ public String getProperty(String property) { return properties.get(toKeyVaultSecretName(property)); } /** * Get the property names. * * @return the property names. */ /** * Refresh the properties by accessing key vault. */ private void refreshProperties() { if (secretKeys == null || secretKeys.isEmpty()) { properties = Optional.of(secretClient) .map(SecretClient::listPropertiesOfSecrets) .map(ContinuablePagedIterable::iterableByPage) .map(i -> StreamSupport.stream(i.spliterator(), false)) .orElseGet(Stream::empty) .map(PagedResponse::getElements) .flatMap(i -> StreamSupport.stream(i.spliterator(), false)) .filter(SecretProperties::isEnabled) .map(p -> secretClient.getSecret(p.getName(), p.getVersion())) .filter(Objects::nonNull) .collect(Collectors.toMap( s -> toKeyVaultSecretName(s.getName()), KeyVaultSecret::getValue )); } else { properties = secretKeys.stream() .map(this::toKeyVaultSecretName) .map(secretClient::getSecret) .filter(Objects::nonNull) .collect(Collectors.toMap( s -> toKeyVaultSecretName(s.getName()), KeyVaultSecret::getValue )); } } /** * For convention, we need to support all relaxed binding format from spring, these may include: * <table> * <tr><td>Spring relaxed binding names</td></tr> * <tr><td>acme.my-project.person.first-name</td></tr> * <tr><td>acme.myProject.person.firstName</td></tr> * <tr><td>acme.my_project.person.first_name</td></tr> * <tr><td>ACME_MYPROJECT_PERSON_FIRSTNAME</td></tr> * </table> * But azure key vault only allows ^[0-9a-zA-Z-]+$ and case-insensitive, so * there must be some conversion between spring names and azure key vault * names. For example, the 4 properties stated above should be converted to * acme-myproject-person-firstname in key vault. * * @param property of secret instance. * @return the value of secret with given name or null. 
*/ private String toKeyVaultSecretName(@NonNull String property) { if (!caseSensitive) { if (property.matches("[a-z0-9A-Z-]+")) { return property.toLowerCase(Locale.US); } else if (property.matches("[A-Z0-9_]+")) { return property.toLowerCase(Locale.US).replace("_", "-"); } else { return property.toLowerCase(Locale.US) .replace("-", "") .replace("_", "") .replace(".", "-"); } } else { return property; } } /** * Set the properties. * * @param properties the properties. */ void setProperties(HashMap<String, String> properties) { this.properties = properties; } }
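The likely point of the Sonar finding on toKeyVaultSecretName is that String.replace performs literal character-sequence replacement, not regex matching: the original replace("\\.", "-") searched for a backslash followed by a dot, which never occurs in a property name, so dots were left untouched, and the corrected version passes a plain ".". A short standalone sketch contrasting the calls (example values only):

    class ReplaceExample {
        public static void main(String[] args) {
            String property = "acme.myproject.person.firstname";

            // replace() is literal: "\\." means backslash + dot, which is absent, so nothing changes.
            System.out.println(property.replace("\\.", "-")); // acme.myproject.person.firstname

            // A plain "." replaces every dot, which is what the secret-name conversion needs.
            System.out.println(property.replace(".", "-"));   // acme-myproject-person-firstname

            // replaceAll() interprets its first argument as a regex, so there "\\." would also work.
            System.out.println(property.replaceAll("\\.", "-")); // acme-myproject-person-firstname
        }
    }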
No need to throw an exception in a private constructor.
private JsonNodeUtils() { throw new IllegalStateException("Utility class"); }
throw new IllegalStateException("Utility class");
private JsonNodeUtils() { }
class JsonNodeUtils { static final TypeReference<Set<String>> STRING_SET = new TypeReference<Set<String>>() { }; static final TypeReference<Map<String, Object>> STRING_OBJECT_MAP = new TypeReference<Map<String, Object>>() { }; static String findStringValue(JsonNode jsonNode, String fieldName) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isTextual()) ? value.asText() : null; } static <T> T findValue(JsonNode jsonNode, String fieldName, TypeReference<T> valueTypeReference, ObjectMapper mapper) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isContainerNode()) ? mapper.convertValue(value, valueTypeReference) : null; } static JsonNode findObjectNode(JsonNode jsonNode, String fieldName) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isObject()) ? value : null; } }
class JsonNodeUtils { static final TypeReference<Set<String>> STRING_SET = new TypeReference<Set<String>>() { }; static final TypeReference<Map<String, Object>> STRING_OBJECT_MAP = new TypeReference<Map<String, Object>>() { }; static String findStringValue(JsonNode jsonNode, String fieldName) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isTextual()) ? value.asText() : null; } static <T> T findValue(JsonNode jsonNode, String fieldName, TypeReference<T> valueTypeReference, ObjectMapper mapper) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isContainerNode()) ? mapper.convertValue(value, valueTypeReference) : null; } static JsonNode findObjectNode(JsonNode jsonNode, String fieldName) { if (jsonNode == null) { return null; } JsonNode value = jsonNode.findValue(fieldName); return (value != null && value.isObject()) ? value : null; } }
No need for two lines
private String action() { return String.format("Change Spring Boot version to one of the following versions %s .%n" + "You can find the latest Spring Boot versions here [%s]. %n" + "If you want to learn more about the Spring Cloud Azure Release train compatibility, " + "you can visit this page [%s] and check the [Release Trains] section.%nIf you want to disable this " + "check, " + "just set the property [spring.cloud.azure.compatibility-verifier.enabled=false]", this.acceptedVersions, "https: + "-Versions-Mapping"); }
+ "just set the property [spring.cloud.azure.compatibility-verifier.enabled=false]",
private String action() { return String.format("Change Spring Boot version to one of the following versions %s .%n" + "You can find the latest Spring Boot versions here [%s]. %n" + "If you want to learn more about the Spring Cloud Azure Release train compatibility, " + "you can visit this page [%s] and check the [Release Trains] section.%nIf you want to disable this " + "check, just set the property [spring.cloud.azure.compatibility-verifier.enabled=false]", this.acceptedVersions, "https: + "-Versions-Mapping"); }
class AzureSpringBootVersionVerifier implements Predicate<String> { private static final Logger LOGGER = LoggerFactory.getLogger(AzureSpringBootVersionVerifier.class); public static final String SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_5 = "org.springframework.boot.context.properties.bind.Bindable.BindRestriction"; public static final String SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_6 = "org.springframework.boot.autoconfigure.data.redis.ClientResourcesBuilderCustomizer"; /** * Versions supported by Spring Cloud Azure, for present is [2.5, 2.6]. Update this value if needed. */ private final Map<String, String> supportedVersions = new HashMap<>(); /** * Versionsspecified in the configuration or environment. */ private final List<String> acceptedVersions; public AzureSpringBootVersionVerifier(List<String> acceptedVersions) { this.acceptedVersions = acceptedVersions; init(); } /** * Init default supported Spring Boot Version compatibility check meta data. */ private void init() { supportedVersions.put("2.5", SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_5); supportedVersions.put("2.6", SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_6); } /** * Verify the current spring-boot version * * @return Verification result of spring-boot version * @throws AzureCompatibilityNotMetException thrown if using an unsupported spring-boot version */ public VerificationResult verify() { if (this.springBootVersionMatches()) { return VerificationResult.compatible(); } else { List<VerificationResult> errors = new ArrayList<>(Collections.singleton(VerificationResult.notCompatible(this.errorDescription(), this.action()))); throw new AzureCompatibilityNotMetException(errors); } } private String errorDescription() { String versionFromManifest = this.getVersionFromManifest(); return StringUtils.hasText(versionFromManifest) ? String.format("Spring Boot [%s] is not compatible with this" + " Spring Cloud Azure release train", versionFromManifest) : "Spring Boot is not compatible with this " + "Spring Cloud Azure release train"; } String getVersionFromManifest() { return SpringBootVersion.getVersion(); } private boolean springBootVersionMatches() { for (String acceptedVersion : acceptedVersions) { try { boolean matched = this.matchSpringBootVersionFromManifest(acceptedVersion); if (matched) { return true; } } catch (FileNotFoundException e) { String versionString = stripWildCardFromVersion(acceptedVersion); String fullyQuallifiedClassName = this.supportedVersions.get(versionString); if (test(fullyQuallifiedClassName)) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Predicate [" + versionString + "] was matched"); } return true; } } } return false; } private boolean matchSpringBootVersionFromManifest(String acceptedVersion) throws FileNotFoundException { String version = this.getVersionFromManifest(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Version found in Boot manifest [" + version + "]"); } if (!StringUtils.hasText(version)) { LOGGER.info("Cannot check Boot version from manifest"); throw new FileNotFoundException("Spring Boot version not found"); } else { return version.startsWith(stripWildCardFromVersion(acceptedVersion)); } } private static String stripWildCardFromVersion(String version) { return version.endsWith(".x") ? version.substring(0, version.indexOf(".x")) : version; } @Override public boolean test(String fullyQuallifiedClassName) { try { if (fullyQuallifiedClassName == null) { return false; } Class.forName(fullyQuallifiedClassName); return true; } catch (ClassNotFoundException ex) { return false; } } }
class AzureSpringBootVersionVerifier { private static final Logger LOGGER = LoggerFactory.getLogger(AzureSpringBootVersionVerifier.class); static final String SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_5 = "org.springframework.boot.context.properties.bind.Bindable.BindRestriction"; static final String SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_6 = "org.springframework.boot.autoconfigure.data.redis.ClientResourcesBuilderCustomizer"; /** * Versions supported by Spring Cloud Azure, for present is [2.5, 2.6]. Update this value if needed. */ private final Map<String, String> supportedVersions = new HashMap<>(); /** * Versionsspecified in the configuration or environment. */ private final List<String> acceptedVersions; private final ClassNameResolverPredicate classNameResolver; public AzureSpringBootVersionVerifier(List<String> acceptedVersions, ClassNameResolverPredicate classNameResolver) { this.acceptedVersions = acceptedVersions; this.classNameResolver = classNameResolver; initDefaultSupportedBootVersionCheckMeta(); } /** * Init default supported Spring Boot Version compatibility check meta data. */ private void initDefaultSupportedBootVersionCheckMeta() { supportedVersions.put("2.5", SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_5); supportedVersions.put("2.6", SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_6); } /** * Verify the current spring-boot version * * @return Verification result of spring-boot version * @throws AzureCompatibilityNotMetException thrown if using an unsupported spring-boot version */ public VerificationResult verify() { if (this.springBootVersionMatches()) { return VerificationResult.compatible(); } else { List<VerificationResult> errors = new ArrayList<>(Collections.singleton(VerificationResult.notCompatible(this.errorDescription(), this.action()))); throw new AzureCompatibilityNotMetException(errors); } } private String errorDescription() { String versionFromManifest = this.getVersionFromManifest(); return StringUtils.hasText(versionFromManifest) ? String.format("Spring Boot [%s] is not compatible with this" + " Spring Cloud Azure release train", versionFromManifest) : "Spring Boot is not compatible with this " + "Spring Cloud Azure release train"; } String getVersionFromManifest() { return SpringBootVersion.getVersion(); } private boolean springBootVersionMatches() { for (String acceptedVersion : acceptedVersions) { try { boolean matched = this.matchSpringBootVersionFromManifest(acceptedVersion); if (matched) { return true; } } catch (FileNotFoundException e) { String versionString = stripWildCardFromVersion(acceptedVersion); String fullyQuallifiedClassName = this.supportedVersions.get(versionString); if (classNameResolver.resolve(fullyQuallifiedClassName)) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Predicate for Spring Boot Version of [" + versionString + "] was matched"); } return true; } } } return false; } private boolean matchSpringBootVersionFromManifest(String acceptedVersion) throws FileNotFoundException { String version = this.getVersionFromManifest(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Version found in Boot manifest [" + version + "]"); } if (!StringUtils.hasText(version)) { LOGGER.info("Cannot check Boot version from manifest"); throw new FileNotFoundException("Spring Boot version not found"); } else { return version.startsWith(stripWildCardFromVersion(acceptedVersion)); } } private static String stripWildCardFromVersion(String version) { return version.endsWith(".x") ? version.substring(0, version.indexOf(".x")) : version; } }
`predicate` is not a version string; logging `Predicate` immediately followed by `versionString` reads strangely.
private boolean springBootVersionMatches() { for (String acceptedVersion : acceptedVersions) { try { boolean matched = this.matchSpringBootVersionFromManifest(acceptedVersion); if (matched) { return true; } } catch (FileNotFoundException e) { String versionString = stripWildCardFromVersion(acceptedVersion); String fullyQuallifiedClassName = this.supportedVersions.get(versionString); if (test(fullyQuallifiedClassName)) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Predicate [" + versionString + "] was matched"); } return true; } } } return false; }
LOGGER.debug("Predicate [" + versionString + "] was matched");
private boolean springBootVersionMatches() { for (String acceptedVersion : acceptedVersions) { try { boolean matched = this.matchSpringBootVersionFromManifest(acceptedVersion); if (matched) { return true; } } catch (FileNotFoundException e) { String versionString = stripWildCardFromVersion(acceptedVersion); String fullyQuallifiedClassName = this.supportedVersions.get(versionString); if (classNameResolver.resolve(fullyQuallifiedClassName)) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Predicate for Spring Boot Version of [" + versionString + "] was matched"); } return true; } } } return false; }
class AzureSpringBootVersionVerifier implements Predicate<String> { private static final Logger LOGGER = LoggerFactory.getLogger(AzureSpringBootVersionVerifier.class); public static final String SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_5 = "org.springframework.boot.context.properties.bind.Bindable.BindRestriction"; public static final String SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_6 = "org.springframework.boot.autoconfigure.data.redis.ClientResourcesBuilderCustomizer"; /** * Versions supported by Spring Cloud Azure, for present is [2.5, 2.6]. Update this value if needed. */ private final Map<String, String> supportedVersions = new HashMap<>(); /** * Versionsspecified in the configuration or environment. */ private final List<String> acceptedVersions; public AzureSpringBootVersionVerifier(List<String> acceptedVersions) { this.acceptedVersions = acceptedVersions; init(); } /** * Init default supported Spring Boot Version compatibility check meta data. */ private void init() { supportedVersions.put("2.5", SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_5); supportedVersions.put("2.6", SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_6); } /** * Verify the current spring-boot version * * @return Verification result of spring-boot version * @throws AzureCompatibilityNotMetException thrown if using an unsupported spring-boot version */ public VerificationResult verify() { if (this.springBootVersionMatches()) { return VerificationResult.compatible(); } else { List<VerificationResult> errors = new ArrayList<>(Collections.singleton(VerificationResult.notCompatible(this.errorDescription(), this.action()))); throw new AzureCompatibilityNotMetException(errors); } } private String errorDescription() { String versionFromManifest = this.getVersionFromManifest(); return StringUtils.hasText(versionFromManifest) ? String.format("Spring Boot [%s] is not compatible with this" + " Spring Cloud Azure release train", versionFromManifest) : "Spring Boot is not compatible with this " + "Spring Cloud Azure release train"; } private String action() { return String.format("Change Spring Boot version to one of the following versions %s .%n" + "You can find the latest Spring Boot versions here [%s]. %n" + "If you want to learn more about the Spring Cloud Azure Release train compatibility, " + "you can visit this page [%s] and check the [Release Trains] section.%nIf you want to disable this " + "check, " + "just set the property [spring.cloud.azure.compatibility-verifier.enabled=false]", this.acceptedVersions, "https: + "-Versions-Mapping"); } String getVersionFromManifest() { return SpringBootVersion.getVersion(); } private boolean matchSpringBootVersionFromManifest(String acceptedVersion) throws FileNotFoundException { String version = this.getVersionFromManifest(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Version found in Boot manifest [" + version + "]"); } if (!StringUtils.hasText(version)) { LOGGER.info("Cannot check Boot version from manifest"); throw new FileNotFoundException("Spring Boot version not found"); } else { return version.startsWith(stripWildCardFromVersion(acceptedVersion)); } } private static String stripWildCardFromVersion(String version) { return version.endsWith(".x") ? version.substring(0, version.indexOf(".x")) : version; } @Override public boolean test(String fullyQuallifiedClassName) { try { if (fullyQuallifiedClassName == null) { return false; } Class.forName(fullyQuallifiedClassName); return true; } catch (ClassNotFoundException ex) { return false; } } }
class AzureSpringBootVersionVerifier { private static final Logger LOGGER = LoggerFactory.getLogger(AzureSpringBootVersionVerifier.class); static final String SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_5 = "org.springframework.boot.context.properties.bind.Bindable.BindRestriction"; static final String SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_6 = "org.springframework.boot.autoconfigure.data.redis.ClientResourcesBuilderCustomizer"; /** * Versions supported by Spring Cloud Azure, for present is [2.5, 2.6]. Update this value if needed. */ private final Map<String, String> supportedVersions = new HashMap<>(); /** * Versionsspecified in the configuration or environment. */ private final List<String> acceptedVersions; private final ClassNameResolverPredicate classNameResolver; public AzureSpringBootVersionVerifier(List<String> acceptedVersions, ClassNameResolverPredicate classNameResolver) { this.acceptedVersions = acceptedVersions; this.classNameResolver = classNameResolver; initDefaultSupportedBootVersionCheckMeta(); } /** * Init default supported Spring Boot Version compatibility check meta data. */ private void initDefaultSupportedBootVersionCheckMeta() { supportedVersions.put("2.5", SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_5); supportedVersions.put("2.6", SPRINGBOOT_CONDITIONAL_CLASS_NAME_OF_2_6); } /** * Verify the current spring-boot version * * @return Verification result of spring-boot version * @throws AzureCompatibilityNotMetException thrown if using an unsupported spring-boot version */ public VerificationResult verify() { if (this.springBootVersionMatches()) { return VerificationResult.compatible(); } else { List<VerificationResult> errors = new ArrayList<>(Collections.singleton(VerificationResult.notCompatible(this.errorDescription(), this.action()))); throw new AzureCompatibilityNotMetException(errors); } } private String errorDescription() { String versionFromManifest = this.getVersionFromManifest(); return StringUtils.hasText(versionFromManifest) ? String.format("Spring Boot [%s] is not compatible with this" + " Spring Cloud Azure release train", versionFromManifest) : "Spring Boot is not compatible with this " + "Spring Cloud Azure release train"; } private String action() { return String.format("Change Spring Boot version to one of the following versions %s .%n" + "You can find the latest Spring Boot versions here [%s]. %n" + "If you want to learn more about the Spring Cloud Azure Release train compatibility, " + "you can visit this page [%s] and check the [Release Trains] section.%nIf you want to disable this " + "check, just set the property [spring.cloud.azure.compatibility-verifier.enabled=false]", this.acceptedVersions, "https: + "-Versions-Mapping"); } String getVersionFromManifest() { return SpringBootVersion.getVersion(); } private boolean matchSpringBootVersionFromManifest(String acceptedVersion) throws FileNotFoundException { String version = this.getVersionFromManifest(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Version found in Boot manifest [" + version + "]"); } if (!StringUtils.hasText(version)) { LOGGER.info("Cannot check Boot version from manifest"); throw new FileNotFoundException("Spring Boot version not found"); } else { return version.startsWith(stripWildCardFromVersion(acceptedVersion)); } } private static String stripWildCardFromVersion(String version) { return version.endsWith(".x") ? version.substring(0, version.indexOf(".x")) : version; } }
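The refactored verifier above delegates the classpath probe to a ClassNameResolverPredicate collaborator that is not included in this record. As a hedged sketch (the real type may differ), it presumably reproduces the Class.forName check that the removed test(String) method performed:

// Hypothetical collaborator; the actual ClassNameResolverPredicate is not shown
// in this record. It mirrors the original test(String) logic.
class ClassNameResolverPredicate {

    // Returns true if the given class name is present on the classpath.
    boolean resolve(String fullyQualifiedClassName) {
        if (fullyQualifiedClassName == null) {
            return false;
        }
        try {
            Class.forName(fullyQualifiedClassName);
            return true;
        } catch (ClassNotFoundException ex) {
            return false;
        }
    }
}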
Please add more test cases for this.
private String toKeyVaultSecretName(@NonNull String property) { if (!caseSensitive) { if (property.matches("[a-z0-9A-Z-]+")) { return property.toLowerCase(Locale.US); } else if (property.matches("[A-Z0-9_]+")) { return property.toLowerCase(Locale.US).replace("_", "-"); } else { return property.toLowerCase(Locale.US) .replace("-", "") .replace("_", "") .replace(".", "-"); } } else { return property; } }
} else {
private String toKeyVaultSecretName(@NonNull String property) { if (!caseSensitive) { if (property.matches("[a-z0-9A-Z-]+")) { return property.toLowerCase(Locale.US); } else if (property.matches("[A-Z0-9_]+")) { return property.toLowerCase(Locale.US).replace("_", "-"); } else { return property.toLowerCase(Locale.US) .replace("-", "") .replace("_", "") .replace(".", "-"); } } else { return property; } }
class KeyVaultOperation { private static final Logger LOGGER = LoggerFactory.getLogger(KeyVaultOperation.class); /** * Stores the case-sensitive flag. */ private final boolean caseSensitive; /** * Stores the properties. */ private Map<String, String> properties = new HashMap<>(); /** * Stores the secret client. */ private final SecretClient secretClient; /** * Stores the secret keys. */ private final List<String> secretKeys; /** * Stores the timer object to schedule refresh task. */ private static Timer timer; /** * Constructor. * @param secretClient the Key Vault secret client. * @param refreshDuration the refresh in milliseconds (0 or less disables refresh). * @param secretKeys the secret keys to look for. * @param caseSensitive the case-sensitive flag. */ public KeyVaultOperation(final SecretClient secretClient, final Duration refreshDuration, List<String> secretKeys, boolean caseSensitive) { this.caseSensitive = caseSensitive; this.secretClient = secretClient; this.secretKeys = secretKeys; refreshProperties(); final long refreshInMillis = refreshDuration.toMillis(); if (refreshInMillis > 0) { synchronized (KeyVaultOperation.class) { if (timer != null) { try { timer.cancel(); timer.purge(); } catch (RuntimeException runtimeException) { LOGGER.error("Error of terminating Timer", runtimeException); } } timer = new Timer(true); final TimerTask task = new TimerTask() { @Override public void run() { refreshProperties(); } }; timer.scheduleAtFixedRate(task, refreshInMillis, refreshInMillis); } } } /** * Get the property. * * @param property the property to get. * @return the property value. */ public String getProperty(String property) { return properties.get(toKeyVaultSecretName(property)); } /** * Get the property names. * * @return the property names. */ public String[] getPropertyNames() { if (!caseSensitive) { return properties .keySet() .stream() .flatMap(p -> Stream.of(p, p.replace("-", "."))) .distinct() .toArray(String[]::new); } else { return properties .keySet() .toArray(new String[0]); } } /** * Refresh the properties by accessing key vault. */ private void refreshProperties() { if (secretKeys == null || secretKeys.isEmpty()) { properties = Optional.of(secretClient) .map(SecretClient::listPropertiesOfSecrets) .map(ContinuablePagedIterable::iterableByPage) .map(i -> StreamSupport.stream(i.spliterator(), false)) .orElseGet(Stream::empty) .map(PagedResponse::getElements) .flatMap(i -> StreamSupport.stream(i.spliterator(), false)) .filter(SecretProperties::isEnabled) .map(p -> secretClient.getSecret(p.getName(), p.getVersion())) .filter(Objects::nonNull) .collect(Collectors.toMap( s -> toKeyVaultSecretName(s.getName()), KeyVaultSecret::getValue )); } else { properties = secretKeys.stream() .map(this::toKeyVaultSecretName) .map(secretClient::getSecret) .filter(Objects::nonNull) .collect(Collectors.toMap( s -> toKeyVaultSecretName(s.getName()), KeyVaultSecret::getValue )); } } /** * For convention, we need to support all relaxed binding format from spring, these may include: * <table> * <tr><td>Spring relaxed binding names</td></tr> * <tr><td>acme.my-project.person.first-name</td></tr> * <tr><td>acme.myProject.person.firstName</td></tr> * <tr><td>acme.my_project.person.first_name</td></tr> * <tr><td>ACME_MYPROJECT_PERSON_FIRSTNAME</td></tr> * </table> * But azure key vault only allows ^[0-9a-zA-Z-]+$ and case-insensitive, so * there must be some conversion between spring names and azure key vault * names. 
For example, the 4 properties stated above should be converted to * acme-myproject-person-firstname in key vault. * * @param property of secret instance. * @return the value of secret with given name or null. */ /** * Set the properties. * * @param properties the properties. */ void setProperties(HashMap<String, String> properties) { this.properties = properties; } }
class KeyVaultOperation { private static final Logger LOGGER = LoggerFactory.getLogger(KeyVaultOperation.class); /** * Stores the case-sensitive flag. */ private final boolean caseSensitive; /** * Stores the properties. */ private Map<String, String> properties = new HashMap<>(); /** * Stores the secret client. */ private final SecretClient secretClient; /** * Stores the secret keys. */ private final List<String> secretKeys; /** * Stores the timer object to schedule refresh task. */ private static Timer timer; /** * Constructor. * @param secretClient the Key Vault secret client. * @param refreshDuration the refresh in milliseconds (0 or less disables refresh). * @param secretKeys the secret keys to look for. * @param caseSensitive the case-sensitive flag. */ public KeyVaultOperation(final SecretClient secretClient, final Duration refreshDuration, List<String> secretKeys, boolean caseSensitive) { this.caseSensitive = caseSensitive; this.secretClient = secretClient; this.secretKeys = secretKeys; refreshProperties(); final long refreshInMillis = refreshDuration.toMillis(); if (refreshInMillis > 0) { synchronized (KeyVaultOperation.class) { if (timer != null) { try { timer.cancel(); timer.purge(); } catch (RuntimeException runtimeException) { LOGGER.error("Error of terminating Timer", runtimeException); } } timer = new Timer(true); final TimerTask task = new TimerTask() { @Override public void run() { refreshProperties(); } }; timer.scheduleAtFixedRate(task, refreshInMillis, refreshInMillis); } } } /** * Get the property. * * @param property the property to get. * @return the property value. */ public String getProperty(String property) { return properties.get(toKeyVaultSecretName(property)); } /** * Get the property names. * * @return the property names. */ public String[] getPropertyNames() { if (!caseSensitive) { return properties .keySet() .stream() .flatMap(p -> Stream.of(p, p.replace("-", "."))) .distinct() .toArray(String[]::new); } else { return properties .keySet() .toArray(new String[0]); } } /** * Refresh the properties by accessing key vault. */ private void refreshProperties() { if (secretKeys == null || secretKeys.isEmpty()) { properties = Optional.of(secretClient) .map(SecretClient::listPropertiesOfSecrets) .map(ContinuablePagedIterable::iterableByPage) .map(i -> StreamSupport.stream(i.spliterator(), false)) .orElseGet(Stream::empty) .map(PagedResponse::getElements) .flatMap(i -> StreamSupport.stream(i.spliterator(), false)) .filter(SecretProperties::isEnabled) .map(p -> secretClient.getSecret(p.getName(), p.getVersion())) .filter(Objects::nonNull) .collect(Collectors.toMap( s -> toKeyVaultSecretName(s.getName()), KeyVaultSecret::getValue )); } else { properties = secretKeys.stream() .map(this::toKeyVaultSecretName) .map(secretClient::getSecret) .filter(Objects::nonNull) .collect(Collectors.toMap( s -> toKeyVaultSecretName(s.getName()), KeyVaultSecret::getValue )); } } /** * For convention, we need to support all relaxed binding format from spring, these may include: * <table> * <tr><td>Spring relaxed binding names</td></tr> * <tr><td>acme.my-project.person.first-name</td></tr> * <tr><td>acme.myProject.person.firstName</td></tr> * <tr><td>acme.my_project.person.first_name</td></tr> * <tr><td>ACME_MYPROJECT_PERSON_FIRSTNAME</td></tr> * </table> * But azure key vault only allows ^[0-9a-zA-Z-]+$ and case-insensitive, so * there must be some conversion between spring names and azure key vault * names. 
For example, the 4 properties stated above should be converted to * acme-myproject-person-firstname in key vault. * * @param property of secret instance. * @return the value of secret with given name or null. */ /** * Set the properties. * * @param properties the properties. */ void setProperties(HashMap<String, String> properties) { this.properties = properties; } }
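In response to the "please add more test cases" comment, here is a hedged JUnit 5 sketch of what additional coverage for the relaxed-binding conversion could look like. It assumes the test sits in the same package as KeyVaultOperation (so the package-private setProperties is reachable), that SecretClient can be mocked (for example with mockito-inline if the class is final), and that an unstubbed mock is acceptable for the constructor's initial refresh; the test class name and sample values are illustrative.

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.mock;

import com.azure.security.keyvault.secrets.SecretClient;
import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

// Illustrative test class; must live in the same package as KeyVaultOperation.
class KeyVaultOperationRelaxedBindingTest {

    private KeyVaultOperation operation;

    @BeforeEach
    void setUp() {
        // An unstubbed mock returns null from listPropertiesOfSecrets, which the
        // Optional-based refresh chain tolerates, leaving the property map empty.
        SecretClient secretClient = mock(SecretClient.class);
        operation = new KeyVaultOperation(secretClient, Duration.ZERO, Collections.emptyList(), false);

        HashMap<String, String> properties = new HashMap<>();
        properties.put("acme-myproject-person-firstname", "Ada");
        operation.setProperties(properties);
    }

    // All four Spring relaxed-binding spellings from the Javadoc should resolve
    // to the same Key Vault secret name.
    @ParameterizedTest
    @ValueSource(strings = {
        "acme.my-project.person.first-name",
        "acme.myProject.person.firstName",
        "acme.my_project.person.first_name",
        "ACME_MYPROJECT_PERSON_FIRSTNAME"
    })
    void relaxedBindingNamesResolveToSameSecret(String propertyName) {
        assertEquals("Ada", operation.getProperty(propertyName));
    }
}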
To avoid confusion between sync and async, should we use `apiTextAnalyticsClient` and `batchApiTextAnalyticsClient`?
public TextAnalyticsAsyncClient buildAsyncClient() { final Configuration buildConfiguration = (configuration == null) ? Configuration.getGlobalConfiguration().clone() : configuration; final TextAnalyticsServiceVersion serviceVersion = version != null ? version : TextAnalyticsServiceVersion.getLatest(); Objects.requireNonNull(endpoint, "'Endpoint' is required and can not be null."); HttpPipeline pipeline = httpPipeline; if (pipeline == null) { ClientOptions buildClientOptions = this.clientOptions == null ? DEFAULT_CLIENT_OPTIONS : this.clientOptions; HttpLogOptions buildLogOptions = this.httpLogOptions == null ? DEFAULT_LOG_OPTIONS : this.httpLogOptions; final String applicationId = CoreUtils.getApplicationId(buildClientOptions, buildLogOptions); final List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new AddHeadersPolicy(DEFAULT_HTTP_HEADERS)); policies.add(new AddHeadersFromContextPolicy()); policies.add(new UserAgentPolicy(applicationId, CLIENT_NAME, CLIENT_VERSION, buildConfiguration)); policies.add(new RequestIdPolicy()); policies.addAll(perCallPolicies); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(ClientBuilderUtil.validateAndGetRetryPolicy(retryPolicy, retryOptions, DEFAULT_RETRY_POLICY)); policies.add(new AddDatePolicy()); if (tokenCredential != null) { policies.add(new BearerTokenAuthenticationPolicy(tokenCredential, DEFAULT_SCOPE)); } else if (credential != null) { policies.add(new AzureKeyCredentialPolicy(OCP_APIM_SUBSCRIPTION_KEY, credential)); } else { throw logger.logExceptionAsError( new IllegalArgumentException("Missing credential information while building a client.")); } policies.addAll(perRetryPolicies); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpHeaders headers = new HttpHeaders(); buildClientOptions.getHeaders().forEach(header -> headers.set(header.getName(), header.getValue())); if (headers.getSize() > 0) { policies.add(new AddHeadersPolicy(headers)); } policies.add(new HttpLoggingPolicy(buildLogOptions)); pipeline = new HttpPipelineBuilder() .clientOptions(buildClientOptions) .httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } if (!isConsolidatedServiceVersion(version)) { final TextAnalyticsClientImpl textAnalyticsAPI = new TextAnalyticsClientImplBuilder() .endpoint(endpoint) .apiVersion(serviceVersion.getVersion()) .pipeline(pipeline) .buildClient(); return new TextAnalyticsAsyncClient(textAnalyticsAPI, serviceVersion, defaultCountryHint, defaultLanguage); } else { final MicrosoftCognitiveLanguageServiceImpl syncApiTextAnalyticsClient = new MicrosoftCognitiveLanguageServiceImplBuilder() .endpoint(endpoint) .apiVersion(serviceVersion.getVersion()) .pipeline(pipeline) .buildClient(); return new TextAnalyticsAsyncClient(syncApiTextAnalyticsClient, serviceVersion, defaultCountryHint, defaultLanguage); } }
final MicrosoftCognitiveLanguageServiceImpl syncApiTextAnalyticsClient =
public TextAnalyticsAsyncClient buildAsyncClient() { final Configuration buildConfiguration = (configuration == null) ? Configuration.getGlobalConfiguration().clone() : configuration; final TextAnalyticsServiceVersion serviceVersion = version != null ? version : TextAnalyticsServiceVersion.getLatest(); Objects.requireNonNull(endpoint, "'Endpoint' is required and can not be null."); HttpPipeline pipeline = httpPipeline; if (pipeline == null) { ClientOptions buildClientOptions = this.clientOptions == null ? DEFAULT_CLIENT_OPTIONS : this.clientOptions; HttpLogOptions buildLogOptions = this.httpLogOptions == null ? DEFAULT_LOG_OPTIONS : this.httpLogOptions; final String applicationId = CoreUtils.getApplicationId(buildClientOptions, buildLogOptions); final List<HttpPipelinePolicy> policies = new ArrayList<>(); policies.add(new AddHeadersPolicy(DEFAULT_HTTP_HEADERS)); policies.add(new AddHeadersFromContextPolicy()); policies.add(new UserAgentPolicy(applicationId, CLIENT_NAME, CLIENT_VERSION, buildConfiguration)); policies.add(new RequestIdPolicy()); policies.addAll(perCallPolicies); HttpPolicyProviders.addBeforeRetryPolicies(policies); policies.add(ClientBuilderUtil.validateAndGetRetryPolicy(retryPolicy, retryOptions, DEFAULT_RETRY_POLICY)); policies.add(new AddDatePolicy()); if (tokenCredential != null) { policies.add(new BearerTokenAuthenticationPolicy(tokenCredential, DEFAULT_SCOPE)); } else if (credential != null) { policies.add(new AzureKeyCredentialPolicy(OCP_APIM_SUBSCRIPTION_KEY, credential)); } else { throw logger.logExceptionAsError( new IllegalArgumentException("Missing credential information while building a client.")); } policies.addAll(perRetryPolicies); HttpPolicyProviders.addAfterRetryPolicies(policies); policies.add(new HttpLoggingPolicy(httpLogOptions)); HttpHeaders headers = new HttpHeaders(); buildClientOptions.getHeaders().forEach(header -> headers.set(header.getName(), header.getValue())); if (headers.getSize() > 0) { policies.add(new AddHeadersPolicy(headers)); } policies.add(new HttpLoggingPolicy(buildLogOptions)); pipeline = new HttpPipelineBuilder() .clientOptions(buildClientOptions) .httpClient(httpClient) .policies(policies.toArray(new HttpPipelinePolicy[0])) .build(); } if (!isConsolidatedServiceVersion(version)) { final TextAnalyticsClientImpl textAnalyticsAPI = new TextAnalyticsClientImplBuilder() .endpoint(endpoint) .apiVersion(serviceVersion.getVersion()) .pipeline(pipeline) .buildClient(); return new TextAnalyticsAsyncClient(textAnalyticsAPI, serviceVersion, defaultCountryHint, defaultLanguage); } else { final MicrosoftCognitiveLanguageServiceImpl batchApiTextAnalyticsClient = new MicrosoftCognitiveLanguageServiceImplBuilder() .endpoint(endpoint) .apiVersion(serviceVersion.getVersion()) .pipeline(pipeline) .buildClient(); return new TextAnalyticsAsyncClient(batchApiTextAnalyticsClient, serviceVersion, defaultCountryHint, defaultLanguage); } }
class TextAnalyticsClientBuilder implements AzureKeyCredentialTrait<TextAnalyticsClientBuilder>, ConfigurationTrait<TextAnalyticsClientBuilder>, EndpointTrait<TextAnalyticsClientBuilder>, HttpTrait<TextAnalyticsClientBuilder>, TokenCredentialTrait<TextAnalyticsClientBuilder> { private static final String DEFAULT_SCOPE = "https: private static final String NAME = "name"; private static final String OCP_APIM_SUBSCRIPTION_KEY = "Ocp-Apim-Subscription-Key"; private static final String TEXT_ANALYTICS_PROPERTIES = "azure-ai-textanalytics.properties"; private static final String VERSION = "version"; private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy(); private static final ClientOptions DEFAULT_CLIENT_OPTIONS = new ClientOptions(); private static final HttpLogOptions DEFAULT_LOG_OPTIONS = new HttpLogOptions(); private static final HttpHeaders DEFAULT_HTTP_HEADERS = new HttpHeaders(); private final ClientLogger logger = new ClientLogger(TextAnalyticsClientBuilder.class); private final List<HttpPipelinePolicy> perCallPolicies = new ArrayList<>(); private final List<HttpPipelinePolicy> perRetryPolicies = new ArrayList<>(); private ClientOptions clientOptions; private Configuration configuration; private AzureKeyCredential credential; private String defaultCountryHint; private String defaultLanguage; private String endpoint; private HttpClient httpClient; private HttpLogOptions httpLogOptions; private HttpPipeline httpPipeline; private RetryPolicy retryPolicy; private RetryOptions retryOptions; private TokenCredential tokenCredential; private TextAnalyticsServiceVersion version; private static final String CLIENT_NAME; private static final String CLIENT_VERSION; static { Map<String, String> properties = CoreUtils.getProperties(TEXT_ANALYTICS_PROPERTIES); CLIENT_NAME = properties.getOrDefault(NAME, "UnknownName"); CLIENT_VERSION = properties.getOrDefault(VERSION, "UnknownVersion"); } /** * Creates a {@link TextAnalyticsClient} based on options set in the builder. Every time {@code buildClient()} is * called a new instance of {@link TextAnalyticsClient} is created. * * <p> * If {@link * endpoint} are used to create the {@link TextAnalyticsClient client}. All other builder settings are ignored * </p> * * @return A {@link TextAnalyticsClient} with the options set from the builder. * @throws NullPointerException if {@link * has not been set. * @throws IllegalArgumentException if {@link * @throws IllegalStateException If both {@link * and {@link */ public TextAnalyticsClient buildClient() { return new TextAnalyticsClient(buildAsyncClient()); } /** * Creates a {@link TextAnalyticsAsyncClient} based on options set in the builder. Every time {@code * buildAsyncClient()} is called a new instance of {@link TextAnalyticsAsyncClient} is created. * * <p> * If {@link * endpoint} are used to create the {@link TextAnalyticsClient client}. All other builder settings are ignored. * </p> * * @return A {@link TextAnalyticsAsyncClient} with the options set from the builder. * @throws NullPointerException if {@link * has not been set. * @throws IllegalArgumentException if {@link * @throws IllegalStateException If both {@link * and {@link */ /** * Set the default language option for one client. * * @param language default language * @return The updated {@link TextAnalyticsClientBuilder} object. */ public TextAnalyticsClientBuilder defaultLanguage(String language) { this.defaultLanguage = language; return this; } /** * Set the default country hint option for one client. 
* * @param countryHint default country hint * @return The updated {@link TextAnalyticsClientBuilder} object. */ public TextAnalyticsClientBuilder defaultCountryHint(String countryHint) { this.defaultCountryHint = countryHint; return this; } /** * Sets the service endpoint for the Azure Text Analytics instance. * * @param endpoint The URL of the Azure Text Analytics instance service requests to and receive responses from. * @return The updated {@link TextAnalyticsClientBuilder} object. * @throws NullPointerException if {@code endpoint} is null * @throws IllegalArgumentException if {@code endpoint} cannot be parsed into a valid URL. */ @Override public TextAnalyticsClientBuilder endpoint(String endpoint) { Objects.requireNonNull(endpoint, "'endpoint' cannot be null."); try { new URL(endpoint); } catch (MalformedURLException ex) { throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL.", ex)); } if (endpoint.endsWith("/")) { this.endpoint = endpoint.substring(0, endpoint.length() - 1); } else { this.endpoint = endpoint; } return this; } /** * Sets the {@link AzureKeyCredential} to use when authenticating HTTP requests for this * {@link TextAnalyticsClientBuilder}. * * @param keyCredential {@link AzureKeyCredential} API key credential * @return The updated {@link TextAnalyticsClientBuilder} object. * @throws NullPointerException If {@code keyCredential} is null */ @Override public TextAnalyticsClientBuilder credential(AzureKeyCredential keyCredential) { this.credential = Objects.requireNonNull(keyCredential, "'keyCredential' cannot be null."); return this; } /** * Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java * <a href="https: * documentation for more details on proper usage of the {@link TokenCredential} type. * * @param tokenCredential {@link TokenCredential} used to authorize requests sent to the service. * @return The updated {@link TextAnalyticsClientBuilder} object. * @throws NullPointerException If {@code tokenCredential} is null. */ @Override public TextAnalyticsClientBuilder credential(TokenCredential tokenCredential) { Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null."); this.tokenCredential = tokenCredential; return this; } /** * Sets the {@link HttpLogOptions logging configuration} to use when sending and receiving requests to and from * the service. If a {@code logLevel} is not provided, default value of {@link HttpLogDetailLevel * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * * @param logOptions The {@link HttpLogOptions logging configuration} to use when sending and receiving requests to * and from the service. * @return The updated {@link TextAnalyticsClientBuilder} object. 
*/ public TextAnalyticsClientBuilder httpLogOptions(HttpLogOptions logOptions) { this.httpLogOptions = logOptions; return this; } /** * Gets the default Azure Text Analytics headers and query parameters allow list. * * @return The default {@link HttpLogOptions} allow list. */ public static HttpLogOptions getDefaultLogOptions() { return Constants.DEFAULT_LOG_OPTIONS_SUPPLIER.get(); } /** * Allows for setting common properties such as application ID, headers, proxy configuration, etc. Note that it is * recommended that this method be called with an instance of the {@link HttpClientOptions} * class (a subclass of the {@link ClientOptions} base class). The HttpClientOptions subclass provides more * configuration options suitable for HTTP clients, which is applicable for any class that implements this HttpTrait * interface. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * * @param clientOptions A configured instance of {@link HttpClientOptions}. * @return The updated TextAnalyticsClientBuilder object. * @see HttpClientOptions */ @Override public TextAnalyticsClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Adds a {@link HttpPipelinePolicy pipeline policy} to apply on each request sent. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * * @param policy A {@link HttpPipelinePolicy pipeline policy}. * @return The updated {@link TextAnalyticsClientBuilder} object. * @throws NullPointerException If {@code policy} is null. */ public TextAnalyticsClientBuilder addPolicy(HttpPipelinePolicy policy) { Objects.requireNonNull(policy, "'policy' cannot be null."); if (policy.getPipelinePosition() == HttpPipelinePosition.PER_CALL) { perCallPolicies.add(policy); } else { perRetryPolicies.add(policy); } return this; } /** * Sets the {@link HttpClient} to use for sending and receiving requests to and from the service. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. 
Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * * @param client The {@link HttpClient} to use for requests. * @return The updated {@link TextAnalyticsClientBuilder} object. */ @Override public TextAnalyticsClientBuilder httpClient(HttpClient client) { if (this.httpClient != null && client == null) { logger.info("HttpClient is being set to 'null' when it was previously configured."); } this.httpClient = client; return this; } /** * Sets the {@link HttpPipeline} to use for the service client. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * <p> * If {@code pipeline} is set, all other settings are ignored, aside from {@link * TextAnalyticsClientBuilder * TextAnalyticsClient}. * * @param httpPipeline {@link HttpPipeline} to use for sending service requests and receiving responses. * @return The updated {@link TextAnalyticsClientBuilder} object. */ @Override public TextAnalyticsClientBuilder pipeline(HttpPipeline httpPipeline) { if (this.httpPipeline != null && httpPipeline == null) { logger.info("HttpPipeline is being set to 'null' when it was previously configured."); } this.httpPipeline = httpPipeline; return this; } /** * Sets the configuration store that is used during construction of the service client. * <p> * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store used to * @return The updated {@link TextAnalyticsClientBuilder} object. */ @Override public TextAnalyticsClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the {@link RetryPolicy} that is used when each request is sent. * <p> * The default retry policy will be used if not provided {@link TextAnalyticsClientBuilder * build {@link TextAnalyticsAsyncClient} or {@link TextAnalyticsClient}. * <p> * Setting this is mutually exclusive with using {@link * * @param retryPolicy user's retry policy applied to each request. * @return The updated {@link TextAnalyticsClientBuilder} object. */ public TextAnalyticsClientBuilder retryPolicy(RetryPolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } /** * Sets the {@link RetryOptions} for all the requests made through the client. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. 
Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * <p> * Setting this is mutually exclusive with using {@link * * @param retryOptions The {@link RetryOptions} to use for all the requests made through the client. * * @return The updated {@link TextAnalyticsClientBuilder} object. */ @Override public TextAnalyticsClientBuilder retryOptions(RetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the {@link TextAnalyticsServiceVersion} that is used when making API requests. * <p> * If a service version is not provided, the service version that will be used will be the latest known service * version based on the version of the client library being used. If no service version is specified, updating to a * newer version the client library will have the result of potentially moving to a newer service version. * * @param version {@link TextAnalyticsServiceVersion} of the service to be used when making requests. * @return The updated {@link TextAnalyticsClientBuilder} object. */ public TextAnalyticsClientBuilder serviceVersion(TextAnalyticsServiceVersion version) { this.version = version; return this; } private boolean isConsolidatedServiceVersion(TextAnalyticsServiceVersion serviceVersion) { if (serviceVersion == null) { serviceVersion = TextAnalyticsServiceVersion.V2022_03_01; } return !(TextAnalyticsServiceVersion.V3_0 == serviceVersion || TextAnalyticsServiceVersion.V3_1 == serviceVersion); } }
class TextAnalyticsClientBuilder implements AzureKeyCredentialTrait<TextAnalyticsClientBuilder>, ConfigurationTrait<TextAnalyticsClientBuilder>, EndpointTrait<TextAnalyticsClientBuilder>, HttpTrait<TextAnalyticsClientBuilder>, TokenCredentialTrait<TextAnalyticsClientBuilder> { private static final String DEFAULT_SCOPE = "https: private static final String NAME = "name"; private static final String OCP_APIM_SUBSCRIPTION_KEY = "Ocp-Apim-Subscription-Key"; private static final String TEXT_ANALYTICS_PROPERTIES = "azure-ai-textanalytics.properties"; private static final String VERSION = "version"; private static final RetryPolicy DEFAULT_RETRY_POLICY = new RetryPolicy(); private static final ClientOptions DEFAULT_CLIENT_OPTIONS = new ClientOptions(); private static final HttpLogOptions DEFAULT_LOG_OPTIONS = new HttpLogOptions(); private static final HttpHeaders DEFAULT_HTTP_HEADERS = new HttpHeaders(); private final ClientLogger logger = new ClientLogger(TextAnalyticsClientBuilder.class); private final List<HttpPipelinePolicy> perCallPolicies = new ArrayList<>(); private final List<HttpPipelinePolicy> perRetryPolicies = new ArrayList<>(); private ClientOptions clientOptions; private Configuration configuration; private AzureKeyCredential credential; private String defaultCountryHint; private String defaultLanguage; private String endpoint; private HttpClient httpClient; private HttpLogOptions httpLogOptions; private HttpPipeline httpPipeline; private RetryPolicy retryPolicy; private RetryOptions retryOptions; private TokenCredential tokenCredential; private TextAnalyticsServiceVersion version; private static final String CLIENT_NAME; private static final String CLIENT_VERSION; static { Map<String, String> properties = CoreUtils.getProperties(TEXT_ANALYTICS_PROPERTIES); CLIENT_NAME = properties.getOrDefault(NAME, "UnknownName"); CLIENT_VERSION = properties.getOrDefault(VERSION, "UnknownVersion"); } /** * Creates a {@link TextAnalyticsClient} based on options set in the builder. Every time {@code buildClient()} is * called a new instance of {@link TextAnalyticsClient} is created. * * <p> * If {@link * endpoint} are used to create the {@link TextAnalyticsClient client}. All other builder settings are ignored * </p> * * @return A {@link TextAnalyticsClient} with the options set from the builder. * @throws NullPointerException if {@link * has not been set. * @throws IllegalArgumentException if {@link * @throws IllegalStateException If both {@link * and {@link */ public TextAnalyticsClient buildClient() { return new TextAnalyticsClient(buildAsyncClient()); } /** * Creates a {@link TextAnalyticsAsyncClient} based on options set in the builder. Every time {@code * buildAsyncClient()} is called a new instance of {@link TextAnalyticsAsyncClient} is created. * * <p> * If {@link * endpoint} are used to create the {@link TextAnalyticsClient client}. All other builder settings are ignored. * </p> * * @return A {@link TextAnalyticsAsyncClient} with the options set from the builder. * @throws NullPointerException if {@link * has not been set. * @throws IllegalArgumentException if {@link * @throws IllegalStateException If both {@link * and {@link */ /** * Set the default language option for one client. * * @param language default language * @return The updated {@link TextAnalyticsClientBuilder} object. */ public TextAnalyticsClientBuilder defaultLanguage(String language) { this.defaultLanguage = language; return this; } /** * Set the default country hint option for one client. 
* * @param countryHint default country hint * @return The updated {@link TextAnalyticsClientBuilder} object. */ public TextAnalyticsClientBuilder defaultCountryHint(String countryHint) { this.defaultCountryHint = countryHint; return this; } /** * Sets the service endpoint for the Azure Text Analytics instance. * * @param endpoint The URL of the Azure Text Analytics instance service requests to and receive responses from. * @return The updated {@link TextAnalyticsClientBuilder} object. * @throws NullPointerException if {@code endpoint} is null * @throws IllegalArgumentException if {@code endpoint} cannot be parsed into a valid URL. */ @Override public TextAnalyticsClientBuilder endpoint(String endpoint) { Objects.requireNonNull(endpoint, "'endpoint' cannot be null."); try { new URL(endpoint); } catch (MalformedURLException ex) { throw logger.logExceptionAsWarning(new IllegalArgumentException("'endpoint' must be a valid URL.", ex)); } if (endpoint.endsWith("/")) { this.endpoint = endpoint.substring(0, endpoint.length() - 1); } else { this.endpoint = endpoint; } return this; } /** * Sets the {@link AzureKeyCredential} to use when authenticating HTTP requests for this * {@link TextAnalyticsClientBuilder}. * * @param keyCredential {@link AzureKeyCredential} API key credential * @return The updated {@link TextAnalyticsClientBuilder} object. * @throws NullPointerException If {@code keyCredential} is null */ @Override public TextAnalyticsClientBuilder credential(AzureKeyCredential keyCredential) { this.credential = Objects.requireNonNull(keyCredential, "'keyCredential' cannot be null."); return this; } /** * Sets the {@link TokenCredential} used to authorize requests sent to the service. Refer to the Azure SDK for Java * <a href="https: * documentation for more details on proper usage of the {@link TokenCredential} type. * * @param tokenCredential {@link TokenCredential} used to authorize requests sent to the service. * @return The updated {@link TextAnalyticsClientBuilder} object. * @throws NullPointerException If {@code tokenCredential} is null. */ @Override public TextAnalyticsClientBuilder credential(TokenCredential tokenCredential) { Objects.requireNonNull(tokenCredential, "'tokenCredential' cannot be null."); this.tokenCredential = tokenCredential; return this; } /** * Sets the {@link HttpLogOptions logging configuration} to use when sending and receiving requests to and from * the service. If a {@code logLevel} is not provided, default value of {@link HttpLogDetailLevel * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * * @param logOptions The {@link HttpLogOptions logging configuration} to use when sending and receiving requests to * and from the service. * @return The updated {@link TextAnalyticsClientBuilder} object. 
*/ public TextAnalyticsClientBuilder httpLogOptions(HttpLogOptions logOptions) { this.httpLogOptions = logOptions; return this; } /** * Gets the default Azure Text Analytics headers and query parameters allow list. * * @return The default {@link HttpLogOptions} allow list. */ public static HttpLogOptions getDefaultLogOptions() { return Constants.DEFAULT_LOG_OPTIONS_SUPPLIER.get(); } /** * Allows for setting common properties such as application ID, headers, proxy configuration, etc. Note that it is * recommended that this method be called with an instance of the {@link HttpClientOptions} * class (a subclass of the {@link ClientOptions} base class). The HttpClientOptions subclass provides more * configuration options suitable for HTTP clients, which is applicable for any class that implements this HttpTrait * interface. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * * @param clientOptions A configured instance of {@link HttpClientOptions}. * @return The updated TextAnalyticsClientBuilder object. * @see HttpClientOptions */ @Override public TextAnalyticsClientBuilder clientOptions(ClientOptions clientOptions) { this.clientOptions = clientOptions; return this; } /** * Adds a {@link HttpPipelinePolicy pipeline policy} to apply on each request sent. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * * @param policy A {@link HttpPipelinePolicy pipeline policy}. * @return The updated {@link TextAnalyticsClientBuilder} object. * @throws NullPointerException If {@code policy} is null. */ public TextAnalyticsClientBuilder addPolicy(HttpPipelinePolicy policy) { Objects.requireNonNull(policy, "'policy' cannot be null."); if (policy.getPipelinePosition() == HttpPipelinePosition.PER_CALL) { perCallPolicies.add(policy); } else { perRetryPolicies.add(policy); } return this; } /** * Sets the {@link HttpClient} to use for sending and receiving requests to and from the service. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. 
Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * * @param client The {@link HttpClient} to use for requests. * @return The updated {@link TextAnalyticsClientBuilder} object. */ @Override public TextAnalyticsClientBuilder httpClient(HttpClient client) { if (this.httpClient != null && client == null) { logger.info("HttpClient is being set to 'null' when it was previously configured."); } this.httpClient = client; return this; } /** * Sets the {@link HttpPipeline} to use for the service client. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * <p> * If {@code pipeline} is set, all other settings are ignored, aside from {@link * TextAnalyticsClientBuilder * TextAnalyticsClient}. * * @param httpPipeline {@link HttpPipeline} to use for sending service requests and receiving responses. * @return The updated {@link TextAnalyticsClientBuilder} object. */ @Override public TextAnalyticsClientBuilder pipeline(HttpPipeline httpPipeline) { if (this.httpPipeline != null && httpPipeline == null) { logger.info("HttpPipeline is being set to 'null' when it was previously configured."); } this.httpPipeline = httpPipeline; return this; } /** * Sets the configuration store that is used during construction of the service client. * <p> * The default configuration store is a clone of the {@link Configuration * configuration store}, use {@link Configuration * * @param configuration The configuration store used to * @return The updated {@link TextAnalyticsClientBuilder} object. */ @Override public TextAnalyticsClientBuilder configuration(Configuration configuration) { this.configuration = configuration; return this; } /** * Sets the {@link RetryPolicy} that is used when each request is sent. * <p> * The default retry policy will be used if not provided {@link TextAnalyticsClientBuilder * build {@link TextAnalyticsAsyncClient} or {@link TextAnalyticsClient}. * <p> * Setting this is mutually exclusive with using {@link * * @param retryPolicy user's retry policy applied to each request. * @return The updated {@link TextAnalyticsClientBuilder} object. */ public TextAnalyticsClientBuilder retryPolicy(RetryPolicy retryPolicy) { this.retryPolicy = retryPolicy; return this; } /** * Sets the {@link RetryOptions} for all the requests made through the client. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline} is specified, this takes precedence over all other APIs in the trait, and * they will be ignored. If no {@link HttpPipeline} is specified, a HTTP pipeline will be constructed internally * based on the settings provided to this trait. 
Additionally, there may be other APIs in types that implement this * trait that are also ignored if an {@link HttpPipeline} is specified, so please be sure to refer to the * documentation of types that implement this trait to understand the full set of implications.</p> * <p> * Setting this is mutually exclusive with using {@link * * @param retryOptions The {@link RetryOptions} to use for all the requests made through the client. * * @return The updated {@link TextAnalyticsClientBuilder} object. */ @Override public TextAnalyticsClientBuilder retryOptions(RetryOptions retryOptions) { this.retryOptions = retryOptions; return this; } /** * Sets the {@link TextAnalyticsServiceVersion} that is used when making API requests. * <p> * If a service version is not provided, the service version that will be used will be the latest known service * version based on the version of the client library being used. If no service version is specified, updating to a * newer version the client library will have the result of potentially moving to a newer service version. * * @param version {@link TextAnalyticsServiceVersion} of the service to be used when making requests. * @return The updated {@link TextAnalyticsClientBuilder} object. */ public TextAnalyticsClientBuilder serviceVersion(TextAnalyticsServiceVersion version) { this.version = version; return this; } private boolean isConsolidatedServiceVersion(TextAnalyticsServiceVersion serviceVersion) { if (serviceVersion == null) { serviceVersion = TextAnalyticsServiceVersion.V2022_04_01_PREVIEW; } return !(TextAnalyticsServiceVersion.V3_0 == serviceVersion || TextAnalyticsServiceVersion.V3_1 == serviceVersion); } }
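A minimal usage sketch, not taken from the source, showing how the builder settings documented above compose; "{endpoint}" and "{key}" are placeholders, and the choice of V3_1 and BASIC logging here is arbitrary for illustration:

import com.azure.ai.textanalytics.TextAnalyticsClient;
import com.azure.ai.textanalytics.TextAnalyticsClientBuilder;
import com.azure.ai.textanalytics.TextAnalyticsServiceVersion;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.http.policy.HttpLogDetailLevel;

public class BuildTextAnalyticsClient {
    public static void main(String[] args) {
        TextAnalyticsClient client = new TextAnalyticsClientBuilder()
            .endpoint("{endpoint}")                      // validated and trimmed of a trailing '/' by endpoint(String)
            .credential(new AzureKeyCredential("{key}")) // key-based authentication
            .defaultCountryHint("US")                    // used when a document carries no explicit country hint
            .httpLogOptions(TextAnalyticsClientBuilder.getDefaultLogOptions()
                .setLogLevel(HttpLogDetailLevel.BASIC))  // allow list comes from the builder's defaults
            .serviceVersion(TextAnalyticsServiceVersion.V3_1)
            .buildClient();
        System.out.println("Client ready: " + (client != null));
    }
}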
Is the idea here that samples run for different service versions?
public static void main(String[] args) { TextAnalyticsClient client = new TextAnalyticsClientBuilder() .credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_API_KEY"))) .endpoint(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_ENDPOINT")) .buildClient(); String document = "The hotel was dark and unclean. I like Microsoft."; final DocumentSentiment documentSentiment = client.analyzeSentiment(document); SentimentConfidenceScores scores = documentSentiment.getConfidenceScores(); System.out.printf( "Recognized document sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n", documentSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative()); documentSentiment.getSentences().forEach(sentenceSentiment -> { SentimentConfidenceScores sentenceScores = sentenceSentiment.getConfidenceScores(); System.out.printf("Recognized sentence sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n", sentenceSentiment.getSentiment(), sentenceScores.getPositive(), sentenceScores.getNeutral(), sentenceScores.getNegative()); }); }
.credential(new AzureKeyCredential(Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_API_KEY")))
public static void main(String[] args) { TextAnalyticsClient client = new TextAnalyticsClientBuilder() .credential(new AzureKeyCredential("{key}")) .endpoint("{endpoint}") .buildClient(); String document = "The hotel was dark and unclean. I like Microsoft."; final DocumentSentiment documentSentiment = client.analyzeSentiment(document); SentimentConfidenceScores scores = documentSentiment.getConfidenceScores(); System.out.printf( "Recognized document sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n", documentSentiment.getSentiment(), scores.getPositive(), scores.getNeutral(), scores.getNegative()); documentSentiment.getSentences().forEach(sentenceSentiment -> { SentimentConfidenceScores sentenceScores = sentenceSentiment.getConfidenceScores(); System.out.printf("Recognized sentence sentiment: %s, positive score: %f, neutral score: %f, negative score: %f.%n", sentenceSentiment.getSentiment(), sentenceScores.getPositive(), sentenceScores.getNeutral(), sentenceScores.getNegative()); }); }
class AnalyzeSentiment { /** * Main method to invoke this demo about how to analyze the sentiment of document. * * @param args Unused arguments to the program. */ }
class AnalyzeSentiment { /** * Main method to invoke this demo about how to analyze the sentiment of document. * * @param args Unused arguments to the program. */ }
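If the samples are indeed meant to run against different service versions, the version could itself be read from configuration rather than hard-coded. A rough sketch under that assumption; the variable name AZURE_TEXT_ANALYTICS_SERVICE_VERSION is hypothetical and not part of the SDK:

import com.azure.ai.textanalytics.TextAnalyticsClient;
import com.azure.ai.textanalytics.TextAnalyticsClientBuilder;
import com.azure.ai.textanalytics.TextAnalyticsServiceVersion;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.Configuration;

public class AnalyzeSentimentWithConfiguredVersion {
    public static void main(String[] args) {
        // Hypothetical variable name, used only to illustrate the idea.
        String requested = Configuration.getGlobalConfiguration().get("AZURE_TEXT_ANALYTICS_SERVICE_VERSION");
        TextAnalyticsClient client = new TextAnalyticsClientBuilder()
            .credential(new AzureKeyCredential("{key}"))
            .endpoint("{endpoint}")
            // Fall back to a fixed version when no override is supplied.
            .serviceVersion(requested == null
                ? TextAnalyticsServiceVersion.V3_1
                : TextAnalyticsServiceVersion.valueOf(requested))
            .buildClient();
        System.out.println("Built client for service version: " + (requested == null ? "default" : requested));
    }
}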
Has a user never been able to override the stringIndexType default?
private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); }
.setStringIndexType(StringIndexType.UTF16CODE_UNIT)
private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); }
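The parameters above pin the string index type to UTF-16 code units, which is also how java.lang.String indexes text, so offsets returned by the service line up with substring/charAt positions. A small standalone illustration (plain Java, not SDK code) of why the unit matters:

public class Utf16OffsetDemo {
    public static void main(String[] args) {
        String text = "I saw 😀 yesterday";
        // length() counts UTF-16 code units, so the emoji contributes 2 rather than 1.
        System.out.println(text.length());                          // 18
        // Code points count the emoji once.
        System.out.println(text.codePointCount(0, text.length()));  // 17
        // Offsets expressed in UTF-16 code units can be used directly with substring().
        System.out.println(text.substring(6, 8));                   // the emoji
    }
}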
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); 
analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new 
RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { 
jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> 
analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { 
AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); 
recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult 
= (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); 
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? 
null : error.getCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( 
operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference) { if (CoreUtils.isNullOrEmpty(targetReference)) { throw logger.logExceptionAsError(new RuntimeException( "Expected an error with a target field referencing an action but did not get one")); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) 
.setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), 
finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { 
jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setOpinionMining(action.isIncludeOpinionMining()) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> 
analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if 
(!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); 
recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult 
= (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); 
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? 
null : error.getCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( 
operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; case PARTIALLY_SUCCEEDED: status = LongRunningOperationStatus.fromString("partiallySucceeded", true); break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference, String errorMessage) { if (CoreUtils.isNullOrEmpty(targetReference)) { if (CoreUtils.isNullOrEmpty(errorMessage)) { errorMessage = "Expected an error with a target field referencing an action but did not get one"; } throw logger.logExceptionAsError(new RuntimeException(errorMessage)); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
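Editorial note: the REGEX_ACTION_ERROR_TARGET constant is not readable in the flattened context above (the String.format pattern string was lost in extraction), so the following is a minimal, hedged sketch of what parseActionErrorTarget appears to do. It assumes the service reports an action error with a JSON-pointer-style target such as "#/tasks/entityRecognitionTasks/0"; the exact pattern string and the full task-name alternation are assumptions, while the use of group(1) as the task name and group(2) as the action index matches the class above.

// Sketch only: the "#/tasks/" prefix and the task-name alternation are assumed,
// since the original REGEX_ACTION_ERROR_TARGET string is not recoverable here.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class ActionErrorTargetParserSketch {
    // Assumed shape of an action error target reported by the service.
    private static final Pattern TARGET_PATTERN = Pattern.compile(
        "#/tasks/(entityRecognitionTasks|entityRecognitionPiiTasks|keyPhraseExtractionTasks)/(\\d+)");

    // Returns {taskName, taskIndex} or null if the target does not match.
    static String[] parse(String targetReference) {
        Matcher matcher = TARGET_PATTERN.matcher(targetReference);
        if (!matcher.find()) {
            return null;
        }
        return new String[] { matcher.group(1), matcher.group(2) };
    }

    public static void main(String[] args) {
        // Example: an error reported against the second PII recognition action.
        String[] pair = parse("#/tasks/entityRecognitionPiiTasks/1");
        System.out.println(pair[0] + " at index " + pair[1]); // entityRecognitionPiiTasks at index 1
    }
}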
this should include fhirVersion if you're exposing fhirBundle on the result
private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) {
    return (HealthcareTaskParameters) new HealthcareTaskParameters()
        .setStringIndexType(StringIndexType.UTF16CODE_UNIT)
        .setModelVersion(action.getModelVersion())
        .setLoggingOptOut(action.isServiceLogsDisabled());
}
return (HealthcareTaskParameters) new HealthcareTaskParameters()
private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) {
    return (HealthcareTaskParameters) new HealthcareTaskParameters()
        .setStringIndexType(StringIndexType.UTF16CODE_UNIT)
        .setModelVersion(action.getModelVersion())
        .setLoggingOptOut(action.isServiceLogsDisabled());
}
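A minimal sketch of what the review comment above is asking for, not the actual SDK implementation: it assumes AnalyzeHealthcareEntitiesAction exposes a getFhirVersion() accessor and that the generated HealthcareTaskParameters model has a matching setFhirVersion(...) setter; both names are hypothetical and may differ from the real models, and a conversion between the public and implementation FhirVersion types may be needed.

// Hypothetical sketch: forward the FHIR version so the service can populate fhirBundle on the result.
private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) {
    final HealthcareTaskParameters parameters = (HealthcareTaskParameters) new HealthcareTaskParameters()
        .setStringIndexType(StringIndexType.UTF16CODE_UNIT)
        .setModelVersion(action.getModelVersion())
        .setLoggingOptOut(action.isServiceLogsDisabled());
    // Only set the assumed fhirVersion field when the caller asked for FHIR output,
    // so the request payload stays unchanged otherwise.
    if (action.getFhirVersion() != null) {
        parameters.setFhirVersion(action.getFhirVersion());
    }
    return parameters;
}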
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); 
analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new 
RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { 
jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> 
analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { 
AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); 
recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult 
= (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); 
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? 
null : error.getCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( 
operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference) { if (CoreUtils.isNullOrEmpty(targetReference)) { throw logger.logExceptionAsError(new RuntimeException( "Expected an error with a target field referencing an action but did not get one")); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) 
.setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), 
finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { 
jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setOpinionMining(action.isIncludeOpinionMining()) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> 
analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if 
(!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); 
recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult 
= (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); 
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? 
null : error.getCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( 
operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; case PARTIALLY_SUCCEEDED: status = LongRunningOperationStatus.fromString("partiallySucceeded", true); break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference, String errorMessage) { if (CoreUtils.isNullOrEmpty(targetReference)) { if (CoreUtils.isNullOrEmpty(errorMessage)) { errorMessage = "Expected an error with a target field referencing an action but did not get one"; } throw logger.logExceptionAsError(new RuntimeException(errorMessage)); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
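Note on the parseActionErrorTarget helper at the end of the class above: it pulls a task name and task index out of the error's target reference with a two-capture-group regex (the REGEX_ACTION_ERROR_TARGET format string itself is truncated in this dump, so the SDK's exact pattern is not shown here). A minimal standalone sketch of that parsing, assuming a target of the form "#/tasks/entityRecognitionTasks/0" — an illustrative shape, not necessarily the service's actual target format:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ParseActionErrorTargetSketch {
    public static void main(String[] args) {
        // Illustrative two-group pattern: group 1 = task collection name, group 2 = task index.
        Pattern pattern = Pattern.compile("#/tasks/(\\w+)/(\\d+)");
        Matcher matcher = pattern.matcher("#/tasks/entityRecognitionTasks/0");
        if (matcher.find()) {
            String taskName = matcher.group(1);  // "entityRecognitionTasks"
            String taskIndex = matcher.group(2); // "0"
            System.out.println(taskName + " / " + taskIndex);
        }
    }
}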
includeOpinionMining?
private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) {
    return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters()
        .setStringIndexType(StringIndexType.UTF16CODE_UNIT)
        .setModelVersion(action.getModelVersion())
        .setLoggingOptOut(action.isServiceLogsDisabled());
}
return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters()
private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) {
    return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters()
        .setStringIndexType(StringIndexType.UTF16CODE_UNIT)
        .setOpinionMining(action.isIncludeOpinionMining())
        .setModelVersion(action.getModelVersion())
        .setLoggingOptOut(action.isServiceLogsDisabled());
}
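The before/after pair above shows the fix for the "includeOpinionMining?" comment: the action's opinion-mining flag is now forwarded onto the generated SentimentAnalysisTaskParameters. A short caller-side sketch of how that flag would be set, assuming AnalyzeSentimentAction exposes a setIncludeOpinionMining(boolean) setter matching the isIncludeOpinionMining() getter used above (the setter name is an assumption, not confirmed by this dump):

// Sketch only (types from com.azure.ai.textanalytics.models): enables opinion mining on the
// sentiment action so that getSentimentAnalysisTaskParameters(action) forwards it via setOpinionMining(...).
TextAnalyticsActions actions = new TextAnalyticsActions()
    .setAnalyzeSentimentActions(
        new AnalyzeSentimentAction().setIncludeOpinionMining(true)); // assumed setter name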
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); 
analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new 
RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { 
jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch 
(RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), 
analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } 
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); 
recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult 
= (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); 
TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? 
null : error.getCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( 
operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference) { if (CoreUtils.isNullOrEmpty(targetReference)) { throw logger.logExceptionAsError(new RuntimeException( "Expected an error with a target field referencing an action but did not get one")); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
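The AnalyzeActionsAsyncClient above maps a service error back to the originating action by parsing the error's target reference into an action name and an index (parseActionErrorTarget, backed by the PATTERN constant). A minimal standalone sketch of that idea, assuming a target shape like #/tasks/<taskName>/<index>; the pattern used here is an illustrative stand-in, not the client's actual REGEX_ACTION_ERROR_TARGET constant:

// Standalone demo, separate from the SDK class above. It splits a target reference such as
// "#/tasks/entityRecognitionTasks/0" into the task name and the task index using a regex.
// The pattern is an assumption for illustration only.
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class ActionErrorTargetDemo {
    // Assumed shape of the error target reference: "#/tasks/<taskName>/<index>".
    private static final Pattern TARGET_PATTERN = Pattern.compile("#/tasks/([a-zA-Z]+)/(\\d+)");

    public static void main(String[] args) {
        String target = "#/tasks/entityRecognitionTasks/0";
        Matcher matcher = TARGET_PATTERN.matcher(target);
        if (matcher.find()) {
            String taskName = matcher.group(1);                  // e.g. "entityRecognitionTasks"
            int taskIndex = Integer.parseInt(matcher.group(2));  // e.g. 0
            System.out.println(taskName + " -> index " + taskIndex);
        } else {
            // Mirrors the client's behaviour of failing fast when the target cannot be parsed.
            throw new IllegalArgumentException("Unexpected error target: " + target);
        }
    }
}

The extracted pair is then used, as in the client code, to look up the matching action result list (recognizeEntitiesActionResults, recognizePiiEntitiesActionResults, and so on) and mark that single action as errored without failing the whole operation.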
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) 
.setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), 
finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { 
jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch 
(RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), 
analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { 
ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( 
"Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? null : error.getErrorCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { 
RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } 
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult = (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final 
ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? 
null : error.getCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( 
operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; case PARTIALLY_SUCCEEDED: status = LongRunningOperationStatus.fromString("partiallySucceeded", true); break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference, String errorMessage) { if (CoreUtils.isNullOrEmpty(targetReference)) { if (CoreUtils.isNullOrEmpty(errorMessage)) { errorMessage = "Expected an error with a target field referencing an action but did not get one"; } throw logger.logExceptionAsError(new RuntimeException(errorMessage)); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
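Note: the parseActionErrorTarget helper above pulls an action name and index out of an error's target reference via the precompiled PATTERN (matcher.group(1) / matcher.group(2)). The exact regex literal is not visible in this excerpt, so the following is only a minimal standalone sketch of the same group-capture approach, assuming targets shaped like "#/tasks/entityRecognitionTasks/0"; the pattern string and class name are illustrative, not the SDK's actual constants.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public final class ActionErrorTargetParser {
        // Hypothetical pattern: the real REGEX_ACTION_ERROR_TARGET is built from the
        // task-name constants; here we simply assume "#/tasks/<taskName>/<index>".
        private static final Pattern TARGET_PATTERN =
            Pattern.compile("#/tasks/(\\w+)/(\\d+)", Pattern.MULTILINE);

        // Returns {taskName, taskIndex}, mirroring the null/empty guard in
        // parseActionErrorTarget above.
        static String[] parse(String targetReference) {
            if (targetReference == null || targetReference.isEmpty()) {
                throw new RuntimeException(
                    "Expected an error with a target field referencing an action but did not get one");
            }
            Matcher matcher = TARGET_PATTERN.matcher(targetReference);
            String[] taskNameIdPair = new String[2];
            while (matcher.find()) {
                taskNameIdPair[0] = matcher.group(1); // e.g. "entityRecognitionTasks"
                taskNameIdPair[1] = matcher.group(2); // e.g. "0"
            }
            return taskNameIdPair;
        }

        public static void main(String[] args) {
            String[] pair = parse("#/tasks/entityRecognitionTasks/0");
            System.out.println(pair[0] + " -> index " + pair[1]); // entityRecognitionTasks -> index 0
        }
    }

Run as-is it prints the taskName/taskIndex pair that the error-routing branches above dispatch on to mark the matching action result as errored.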
why commented out?
private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, 
taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; }
private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { 
ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( 
"Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? null : error.getErrorCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; }
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); 
analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new 
RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { 
jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> 
multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if 
(taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult = (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, 
toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? 
null : error.getCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( 
operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference) { if (CoreUtils.isNullOrEmpty(targetReference)) { throw logger.logExceptionAsError(new RuntimeException( "Expected an error with a target field referencing an action but did not get one")); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
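Note on the polling methods above: both processAnalyzedModelResponse variants reduce the service-side job status to a long-running-operation status before handing it back to the poller. A minimal sketch of that mapping, using stand-in enums (JobStatus, LroStatus) rather than the SDK's own State and LongRunningOperationStatus types, could look like this; the enum and class names are assumptions made only for the sketch.

// Stand-in types for illustration; the SDK uses its own status enums.
enum JobStatus { NOT_STARTED, RUNNING, SUCCEEDED, CANCELLED, PARTIALLY_COMPLETED }
enum LroStatus { IN_PROGRESS, SUCCESSFULLY_COMPLETED, USER_CANCELLED, OTHER }

final class JobStatusMapper {
    static LroStatus toLroStatus(JobStatus jobStatus) {
        switch (jobStatus) {
            case NOT_STARTED:
            case RUNNING:
                return LroStatus.IN_PROGRESS;            // keep polling
            case SUCCEEDED:
                return LroStatus.SUCCESSFULLY_COMPLETED; // stop polling and fetch the result pages
            case CANCELLED:
                return LroStatus.USER_CANCELLED;
            default:
                return LroStatus.OTHER;                  // the SDK maps remaining states via fromString(...)
        }
    }

    public static void main(String[] args) {
        System.out.println(toLroStatus(JobStatus.RUNNING));   // IN_PROGRESS
        System.out.println(toLroStatus(JobStatus.SUCCEEDED)); // SUCCESSFULLY_COMPLETED
    }
}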
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) 
.setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), 
finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { 
jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setOpinionMining(action.isIncludeOpinionMining()) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> 
multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if 
(taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult = (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, 
toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? 
null : error.getCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( 
operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; case PARTIALLY_SUCCEEDED: status = LongRunningOperationStatus.fromString("partiallySucceeded", true); break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference, String errorMessage) { if (CoreUtils.isNullOrEmpty(targetReference)) { if (CoreUtils.isNullOrEmpty(errorMessage)) { errorMessage = "Expected an error with a target field referencing an action but did not get one"; } throw logger.logExceptionAsError(new RuntimeException(errorMessage)); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
what's the behavior in the Java library if a target is not returned by the service?
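A hedged sketch of the answer, condensed from the revised parseActionErrorTarget helper visible in this section (PATTERN, CoreUtils, and logger are the fields of the surrounding class shown here, not new API): when the service omits the target field, the Java library does not try to index into any action-result list; it fails the poll by throwing a RuntimeException through the ClientLogger, reusing the service-supplied error message when one is present and falling back to a generic message otherwise.

    // Sketch only, distilled from the revised helper in this record; not the SDK source verbatim.
    private String[] parseActionErrorTarget(String targetReference, String errorMessage) {
        if (CoreUtils.isNullOrEmpty(targetReference)) {
            // No target returned by the service: surface an error instead of silently
            // skipping, preferring the service message over a generic one.
            if (CoreUtils.isNullOrEmpty(errorMessage)) {
                errorMessage = "Expected an error with a target field referencing an action but did not get one";
            }
            throw logger.logExceptionAsError(new RuntimeException(errorMessage));
        }
        final Matcher matcher = PATTERN.matcher(targetReference);
        final String[] taskNameIdPair = new String[2];
        while (matcher.find()) {
            taskNameIdPair[0] = matcher.group(1); // task group name, e.g. "entityRecognitionTasks"
            taskNameIdPair[1] = matcher.group(2); // index of the action within that task group
        }
        return taskNameIdPair;
    }

Because the poller wraps this call in a try/catch that maps RuntimeException to monoError, the missing target ultimately reaches the caller as an onError signal on the PollerFlux rather than a NullPointerException.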
private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } 
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult = (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); 
extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { 
actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? null : error.getCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; }
private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } 
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult = (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); 
extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if 
(CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? null : error.getCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; }
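The error-assignment block above resolves an action-level error to one specific result by task name and index through a long if/else chain. A minimal, self-contained sketch of the same lookup, keyed by task name in a map, is shown below; the Result type and the map-based dispatch are illustrative assumptions for this sketch, not the SDK's own types.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class ActionErrorDispatchSketch {
    // Illustrative stand-in for the SDK's per-action result types (assumption, not the real type).
    static final class Result {
        boolean isError;
        String errorMessage;
    }

    // Mirrors the if/else chain above: find the result list for the task name,
    // index into it, and mark that single result as failed.
    static void applyError(Map<String, List<Result>> resultsByTaskName,
                           String taskName, int index, String message) {
        List<Result> results = resultsByTaskName.get(taskName);
        if (results == null) {
            throw new RuntimeException("Invalid task name in target reference, " + taskName);
        }
        Result result = results.get(index);
        result.isError = true;
        result.errorMessage = message;
    }

    public static void main(String[] args) {
        Map<String, List<Result>> results = new HashMap<>();
        List<Result> sentimentResults = new ArrayList<>();
        sentimentResults.add(new Result());
        results.put("sentimentAnalysisTasks", sentimentResults);
        applyError(results, "sentimentAnalysisTasks", 0, "Document text is empty.");
        // Prints: true
        System.out.println(results.get("sentimentAnalysisTasks").get(0).isError);
    }
}

Keeping the dispatch in a map keyed by the task-name constants would also avoid having to extend the if/else chain each time a new task type is added; this is a design observation, not a description of the shipped SDK.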
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); 
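// Note: this is the fallback path taken only when 'service' (the newer Language API client) is null.
// The same documents and actions are then submitted through the older analyze endpoint on
// 'legacyService', polled via analyzeStatusWithResponseAsync, and surfaced through the same
// AnalyzeActionsResultPagedFlux as the Language API path above.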
analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new 
RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { 
jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> 
analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { 
AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { final String[] targetPair = parseActionErrorTarget(error.getTarget()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), 
tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference) { if (CoreUtils.isNullOrEmpty(targetReference)) { throw logger.logExceptionAsError(new RuntimeException( "Expected an error with a target field referencing an action but did not get one")); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
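In the class above, the String.format literal for REGEX_ACTION_ERROR_TARGET appears truncated in this record (the format string itself is missing). A hedged reconstruction, consistent with parseActionErrorTarget's use of two capture groups (task name, then a numeric index) and with the task-name constants declared in the class, is sketched below; the "#/tasks/" prefix is an assumption, and the literal in the shipped SDK may differ.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class ActionErrorTargetRegexSketch {
    // Hedged reconstruction of the missing pattern literal; group(1) = task name, group(2) = index.
    // The "#/tasks/" prefix is assumed and is not visible in this record.
    static final Pattern PATTERN = Pattern.compile(
        "#/tasks/(entityRecognitionTasks|entityRecognitionPiiTasks|keyPhraseExtractionTasks"
            + "|entityLinkingTasks|sentimentAnalysisTasks|extractiveSummarizationTasks"
            + "|customEntityRecognitionTasks|customClassificationTasks|customMultiClassificationTasks)"
            + "/(\\d+)",
        Pattern.MULTILINE);

    public static void main(String[] args) {
        Matcher matcher = PATTERN.matcher("#/tasks/sentimentAnalysisTasks/3");
        if (matcher.find()) {
            // Prints: sentimentAnalysisTasks 3
            System.out.println(matcher.group(1) + " " + matcher.group(2));
        }
    }
}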
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) 
.setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), 
finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { 
jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setOpinionMining(action.isIncludeOpinionMining()) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> 
analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if 
(!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), 
tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; case PARTIALLY_SUCCEEDED: status = LongRunningOperationStatus.fromString("partiallySucceeded", true); break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference, String errorMessage) { if (CoreUtils.isNullOrEmpty(targetReference)) { if (CoreUtils.isNullOrEmpty(errorMessage)) { errorMessage = "Expected an error with a target field referencing an action but did not get one"; } throw logger.logExceptionAsError(new RuntimeException(errorMessage)); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
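The conversion and polling helpers above sit behind the analyze-actions long-running operation. As a rough usage sketch only (the `client` and `documents` variables are assumed to exist, and the chaining follows the public TextAnalyticsAsyncClient surface; treat the exact calls as illustrative):

```java
// Illustrative sketch only: drives the analyze-actions LRO that the helpers above implement.
TextAnalyticsActions actions = new TextAnalyticsActions()
    .setRecognizeEntitiesActions(new RecognizeEntitiesAction())
    .setExtractKeyPhrasesActions(new ExtractKeyPhrasesAction());

client.beginAnalyzeActions(documents, actions, "en", new AnalyzeActionsOptions())
    .last()                                              // wait for the terminal poll response
    .flatMap(pollResponse -> pollResponse.getFinalResult())
    .flatMapMany(pagedFlux -> pagedFlux)                 // AnalyzeActionsResultPagedFlux -> Flux of results
    .subscribe(actionsResult ->
        actionsResult.getRecognizeEntitiesResults().forEach(actionResult ->
            System.out.println("Completed action: " + actionResult.getActionName())));
```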
Should the version have 'preview' appended to it?
private boolean isConsolidatedServiceVersion(TextAnalyticsServiceVersion serviceVersion) { if (serviceVersion == null) { serviceVersion = TextAnalyticsServiceVersion.V2022_04_01; } return !(TextAnalyticsServiceVersion.V3_0 == serviceVersion || TextAnalyticsServiceVersion.V3_1 == serviceVersion); }
serviceVersion = TextAnalyticsServiceVersion.V2022_04_01;
private boolean isConsolidatedServiceVersion(TextAnalyticsServiceVersion serviceVersion) { if (serviceVersion == null) { serviceVersion = TextAnalyticsServiceVersion.V2022_04_01_PREVIEW; } return !(TextAnalyticsServiceVersion.V3_0 == serviceVersion || TextAnalyticsServiceVersion.V3_1 == serviceVersion); }
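For context on the question above: service versions in azure-core based clients are usually modeled as an enum implementing ServiceVersion, so appending the preview suffix amounts to adding (or renaming to) a preview constant. A minimal sketch, assuming a wire value of "2022-04-01-preview" (the exact string is an assumption, not copied from the SDK):

```java
// Sketch of the usual ServiceVersion enum shape; the preview wire string below is assumed.
public enum TextAnalyticsServiceVersion implements ServiceVersion {
    V3_0("v3.0"),
    V3_1("v3.1"),
    V2022_04_01_PREVIEW("2022-04-01-preview"); // assumed wire value

    private final String version;

    TextAnalyticsServiceVersion(String version) {
        this.version = version;
    }

    @Override
    public String getVersion() {
        return version;
    }

    /** Returns the latest supported service version, here the preview one. */
    public static TextAnalyticsServiceVersion getLatest() {
        return V2022_04_01_PREVIEW;
    }
}
```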
class that implements this HttpTrait * interface. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline}
class that implements this HttpTrait * interface. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline}
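The Javadoc fragment above refers to the precedence order of the HttpTrait APIs: when a fully built HttpPipeline is supplied, it is used as-is and the individually configured HTTP settings are ignored. A hedged illustration (builder method names follow azure-core HttpTrait conventions, and `endpoint`/`key` are assumed variables; treat the exact combination as an assumption):

```java
// Sketch: an explicitly supplied pipeline takes precedence over per-setting options.
HttpPipeline pipeline = new HttpPipelineBuilder()
    .policies(new AddDatePolicy(), new RetryPolicy())
    .build();

TextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder()
    .endpoint(endpoint)
    .credential(new AzureKeyCredential(key))
    .pipeline(pipeline)              // takes precedence over the HTTP settings below
    .retryPolicy(new RetryPolicy())  // effectively ignored once a pipeline is set
    .buildAsyncClient();
```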
We should generally avoid the use of [magic numbers](https://stackoverflow.com/questions/47882/what-is-a-magic-number-and-why-is-it-bad) and replace them with named constants that state what they stand for.
public static String parseOperationId(String operationLocation) { if (!CoreUtils.isNullOrEmpty(operationLocation)) { int lastIndex = operationLocation.lastIndexOf('/'); if (lastIndex != -1) { return operationLocation.substring(lastIndex + 1, lastIndex + 37); } } throw LOGGER.logExceptionAsError( new RuntimeException("Failed to parse operation header for operation Id from: " + operationLocation)); }
return operationLocation.substring(lastIndex + 1, lastIndex + 37);
public static String parseOperationId(String operationLocation) { if (!CoreUtils.isNullOrEmpty(operationLocation)) { final int indexBeforeOperationId = operationLocation.lastIndexOf('/'); if (indexBeforeOperationId != -1) { return operationLocation.substring(indexBeforeOperationId + 1, indexBeforeOperationId + OPERATION_ID_LENGTH); } } throw LOGGER.logExceptionAsError( new RuntimeException("Failed to parse operation header for operation Id from: " + operationLocation)); }
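The revised method references an OPERATION_ID_LENGTH constant whose declaration is not visible in this snippet. A minimal sketch of the named-constant approach, assuming the operation ID is a standard 36-character UUID (the substring bounds here are written to extract exactly 36 characters, which may differ from how the constant is actually defined in the SDK):

```java
// Sketch only: the operation ID appended after the last '/' is assumed to be a
// 36-character UUID (8-4-4-4-12 hex digits).
private static final int OPERATION_ID_LENGTH = 36;

public static String parseOperationId(String operationLocation) {
    if (!CoreUtils.isNullOrEmpty(operationLocation)) {
        final int indexBeforeOperationId = operationLocation.lastIndexOf('/');
        if (indexBeforeOperationId != -1) {
            // substring's end index is exclusive, hence the extra +1 to skip the '/' itself.
            return operationLocation.substring(indexBeforeOperationId + 1,
                indexBeforeOperationId + 1 + OPERATION_ID_LENGTH);
        }
    }
    throw LOGGER.logExceptionAsError(new RuntimeException(
        "Failed to parse operation header for operation Id from: " + operationLocation));
}
```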
class Utility { public static final Duration DEFAULT_POLL_INTERVAL = Duration.ofSeconds(30); private static final ClientLogger LOGGER = new ClientLogger(Utility.class); private static final int NEUTRAL_SCORE_ZERO = 0; private static final String DOCUMENT_SENTENCES_ASSESSMENTS_REG_EXP = " private static final Pattern PATTERN; static { PATTERN = Pattern.compile(DOCUMENT_SENTENCES_ASSESSMENTS_REG_EXP); } private Utility() { } /** * Verify that list of documents are not null or empty. Otherwise, throw exception. * * @param documents A list of documents. * * @throws NullPointerException if {@code documents} is null. * @throws IllegalArgumentException if {@code documents} is empty. */ public static void inputDocumentsValidation(Iterable<?> documents) { Objects.requireNonNull(documents, "'documents' cannot be null."); final Iterator<?> iterator = documents.iterator(); if (!iterator.hasNext()) { throw new IllegalArgumentException("'documents' cannot be empty."); } } /** * Mapping a {@link ErrorResponseException} to {@link HttpResponseException} if exist. Otherwise, return * original {@link Throwable}. * * @param throwable A {@link Throwable}. * @return A {@link HttpResponseException} or the original throwable type. */ public static Throwable mapToHttpResponseExceptionIfExists(Throwable throwable) { if (throwable instanceof ErrorResponseException) { ErrorResponseException errorException = (ErrorResponseException) throwable; final ErrorResponse errorResponse = errorException.getValue(); com.azure.ai.textanalytics.models.TextAnalyticsError textAnalyticsError = null; if (errorResponse != null && errorResponse.getError() != null) { textAnalyticsError = toTextAnalyticsError(errorResponse.getError()); } return new HttpResponseException(errorException.getMessage(), errorException.getResponse(), textAnalyticsError); } return throwable; } /** * Given a list of documents will apply the indexing function to it and return the updated list. * * @param documents the inputs to apply the mapping function to. * @param mappingFunction the function which applies the index to the incoming input value. * @param <T> the type of items being returned in the list. * @return The list holding all the generic items combined. */ public static <T> List<T> mapByIndex(Iterable<String> documents, BiFunction<String, String, T> mappingFunction) { Objects.requireNonNull(documents, "'documents' cannot be null."); AtomicInteger i = new AtomicInteger(0); List<T> result = new ArrayList<>(); documents.forEach(document -> result.add(mappingFunction.apply(String.valueOf(i.getAndIncrement()), document)) ); return result; } /** * Convert {@link DocumentStatistics} to {@link TextDocumentStatistics} * * @param statistics the {@link DocumentStatistics} provided by the service. * @return the {@link TextDocumentStatistics} returned by the SDK. */ public static TextDocumentStatistics toTextDocumentStatistics(DocumentStatistics statistics) { return new TextDocumentStatistics(statistics.getCharactersCount(), statistics.getTransactionsCount()); } /** * Convert {@link RequestStatistics} to {@link TextDocumentBatchStatistics} * * @param statistics the {@link RequestStatistics} provided by the service. * @return the {@link TextDocumentBatchStatistics} returned by the SDK. 
*/ public static TextDocumentBatchStatistics toBatchStatistics(RequestStatistics statistics) { return new TextDocumentBatchStatistics(statistics.getDocumentsCount(), statistics.getValidDocumentsCount(), statistics.getErroneousDocumentsCount(), statistics.getTransactionsCount()); } /** * Convert {@link Error} to {@link com.azure.ai.textanalytics.models.TextAnalyticsError} * This function maps the service returned {@link Error inner error} to the top level * {@link com.azure.ai.textanalytics.models.TextAnalyticsError error}, if inner error present. * * @param error the {@link Error} returned by the service. * @return the {@link com.azure.ai.textanalytics.models.TextAnalyticsError} returned by the SDK. */ public static TextAnalyticsError toTextAnalyticsError(Error error) { final InnerErrorModel innerError = error.getInnererror(); if (innerError == null) { final ErrorCode errorCode = error.getCode(); return new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString(errorCode == null ? null : errorCode.toString()), error.getMessage(), error.getTarget()); } final InnerErrorCode innerErrorCodeValue = innerError.getCode(); return new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString(innerErrorCodeValue == null ? null : innerErrorCodeValue.toString()), innerError.getMessage(), innerError.getTarget()); } public static TextAnalyticsWarning toTextAnalyticsWarning( DocumentWarning warning) { final WarningCodeValue warningCodeValue = warning.getCode(); return new TextAnalyticsWarning( WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()), warning.getMessage()); } /** * Convert the incoming input {@link TextDocumentInput} to the service expected {@link MultiLanguageInput}. * * @param documents the user provided input in {@link TextDocumentInput} * @return the service required input {@link MultiLanguageInput} */ public static List<MultiLanguageInput> toMultiLanguageInput(Iterable<TextDocumentInput> documents) { List<MultiLanguageInput> multiLanguageInputs = new ArrayList<>(); for (TextDocumentInput textDocumentInput : documents) { multiLanguageInputs.add(new MultiLanguageInput().setId(textDocumentInput.getId()) .setText(textDocumentInput.getText()).setLanguage(textDocumentInput.getLanguage())); } return multiLanguageInputs; } /** * Convert the incoming input {@link com.azure.ai.textanalytics.models.TextAnalyticsError} * to a {@link TextAnalyticsException}. * * @param error the {@link com.azure.ai.textanalytics.models.TextAnalyticsError}. * @return the {@link TextAnalyticsException} to be thrown. */ public static TextAnalyticsException toTextAnalyticsException( com.azure.ai.textanalytics.models.TextAnalyticsError error) { return new TextAnalyticsException(error.getMessage(), error.getErrorCode(), error.getTarget()); } /** * Convert to a list of {@link LanguageInput} from {@link DetectLanguageInput}. * * @param documents The list of documents to detect languages for. * * @return a list of {@link LanguageInput}. */ public static List<LanguageInput> toLanguageInput(Iterable<DetectLanguageInput> documents) { final List<LanguageInput> multiLanguageInputs = new ArrayList<>(); documents.forEach(textDocumentInput -> multiLanguageInputs.add(new LanguageInput() .setId(textDocumentInput.getId()) .setText(textDocumentInput.getText()) .setCountryHint(textDocumentInput.getCountryHint()))); return multiLanguageInputs; } /** * Extracts the operation ID from the 'operation-location' URL. 
An example of 'operation-location' is * https: * * @param operationLocation The URL specified in the 'Operation-Location' response header containing the * operation ID used to track the progress and obtain the ID of the analyze operation. * * @return The operation ID that tracks the long running operation progress. */ /** * Extract the next pagination link which contains the request parameter values, into map, * such as '$skip=20' and '$top=2'. * * @param nextLink the next pagination link. * * @return A map that holds the request parameter value of next pagination link. */ public static Map<String, Object> parseNextLink(String nextLink) { if (!CoreUtils.isNullOrEmpty(nextLink)) { final Map<String, Object> parameterMap = new HashMap<>(); final String[] strings = nextLink.split("\\?", 2); final String[] parameters = strings[1].split("&"); for (String parameter : parameters) { final String[] parameterPair = parameter.split("="); final String key = parameterPair[0]; final String value = parameterPair[1]; if ("showStats".equals(key)) { parameterMap.put(key, value); } else if ("$skip".equals(key) || "$top".equals(key)) { parameterMap.put(key, Integer.valueOf(value)); } } return parameterMap; } return new HashMap<>(); } public static Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse( Response<SentimentResponse> response) { return new SimpleResponse<>(response, toAnalyzeSentimentResultCollection(response.getValue())); } public static Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse2( Response<AnalyzeTextTaskResult> response) { return new SimpleResponse<>(response, toAnalyzeSentimentResultCollection(((SentimentTaskResult) response.getValue()).getResults())); } public static Response<DetectLanguageResultCollection> toDetectLanguageResultCollectionResponse( Response<LanguageResult> response) { final LanguageResult languageResult = response.getValue(); final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>(); for (DocumentLanguage documentLanguage : languageResult.getDocuments()) { com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguage = documentLanguage.getDetectedLanguage(); final List<TextAnalyticsWarning> warnings = documentLanguage.getWarnings().stream() .map(warning -> toTextAnalyticsWarning(warning)) .collect(Collectors.toList()); detectLanguageResults.add(new DetectLanguageResult( documentLanguage.getId(), documentLanguage.getStatistics() == null ? null : toTextDocumentStatistics(documentLanguage.getStatistics()), null, new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore(), new IterableStream<>(warnings)))); } for (DocumentError documentError : languageResult.getErrors()) { detectLanguageResults.add(new DetectLanguageResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new DetectLanguageResultCollection(detectLanguageResults, languageResult.getModelVersion(), languageResult.getStatistics() == null ? 
null : toBatchStatistics(languageResult.getStatistics()))); } public static Response<DetectLanguageResultCollection> toDetectLanguageResultCollectionResponse2( Response<AnalyzeTextTaskResult> response) { final LanguageDetectionResult languageResult = ((LanguageDetectionTaskResult) response.getValue()).getResults(); final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>(); for (LanguageDetectionDocumentResult documentLanguage : languageResult.getDocuments()) { com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguage = documentLanguage.getDetectedLanguage(); final List<TextAnalyticsWarning> warnings = documentLanguage.getWarnings() .stream() .map(warning -> toTextAnalyticsWarning(warning)) .collect(Collectors.toList()); detectLanguageResults.add(new DetectLanguageResult( documentLanguage.getId(), documentLanguage.getStatistics() == null ? null : toTextDocumentStatistics(documentLanguage.getStatistics()), null, new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore(), new IterableStream<>(warnings) ))); } for (DocumentError documentError : languageResult.getErrors()) { detectLanguageResults.add(new DetectLanguageResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new DetectLanguageResultCollection(detectLanguageResults, languageResult.getModelVersion(), languageResult.getStatistics() == null ? null : toBatchStatistics(languageResult.getStatistics()))); } public static Response<ExtractKeyPhrasesResultCollection> toExtractKeyPhrasesResultCollectionResponse( final Response<KeyPhraseResult> response) { final KeyPhraseResult keyPhraseResult = response.getValue(); final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? null : toBatchStatistics(keyPhraseResult.getStatistics()))); } public static Response<ExtractKeyPhrasesResultCollection> toExtractKeyPhrasesResultCollectionResponse2( final Response<AnalyzeTextTaskResult> response) { final KeyPhraseResult keyPhraseResult = ((KeyPhraseTaskResult) response.getValue()).getResults(); final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? 
null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? null : toBatchStatistics(keyPhraseResult.getStatistics()))); } public static RecognizeEntitiesResultCollection toRecognizeEntitiesResultCollectionResponse( final EntitiesResult entitiesResult) { List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); entitiesResult.getDocuments().forEach(documentEntities -> recognizeEntitiesResults.add(toRecognizeEntitiesResult(documentEntities))); for (DocumentError documentError : entitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizeEntitiesResultCollection(recognizeEntitiesResults, entitiesResult.getModelVersion(), entitiesResult.getStatistics() == null ? null : toBatchStatistics(entitiesResult.getStatistics())); } public static Response<RecognizeEntitiesResultCollection> toRecognizeEntitiesResultCollection( final Response<EntitiesResult> response) { EntitiesResult entitiesResult = response.getValue(); return new SimpleResponse<>(response, new RecognizeEntitiesResultCollection( toRecognizeEntitiesResults(entitiesResult), entitiesResult.getModelVersion(), entitiesResult.getStatistics() == null ? null : toBatchStatistics(entitiesResult.getStatistics()))); } public static Response<RecognizeEntitiesResultCollection> toRecognizeEntitiesResultCollection2( final Response<AnalyzeTextTaskResult> response) { EntitiesTaskResult entitiesTaskResult = (EntitiesTaskResult) response.getValue(); final EntitiesResult results = entitiesTaskResult.getResults(); return new SimpleResponse<>(response, new RecognizeEntitiesResultCollection( toRecognizeEntitiesResults(results), results.getModelVersion(), results.getStatistics() == null ? null : toBatchStatistics(results.getStatistics()))); } public static List<RecognizeEntitiesResult> toRecognizeEntitiesResults(EntitiesResult results) { List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); results.getDocuments().forEach( documentEntities -> recognizeEntitiesResults.add(new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>( documentEntities.getWarnings().stream() .map(warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))))); for (DocumentError documentError : results.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return recognizeEntitiesResults; } public static RecognizeEntitiesResult toRecognizeEntitiesResult(EntitiesResultDocumentsItem documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static RecognizeEntitiesResult toRecognizeEntitiesResult(CustomEntitiesResultDocumentsItem documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse( final Response<PiiResult> response) { final PiiResult piiEntitiesResult = response.getValue(); return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection( toRecognizePiiEntitiesResults(piiEntitiesResult), piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? 
null : toBatchStatistics(piiEntitiesResult.getStatistics()) )); } public static Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse2( final Response<AnalyzeTextTaskResult> response) { final PiiResult piiEntitiesResult = ((PiiTaskResult) response.getValue()).getResults(); return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection( toRecognizePiiEntitiesResults(piiEntitiesResult), piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics()) )); } public static List<RecognizePiiEntitiesResult> toRecognizePiiEntitiesResults(PiiResult piiEntitiesResult) { final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); piiEntitiesResult.getDocuments().forEach(documentEntities -> { final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map( entity -> { final PiiEntity piiEntity = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity, entity.getText()); PiiEntityPropertiesHelper.setCategory(piiEntity, PiiEntityCategory.fromString(entity.getCategory())); PiiEntityPropertiesHelper.setSubcategory(piiEntity, entity.getSubcategory()); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity, entity.getConfidenceScore()); PiiEntityPropertiesHelper.setOffset(piiEntity, entity.getOffset()); PiiEntityPropertiesHelper.setLength(piiEntity, entity.getLength()); return piiEntity; }) .collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); recognizeEntitiesResults.add(new RecognizePiiEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new PiiEntityCollection(new IterableStream<>(piiEntities), documentEntities.getRedactedText(), new IterableStream<>(warnings)) )); }); for (DocumentError documentError : piiEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return recognizeEntitiesResults; } public static RecognizeEntitiesResult toRecognizeEntitiesResult(DocumentEntities documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static RecognizePiiEntitiesResultCollection toRecognizePiiEntitiesResultCollection( final PiiResult piiEntitiesResult) { final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); piiEntitiesResult.getDocuments().forEach(documentEntities -> { final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(entity -> { final PiiEntity piiEntity = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity, entity.getText()); PiiEntityPropertiesHelper.setCategory(piiEntity, PiiEntityCategory.fromString(entity.getCategory())); PiiEntityPropertiesHelper.setSubcategory(piiEntity, entity.getSubcategory()); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity, entity.getConfidenceScore()); PiiEntityPropertiesHelper.setOffset(piiEntity, entity.getOffset()); return piiEntity; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); recognizeEntitiesResults.add(new RecognizePiiEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new PiiEntityCollection(new IterableStream<>(piiEntities), documentEntities.getRedactedText(), new IterableStream<>(warnings)) )); }); for (DocumentError documentError : piiEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizePiiEntitiesResultCollection(recognizeEntitiesResults, piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics())); } public static ExtractKeyPhrasesResultCollection toExtractKeyPhrasesResultCollection( final KeyPhraseResult keyPhraseResult) { final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? 
null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? null : toBatchStatistics(keyPhraseResult.getStatistics())); } public static Response<RecognizeLinkedEntitiesResultCollection> toRecognizeLinkedEntitiesResultCollectionResponse( final Response<EntityLinkingResult> response) { final EntityLinkingResult entityLinkingResult = response.getValue(); return new SimpleResponse<>(response, new RecognizeLinkedEntitiesResultCollection(toRecognizeLinkedEntitiesResultCollection(entityLinkingResult), entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? null : toBatchStatistics(entityLinkingResult.getStatistics()))); } public static Response<RecognizeLinkedEntitiesResultCollection> toRecognizeLinkedEntitiesResultCollection( final Response<AnalyzeTextTaskResult> response) { final EntityLinkingResult entityLinkingResult = ((EntityLinkingTaskResult) response.getValue()).getResults(); return new SimpleResponse<>(response, new RecognizeLinkedEntitiesResultCollection(toRecognizeLinkedEntitiesResultCollection(entityLinkingResult), entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? null : toBatchStatistics(entityLinkingResult.getStatistics()))); } public static RecognizeLinkedEntitiesResultCollection toRecognizeLinkedEntitiesResultCollection( final EntityLinkingResult entityLinkingResult) { final List<RecognizeLinkedEntitiesResult> linkedEntitiesResults = entityLinkingResult.getDocuments().stream().map( documentLinkedEntities -> new RecognizeLinkedEntitiesResult( documentLinkedEntities.getId(), documentLinkedEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentLinkedEntities.getStatistics()), null, new LinkedEntityCollection(new IterableStream<>( documentLinkedEntities.getEntities().stream().map( linkedEntity -> { final LinkedEntity entity = new LinkedEntity( linkedEntity.getName(), new IterableStream<>( linkedEntity.getMatches().stream().map( match -> { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch( match.getText(), match.getConfidenceScore()); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, match.getOffset()); LinkedEntityMatchPropertiesHelper.setLength(linkedEntityMatch, match.getLength()); return linkedEntityMatch; }).collect(Collectors.toList())), linkedEntity.getLanguage(), linkedEntity.getId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); LinkedEntityPropertiesHelper.setBingEntitySearchApiId(entity, linkedEntity.getBingId()); return entity; }).collect(Collectors.toList())), new IterableStream<>(documentLinkedEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))) ).collect(Collectors.toList()); for (DocumentError documentError : entityLinkingResult.getErrors()) { linkedEntitiesResults.add(new RecognizeLinkedEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizeLinkedEntitiesResultCollection(linkedEntitiesResults, entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? null : toBatchStatistics(entityLinkingResult.getStatistics())); } /** * Helper method to convert {@link SentimentResponse} to {@link AnalyzeSentimentResultCollection}. * * @param sentimentResponse The {@link SentimentResponse}. * * @return A {@link AnalyzeSentimentResultCollection}. */ public static AnalyzeSentimentResultCollection toAnalyzeSentimentResultCollection( SentimentResponse sentimentResponse) { final List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); final List<SentimentResponseDocumentsItem> documentSentiments = sentimentResponse.getDocuments(); for (SentimentResponseDocumentsItem documentSentiment : documentSentiments) { analyzeSentimentResults.add(toAnalyzeSentimentResult(documentSentiment, documentSentiments)); } for (DocumentError documentError : sentimentResponse.getErrors()) { analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new AnalyzeSentimentResultCollection(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert {@link ExtractiveSummarizationResult} to {@link ExtractSummaryResultCollection}. * * @param extractiveSummarizationResult The {@link ExtractiveSummarizationResult}. * * @return A {@link ExtractSummaryResultCollection}. 
*/ public static ExtractSummaryResultCollection toExtractSummaryResultCollection( ExtractiveSummarizationResult extractiveSummarizationResult) { final List<ExtractSummaryResult> extractSummaryResults = new ArrayList<>(); final List<ExtractiveSummarizationResultDocumentsItem> extractedDocumentSummaries = extractiveSummarizationResult.getDocuments(); for (ExtractiveSummarizationResultDocumentsItem documentSummary : extractedDocumentSummaries) { extractSummaryResults.add(toExtractSummaryResult(documentSummary)); } for (DocumentError documentError : extractiveSummarizationResult.getErrors()) { extractSummaryResults.add(new ExtractSummaryResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } return new ExtractSummaryResultCollection(extractSummaryResults, extractiveSummarizationResult.getModelVersion(), extractiveSummarizationResult.getStatistics() == null ? null : toBatchStatistics(extractiveSummarizationResult.getStatistics())); } /** * Transfer {@link HealthcareResult} into {@link AnalyzeHealthcareEntitiesResultCollection}. * * @param healthcareResult the service side raw data, HealthcareResult. * * @return the client side explored model, AnalyzeHealthcareEntitiesResultCollection. */ public static AnalyzeHealthcareEntitiesResultCollection toAnalyzeHealthcareEntitiesResultCollection( HealthcareResult healthcareResult) { List<AnalyzeHealthcareEntitiesResult> analyzeHealthcareEntitiesResults = new ArrayList<>(); healthcareResult.getDocuments().forEach( documentEntities -> { final AnalyzeHealthcareEntitiesResult analyzeHealthcareEntitiesResult = new AnalyzeHealthcareEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( textAnalyticsWarning -> new TextAnalyticsWarning( Optional.ofNullable(textAnalyticsWarning.getCode()) .map(warningCodeValue -> WarningCode.fromString(warningCodeValue.toString())) .orElse(null), textAnalyticsWarning.getMessage()) ).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setWarnings(analyzeHealthcareEntitiesResult, IterableStream.of(warnings)); final List<HealthcareEntity> healthcareEntities = documentEntities.getEntities().stream().map( entity -> { final HealthcareEntity healthcareEntity = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity, entity.getText()); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity, entity.getName()); if (entity.getCategory() != null) { HealthcareEntityPropertiesHelper.setCategory(healthcareEntity, HealthcareEntityCategory.fromString(entity.getCategory().toString())); } HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity, entity.getConfidenceScore()); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity, entity.getOffset()); HealthcareEntityPropertiesHelper.setLength(healthcareEntity, entity.getLength()); final List<EntityDataSource> entityDataSources = Optional.ofNullable(entity.getLinks()).map( links -> links.stream().map( link -> { final EntityDataSource dataSource = new EntityDataSource(); EntityDataSourcePropertiesHelper.setName(dataSource, link.getDataSource()); EntityDataSourcePropertiesHelper.setEntityId(dataSource, link.getId()); return dataSource; } ).collect(Collectors.toList())) .orElse(new ArrayList<>()); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity, IterableStream.of(entityDataSources)); 
final HealthcareAssertion assertion = entity.getAssertion(); if (assertion != null) { HealthcareEntityPropertiesHelper.setAssertion(healthcareEntity, toHealthcareEntityAssertion(assertion)); } return healthcareEntity; }).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(analyzeHealthcareEntitiesResult, IterableStream.of(healthcareEntities)); final List<HealthcareEntityRelation> healthcareEntityRelations = documentEntities.getRelations().stream().map( healthcareRelation -> { final HealthcareEntityRelation entityRelation = new HealthcareEntityRelation(); final RelationType relationType = healthcareRelation.getRelationType(); if (relationType != null) { HealthcareEntityRelationPropertiesHelper.setRelationType(entityRelation, HealthcareEntityRelationType.fromString(relationType.toString())); } final List<HealthcareEntityRelationRole> relationRoles = healthcareRelation.getEntities().stream().map( relationEntity -> { final HealthcareEntityRelationRole relationRole = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(relationRole, relationEntity.getRole()); HealthcareEntityRelationRolePropertiesHelper.setEntity(relationRole, healthcareEntities.get(getHealthcareEntityIndex(relationEntity.getRef()))); return relationRole; }).collect(Collectors.toList()); HealthcareEntityRelationPropertiesHelper.setRoles(entityRelation, IterableStream.of(relationRoles)); return entityRelation; }).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(analyzeHealthcareEntitiesResult, IterableStream.of(healthcareEntityRelations)); analyzeHealthcareEntitiesResults.add(analyzeHealthcareEntitiesResult); }); healthcareResult.getErrors().forEach(documentError -> analyzeHealthcareEntitiesResults.add(new AnalyzeHealthcareEntitiesResult( documentError.getId(), null, toTextAnalyticsError(documentError.getError()))) ); return new AnalyzeHealthcareEntitiesResultCollection(IterableStream.of(analyzeHealthcareEntitiesResults)); } public static HealthcareEntityAssertion toHealthcareEntityAssertion(HealthcareAssertion healthcareAssertion) { final Association association = healthcareAssertion.getAssociation(); final Certainty certainty = healthcareAssertion.getCertainty(); final Conditionality conditionality = healthcareAssertion.getConditionality(); final HealthcareEntityAssertion entityAssertion = new HealthcareEntityAssertion(); if (association != null) { HealthcareEntityAssertionPropertiesHelper.setAssociation(entityAssertion, EntityAssociation.fromString(association.toString())); } if (certainty != null) { HealthcareEntityAssertionPropertiesHelper.setCertainty(entityAssertion, toCertainty(certainty)); } if (conditionality != null) { HealthcareEntityAssertionPropertiesHelper.setConditionality(entityAssertion, toConditionality(conditionality)); } return entityAssertion; } private static EntityCertainty toCertainty(Certainty certainty) { EntityCertainty entityCertainty1 = null; switch (certainty) { case POSITIVE: entityCertainty1 = EntityCertainty.POSITIVE; break; case POSITIVE_POSSIBLE: entityCertainty1 = EntityCertainty.POSITIVE_POSSIBLE; break; case NEUTRAL_POSSIBLE: entityCertainty1 = EntityCertainty.NEUTRAL_POSSIBLE; break; case NEGATIVE_POSSIBLE: entityCertainty1 = EntityCertainty.NEGATIVE_POSSIBLE; break; case NEGATIVE: entityCertainty1 = EntityCertainty.NEGATIVE; break; default: break; } return entityCertainty1; } private static EntityConditionality toConditionality(Conditionality conditionality) { 
EntityConditionality conditionality1 = null; switch (conditionality) { case HYPOTHETICAL: conditionality1 = EntityConditionality.HYPOTHETICAL; break; case CONDITIONAL: conditionality1 = EntityConditionality.CONDITIONAL; break; default: break; } return conditionality1; } /** * Helper function that parse healthcare entity index from the given entity reference string. * The entity reference format is " * * @param entityReference the given healthcare entity reference string. * * @return the healthcare entity index. */ private static Integer getHealthcareEntityIndex(String entityReference) { if (!CoreUtils.isNullOrEmpty(entityReference)) { int lastIndex = entityReference.lastIndexOf('/'); if (lastIndex != -1) { return Integer.parseInt(entityReference.substring(lastIndex + 1)); } } throw LOGGER.logExceptionAsError( new RuntimeException("Failed to parse healthcare entity index from: " + entityReference)); } /** * Get the non-null {@link Context}. The default value is {@link Context * * @param context It offers a means of passing arbitrary data (key-value pairs) to pipeline policies. * Most applications do not need to pass arbitrary data to the pipeline and can pass Context.NONE or null. * * @return The Context. */ public static Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } /** * Helper function which retrieves the size of an {@link Iterable}. * * @param documents The iterable of documents. * @return Count of documents in the iterable. */ public static int getDocumentCount(Iterable<?> documents) { if (documents instanceof Collection) { return ((Collection<?>) documents).size(); } else { final int[] count = new int[] { 0 }; documents.forEach(ignored -> count[0] += 1); return count[0]; } } /** * Helper function which convert the {@code Iterable<PiiEntityCategory>} to {@code List<PiiCategory>}. * * @param categoriesFilter the iterable of {@link PiiEntityCategory}. * @return the list of {@link PiiCategory}. */ public static List<PiiCategory> toCategoriesFilter(Iterable<PiiEntityCategory> categoriesFilter) { if (categoriesFilter == null) { return null; } final List<PiiCategory> piiCategories = new ArrayList<>(); categoriesFilter.forEach(category -> piiCategories.add(PiiCategory.fromString(category.toString()))); return piiCategories; } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment The {@link SentimentResponseDocumentsItem} returned by the service. * @param documentSentimentList The document sentiment list returned by the service. * * @return The {@link AnalyzeSentimentResult} to be returned by the SDK. */ private static AnalyzeSentimentResult toAnalyzeSentimentResult(SentimentResponseDocumentsItem documentSentiment, List<SentimentResponseDocumentsItem> documentSentimentList) { final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getConfidenceScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { final SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getConfidenceScores(); final SentenceSentimentValue sentenceSentimentValue = sentenceSentiment.getSentiment(); final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(sentenceSentiment.getText(), TextSentiment.fromString(sentenceSentimentValue == null ? 
null : sentenceSentimentValue.toString()), new SentimentConfidenceScores(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive())); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, toSentenceOpinionList(sentenceSentiment, documentSentimentList)); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, sentenceSentiment.getOffset()); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, sentenceSentiment.getLength()); return sentenceSentiment1; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentSentiment.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final DocumentSentimentValue documentSentimentValue = documentSentiment.getSentiment(); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( TextSentiment.fromString(documentSentimentValue == null ? null : documentSentimentValue.toString()), new SentimentConfidenceScores( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), new IterableStream<>(sentenceSentiments), new IterableStream<>(warnings) )); } /* * Transform SentenceSentiment's opinion mining to output that user can use. */ private static IterableStream<SentenceOpinion> toSentenceOpinionList( com.azure.ai.textanalytics.implementation.models.SentenceSentiment sentenceSentiment, List<SentimentResponseDocumentsItem> documentSentimentList) { final List<SentenceTarget> sentenceTargets = sentenceSentiment.getTargets(); if (sentenceTargets == null) { return null; } final List<SentenceOpinion> sentenceOpinions = new ArrayList<>(); sentenceTargets.forEach(sentenceTarget -> { final List<AssessmentSentiment> assessmentSentiments = new ArrayList<>(); sentenceTarget.getRelations().forEach(targetRelation -> { final TargetRelationType targetRelationType = targetRelation.getRelationType(); final String opinionPointer = targetRelation.getRef(); if (TargetRelationType.ASSESSMENT == targetRelationType) { assessmentSentiments.add(toAssessmentSentiment( findSentimentAssessment(opinionPointer, documentSentimentList))); } }); final TargetSentiment targetSentiment = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment, sentenceTarget.getText()); TargetSentimentPropertiesHelper.setSentiment(targetSentiment, TextSentiment.fromString(sentenceTarget.getSentiment().toString())); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment, toSentimentConfidenceScores(sentenceTarget.getConfidenceScores())); TargetSentimentPropertiesHelper.setOffset(targetSentiment, sentenceTarget.getOffset()); TargetSentimentPropertiesHelper.setLength(targetSentiment, sentenceTarget.getLength()); final SentenceOpinion sentenceOpinion = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion, targetSentiment); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion, new IterableStream<>(assessmentSentiments)); sentenceOpinions.add(sentenceOpinion); }); return new IterableStream<>(sentenceOpinions); } /* * Transform type TargetConfidenceScoreLabel to SentimentConfidenceScores. 
*/ private static SentimentConfidenceScores toSentimentConfidenceScores( TargetConfidenceScoreLabel targetConfidenceScoreLabel) { return new SentimentConfidenceScores(targetConfidenceScoreLabel.getNegative(), NEUTRAL_SCORE_ZERO, targetConfidenceScoreLabel.getPositive()); } /* * Transform type SentenceOpinion to OpinionSentiment. */ private static AssessmentSentiment toAssessmentSentiment(SentenceAssessment sentenceAssessment) { final AssessmentSentiment assessmentSentiment = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment, sentenceAssessment.getText()); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment, TextSentiment.fromString(sentenceAssessment.getSentiment().toString())); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment, toSentimentConfidenceScores(sentenceAssessment.getConfidenceScores())); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment, sentenceAssessment.isNegated()); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment, sentenceAssessment.getOffset()); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment, sentenceAssessment.getLength()); return assessmentSentiment; } private static ExtractSummaryResult toExtractSummaryResult( ExtractiveSummarizationResultDocumentsItem documentSummary) { final List<ExtractedSummarySentence> sentences = documentSummary.getSentences(); final List<SummarySentence> summarySentences = sentences.stream().map(sentence -> { final SummarySentence summarySentence = new SummarySentence(); SummarySentencePropertiesHelper.setText(summarySentence, sentence.getText()); SummarySentencePropertiesHelper.setRankScore(summarySentence, sentence.getRankScore()); SummarySentencePropertiesHelper.setLength(summarySentence, sentence.getLength()); SummarySentencePropertiesHelper.setOffset(summarySentence, sentence.getOffset()); return summarySentence; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentSummary.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final SummarySentenceCollection summarySentenceCollection = new SummarySentenceCollection( new IterableStream<>(summarySentences), new IterableStream<>(warnings) ); final ExtractSummaryResult extractSummaryResult = new ExtractSummaryResult(documentSummary.getId(), documentSummary.getStatistics() == null ? null : toTextDocumentStatistics(documentSummary.getStatistics()), null ); ExtractSummaryResultPropertiesHelper.setSentences(extractSummaryResult, summarySentenceCollection); return extractSummaryResult; } /** * Helper method to convert {@link CustomEntitiesResult} to {@link RecognizeCustomEntitiesResultCollection}. * * @param customEntitiesResult The {@link CustomEntitiesResult}. * * @return A {@link RecognizeCustomEntitiesResultCollection}. 
*/ public static RecognizeCustomEntitiesResultCollection toRecognizeCustomEntitiesResultCollection( CustomEntitiesResult customEntitiesResult) { final List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); final List<CustomEntitiesResultDocumentsItem> customEntitiesResultDocuments = customEntitiesResult.getDocuments(); for (CustomEntitiesResultDocumentsItem documentSummary : customEntitiesResultDocuments) { recognizeEntitiesResults.add(toRecognizeEntitiesResult(documentSummary)); } for (DocumentError documentError : customEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } final RecognizeCustomEntitiesResultCollection resultCollection = new RecognizeCustomEntitiesResultCollection(recognizeEntitiesResults); RecognizeCustomEntitiesResultCollectionPropertiesHelper.setProjectName(resultCollection, customEntitiesResult.getProjectName()); RecognizeCustomEntitiesResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customEntitiesResult.getDeploymentName()); if (customEntitiesResult.getStatistics() != null) { RecognizeCustomEntitiesResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customEntitiesResult.getStatistics())); } return resultCollection; } /** * Helper method to convert {@link CustomSingleClassificationResult} to * {@link SingleCategoryClassifyResultCollection}. * * @param customSingleClassificationResult The {@link CustomSingleClassificationResult}. * * @return A {@link SingleCategoryClassifyResultCollection}. */ public static SingleCategoryClassifyResultCollection toSingleCategoryClassifyResultCollection( CustomSingleLabelClassificationResult customSingleClassificationResult) { final List<SingleCategoryClassifyResult> singleCategoryClassifyResults = new ArrayList<>(); final List<CustomSingleLabelClassificationResultDocumentsItem> singleClassificationDocuments = customSingleClassificationResult.getDocuments(); for (CustomSingleLabelClassificationResultDocumentsItem documentSummary : singleClassificationDocuments) { singleCategoryClassifyResults.add(toSingleCategoryClassifyResult(documentSummary)); } for (DocumentError documentError : customSingleClassificationResult.getErrors()) { singleCategoryClassifyResults.add(new SingleCategoryClassifyResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } final SingleCategoryClassifyResultCollection resultCollection = new SingleCategoryClassifyResultCollection(singleCategoryClassifyResults); SingleCategoryClassifyResultCollectionPropertiesHelper.setProjectName(resultCollection, customSingleClassificationResult.getProjectName()); SingleCategoryClassifyResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customSingleClassificationResult.getDeploymentName()); if (customSingleClassificationResult.getStatistics() != null) { SingleCategoryClassifyResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customSingleClassificationResult.getStatistics())); } return resultCollection; } private static SingleCategoryClassifyResult toSingleCategoryClassifyResult( CustomSingleLabelClassificationResultDocumentsItem singleClassificationDocument) { final ClassificationResult classificationResult = singleClassificationDocument.getClassProperty(); final List<TextAnalyticsWarning> warnings = singleClassificationDocument.getWarnings().stream().map( warning -> 
toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final SingleCategoryClassifyResult singleCategoryClassifyResult = new SingleCategoryClassifyResult( singleClassificationDocument.getId(), singleClassificationDocument.getStatistics() == null ? null : toTextDocumentStatistics(singleClassificationDocument.getStatistics()), null); SingleCategoryClassifyResultPropertiesHelper.setClassification(singleCategoryClassifyResult, toDocumentClassification(classificationResult)); SingleCategoryClassifyResultPropertiesHelper.setWarnings(singleCategoryClassifyResult, new IterableStream<>(warnings)); return singleCategoryClassifyResult; } private static ClassificationCategory toDocumentClassification(ClassificationResult classificationResult) { final ClassificationCategory classificationCategory = new ClassificationCategory(); ClassificationCategoryPropertiesHelper.setCategory(classificationCategory, classificationResult.getCategory()); ClassificationCategoryPropertiesHelper.setConfidenceScore(classificationCategory, classificationResult.getConfidenceScore()); return classificationCategory; } /** * Helper method to convert {@link CustomMultiClassificationResult} to * {@link MultiCategoryClassifyResultCollection}. * * @param customMultiClassificationResult The {@link CustomMultiClassificationResult}. * * @return A {@link SingleCategoryClassifyResultCollection}. */ public static MultiCategoryClassifyResultCollection toMultiCategoryClassifyResultCollection( CustomMultiLabelClassificationResult customMultiClassificationResult) { final List<MultiCategoryClassifyResult> multiCategoryClassifyResults = new ArrayList<>(); final List<CustomMultiLabelClassificationResultDocumentsItem> multiClassificationDocuments = customMultiClassificationResult.getDocuments(); for (CustomMultiLabelClassificationResultDocumentsItem multiClassificationDocument : multiClassificationDocuments) { multiCategoryClassifyResults.add(toMultiCategoryClassifyResult(multiClassificationDocument)); } for (DocumentError documentError : customMultiClassificationResult.getErrors()) { multiCategoryClassifyResults.add(new MultiCategoryClassifyResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } final MultiCategoryClassifyResultCollection resultCollection = new MultiCategoryClassifyResultCollection(multiCategoryClassifyResults); MultiCategoryClassifyResultCollectionPropertiesHelper.setProjectName(resultCollection, customMultiClassificationResult.getProjectName()); MultiCategoryClassifyResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customMultiClassificationResult.getDeploymentName()); if (customMultiClassificationResult.getStatistics() != null) { MultiCategoryClassifyResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customMultiClassificationResult.getStatistics())); } return resultCollection; } private static MultiCategoryClassifyResult toMultiCategoryClassifyResult( CustomMultiLabelClassificationResultDocumentsItem multiClassificationDocument) { final List<ClassificationCategory> classificationCategories = multiClassificationDocument .getClassProperty() .stream() .map(classificationResult -> toDocumentClassification(classificationResult)) .collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = multiClassificationDocument.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final MultiCategoryClassifyResult classifySingleCategoryResult = new MultiCategoryClassifyResult( 
multiClassificationDocument.getId(), multiClassificationDocument.getStatistics() == null ? null : toTextDocumentStatistics(multiClassificationDocument.getStatistics()), null); final ClassificationCategoryCollection classifications = new ClassificationCategoryCollection( new IterableStream<>(classificationCategories)); ClassificationCategoryCollectionPropertiesHelper.setWarnings(classifications, new IterableStream<>(warnings)); MultiCategoryClassifyResultPropertiesHelper.setClassifications(classifySingleCategoryResult, classifications); return classifySingleCategoryResult; } /* * Parses the reference pointer to an index array that contains document, sentence, and opinion indexes. */ public static int[] parseRefPointerToIndexArray(String assessmentPointer) { final Matcher matcher = PATTERN.matcher(assessmentPointer); final boolean isMatched = matcher.find(); final int[] result = new int[3]; if (isMatched) { result[0] = Integer.parseInt(matcher.group(1)); result[1] = Integer.parseInt(matcher.group(2)); result[2] = Integer.parseInt(matcher.group(3)); } else { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("'%s' is not a valid assessment pointer.", assessmentPointer))); } return result; } /* * Find the specific sentence assessment in the document sentiment list by given the assessment reference pointer. */ public static SentenceAssessment findSentimentAssessment(String assessmentPointer, List<SentimentResponseDocumentsItem> documentSentiments) { final int[] assessmentIndexes = parseRefPointerToIndexArray(assessmentPointer); final int documentIndex = assessmentIndexes[0]; final int sentenceIndex = assessmentIndexes[1]; final int assessmentIndex = assessmentIndexes[2]; if (documentIndex >= documentSentiments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid document index '%s' in '%s'.", documentIndex, assessmentPointer))); } final SentimentResponseDocumentsItem documentsentiment = documentSentiments.get(documentIndex); final List<com.azure.ai.textanalytics.implementation.models.SentenceSentiment> sentenceSentiments = documentsentiment.getSentences(); if (sentenceIndex >= sentenceSentiments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid sentence index '%s' in '%s'.", sentenceIndex, assessmentPointer))); } final List<SentenceAssessment> assessments = sentenceSentiments.get(sentenceIndex).getAssessments(); if (assessmentIndex >= assessments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid assessment index '%s' in '%s'.", assessmentIndex, assessmentPointer))); } return assessments.get(assessmentIndex); } }
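The helpers above lean on three small parsing techniques: parseRefPointerToIndexArray pulls three numeric capture groups (document, sentence, assessment index) out of an opinion-mining reference pointer, getHealthcareEntityIndex takes the last '/'-separated segment of a healthcare entity reference, and the same Utility class (visible in the surrounding context) splits a pagination nextLink query string into typed request parameters. The regex constant and reference formats are truncated in this dump, so the following is only a minimal, self-contained sketch: the literal pointer shapes in ASSESSMENT_REF and in the sample strings, the class name UtilitySketches, and the URL used in main are assumptions, not the SDK's actual constants.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class UtilitySketches {

    // Assumed pointer shape; the SDK's regex constant is not visible in this dump,
    // but parseRefPointerToIndexArray reads exactly three numeric capture groups.
    private static final Pattern ASSESSMENT_REF =
        Pattern.compile("#/documents/(\\d+)/sentences/(\\d+)/assessments/(\\d+)");

    /** Parses an assessment reference pointer into {document, sentence, assessment} indexes. */
    static int[] parseRefPointer(String pointer) {
        Matcher matcher = ASSESSMENT_REF.matcher(pointer);
        if (!matcher.find()) {
            throw new IllegalStateException("'" + pointer + "' is not a valid assessment pointer.");
        }
        return new int[] {
            Integer.parseInt(matcher.group(1)),
            Integer.parseInt(matcher.group(2)),
            Integer.parseInt(matcher.group(3))
        };
    }

    /** The healthcare entity index is the last '/'-separated segment of the reference string. */
    static int entityIndexFromReference(String entityReference) {
        int lastSlash = entityReference.lastIndexOf('/');
        if (lastSlash == -1) {
            throw new IllegalArgumentException("Failed to parse entity index from: " + entityReference);
        }
        return Integer.parseInt(entityReference.substring(lastSlash + 1));
    }

    /** Splits the query string of a pagination nextLink into typed request parameters. */
    static Map<String, Object> parseNextLink(String nextLink) {
        Map<String, Object> parameters = new HashMap<>();
        String query = nextLink.split("\\?", 2)[1];
        for (String pair : query.split("&")) {
            String[] keyValue = pair.split("=");
            String key = keyValue[0];
            String value = keyValue[1];
            if ("showStats".equals(key)) {
                parameters.put(key, value);                    // kept as a string flag
            } else if ("$skip".equals(key) || "$top".equals(key)) {
                parameters.put(key, Integer.valueOf(value));   // already prefixed
            } else if ("skip".equals(key) || "top".equals(key)) {
                parameters.put("$" + key, Integer.valueOf(value)); // normalize to $-prefixed form
            }
        }
        return parameters;
    }

    public static void main(String[] args) {
        int[] indexes = parseRefPointer("#/documents/1/sentences/0/assessments/2");
        System.out.println(indexes[0] + "/" + indexes[1] + "/" + indexes[2]);             // 1/0/2
        System.out.println(entityIndexFromReference("#/results/documents/0/entities/3")); // 3
        System.out.println(parseNextLink("https://example.invalid/jobs/1?showStats=true&$skip=20&$top=2"));
    }
}
```

The sketch mirrors the error handling seen in the dump (invalid pointers and references throw rather than returning a sentinel), which keeps index lookups such as findSentimentAssessment free of null checks; only the concrete reference formats are guessed here.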
class Utility { public static final Duration DEFAULT_POLL_INTERVAL = Duration.ofSeconds(30); private static final ClientLogger LOGGER = new ClientLogger(Utility.class); private static final int NEUTRAL_SCORE_ZERO = 0; private static final int OPERATION_ID_LENGTH = 37; private static final String DOCUMENT_SENTENCES_ASSESSMENTS_REG_EXP = " private static final Pattern PATTERN; static { PATTERN = Pattern.compile(DOCUMENT_SENTENCES_ASSESSMENTS_REG_EXP); } private Utility() { } /** * Verify that list of documents are not null or empty. Otherwise, throw exception. * * @param documents A list of documents. * * @throws NullPointerException if {@code documents} is null. * @throws IllegalArgumentException if {@code documents} is empty. */ public static void inputDocumentsValidation(Iterable<?> documents) { Objects.requireNonNull(documents, "'documents' cannot be null."); final Iterator<?> iterator = documents.iterator(); if (!iterator.hasNext()) { throw new IllegalArgumentException("'documents' cannot be empty."); } } /** * Mapping a {@link ErrorResponseException} to {@link HttpResponseException} if exist. Otherwise, return * original {@link Throwable}. * * @param throwable A {@link Throwable}. * @return A {@link HttpResponseException} or the original throwable type. */ public static Throwable mapToHttpResponseExceptionIfExists(Throwable throwable) { if (throwable instanceof ErrorResponseException) { ErrorResponseException errorException = (ErrorResponseException) throwable; final ErrorResponse errorResponse = errorException.getValue(); com.azure.ai.textanalytics.models.TextAnalyticsError textAnalyticsError = null; if (errorResponse != null && errorResponse.getError() != null) { textAnalyticsError = toTextAnalyticsError(errorResponse.getError()); } return new HttpResponseException(errorException.getMessage(), errorException.getResponse(), textAnalyticsError); } return throwable; } /** * Given a list of documents will apply the indexing function to it and return the updated list. * * @param documents the inputs to apply the mapping function to. * @param mappingFunction the function which applies the index to the incoming input value. * @param <T> the type of items being returned in the list. * @return The list holding all the generic items combined. */ public static <T> List<T> mapByIndex(Iterable<String> documents, BiFunction<String, String, T> mappingFunction) { Objects.requireNonNull(documents, "'documents' cannot be null."); AtomicInteger i = new AtomicInteger(0); List<T> result = new ArrayList<>(); documents.forEach(document -> result.add(mappingFunction.apply(String.valueOf(i.getAndIncrement()), document)) ); return result; } /** * Convert {@link DocumentStatistics} to {@link TextDocumentStatistics} * * @param statistics the {@link DocumentStatistics} provided by the service. * @return the {@link TextDocumentStatistics} returned by the SDK. */ public static TextDocumentStatistics toTextDocumentStatistics(DocumentStatistics statistics) { return new TextDocumentStatistics(statistics.getCharactersCount(), statistics.getTransactionsCount()); } /** * Convert {@link RequestStatistics} to {@link TextDocumentBatchStatistics} * * @param statistics the {@link RequestStatistics} provided by the service. * @return the {@link TextDocumentBatchStatistics} returned by the SDK. 
*/ public static TextDocumentBatchStatistics toBatchStatistics(RequestStatistics statistics) { return new TextDocumentBatchStatistics(statistics.getDocumentsCount(), statistics.getValidDocumentsCount(), statistics.getErroneousDocumentsCount(), statistics.getTransactionsCount()); } /** * Convert {@link Error} to {@link com.azure.ai.textanalytics.models.TextAnalyticsError} * This function maps the service returned {@link Error inner error} to the top level * {@link com.azure.ai.textanalytics.models.TextAnalyticsError error}, if inner error present. * * @param error the {@link Error} returned by the service. * @return the {@link com.azure.ai.textanalytics.models.TextAnalyticsError} returned by the SDK. */ public static TextAnalyticsError toTextAnalyticsError(Error error) { final InnerErrorModel innerError = error.getInnererror(); if (innerError == null) { final ErrorCode errorCode = error.getCode(); return new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString(errorCode == null ? null : errorCode.toString()), error.getMessage(), error.getTarget()); } final InnerErrorCode innerErrorCodeValue = innerError.getCode(); return new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString(innerErrorCodeValue == null ? null : innerErrorCodeValue.toString()), innerError.getMessage(), innerError.getTarget()); } public static TextAnalyticsWarning toTextAnalyticsWarning( DocumentWarning warning) { final WarningCodeValue warningCodeValue = warning.getCode(); return new TextAnalyticsWarning( WarningCode.fromString(warningCodeValue == null ? null : warningCodeValue.toString()), warning.getMessage()); } /** * Convert the incoming input {@link TextDocumentInput} to the service expected {@link MultiLanguageInput}. * * @param documents the user provided input in {@link TextDocumentInput} * @return the service required input {@link MultiLanguageInput} */ public static List<MultiLanguageInput> toMultiLanguageInput(Iterable<TextDocumentInput> documents) { List<MultiLanguageInput> multiLanguageInputs = new ArrayList<>(); for (TextDocumentInput textDocumentInput : documents) { multiLanguageInputs.add(new MultiLanguageInput().setId(textDocumentInput.getId()) .setText(textDocumentInput.getText()).setLanguage(textDocumentInput.getLanguage())); } return multiLanguageInputs; } /** * Convert the incoming input {@link com.azure.ai.textanalytics.models.TextAnalyticsError} * to a {@link TextAnalyticsException}. * * @param error the {@link com.azure.ai.textanalytics.models.TextAnalyticsError}. * @return the {@link TextAnalyticsException} to be thrown. */ public static TextAnalyticsException toTextAnalyticsException( com.azure.ai.textanalytics.models.TextAnalyticsError error) { return new TextAnalyticsException(error.getMessage(), error.getErrorCode(), error.getTarget()); } /** * Convert to a list of {@link LanguageInput} from {@link DetectLanguageInput}. * * @param documents The list of documents to detect languages for. * * @return a list of {@link LanguageInput}. */ public static List<LanguageInput> toLanguageInput(Iterable<DetectLanguageInput> documents) { final List<LanguageInput> multiLanguageInputs = new ArrayList<>(); documents.forEach(textDocumentInput -> multiLanguageInputs.add(new LanguageInput() .setId(textDocumentInput.getId()) .setText(textDocumentInput.getText()) .setCountryHint(textDocumentInput.getCountryHint()))); return multiLanguageInputs; } /** * Extracts the operation ID from the 'operation-location' URL. 
An example of 'operation-location' is * https: * * @param operationLocation The URL specified in the 'Operation-Location' response header containing the * operation ID used to track the progress and obtain the ID of the analyze operation. * * @return The operation ID that tracks the long running operation progress. */ /** * Extract the next pagination link which contains the request parameter values, into map, * such as '$skip=20' and '$top=2'. * * @param nextLink the next pagination link. * * @return A map that holds the request parameter value of next pagination link. */ public static Map<String, Object> parseNextLink(String nextLink) { if (!CoreUtils.isNullOrEmpty(nextLink)) { final Map<String, Object> parameterMap = new HashMap<>(); final String[] strings = nextLink.split("\\?", 2); final String[] parameters = strings[1].split("&"); for (String parameter : parameters) { final String[] parameterPair = parameter.split("="); final String key = parameterPair[0]; final String value = parameterPair[1]; if ("showStats".equals(key)) { parameterMap.put(key, value); } else if ("$skip".equals(key) || "$top".equals(key)) { parameterMap.put(key, Integer.valueOf(value)); } else if ("skip".equals(key) || "top".equals(key)) { parameterMap.put("$" + key, Integer.valueOf(value)); } } return parameterMap; } return new HashMap<>(); } public static Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse( Response<SentimentResponse> response) { return new SimpleResponse<>(response, toAnalyzeSentimentResultCollection(response.getValue())); } public static Response<AnalyzeSentimentResultCollection> toAnalyzeSentimentResultCollectionResponse2( Response<AnalyzeTextTaskResult> response) { return new SimpleResponse<>(response, toAnalyzeSentimentResultCollection(((SentimentTaskResult) response.getValue()).getResults())); } public static Response<DetectLanguageResultCollection> toDetectLanguageResultCollectionResponse( Response<LanguageResult> response) { final LanguageResult languageResult = response.getValue(); final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>(); for (DocumentLanguage documentLanguage : languageResult.getDocuments()) { com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguage = documentLanguage.getDetectedLanguage(); final List<TextAnalyticsWarning> warnings = documentLanguage.getWarnings().stream() .map(warning -> toTextAnalyticsWarning(warning)) .collect(Collectors.toList()); detectLanguageResults.add(new DetectLanguageResult( documentLanguage.getId(), documentLanguage.getStatistics() == null ? null : toTextDocumentStatistics(documentLanguage.getStatistics()), null, new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore(), new IterableStream<>(warnings)))); } for (DocumentError documentError : languageResult.getErrors()) { detectLanguageResults.add(new DetectLanguageResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new DetectLanguageResultCollection(detectLanguageResults, languageResult.getModelVersion(), languageResult.getStatistics() == null ? 
null : toBatchStatistics(languageResult.getStatistics()))); } public static Response<DetectLanguageResultCollection> toDetectLanguageResultCollectionResponse2( Response<AnalyzeTextTaskResult> response) { final LanguageDetectionResult languageResult = ((LanguageDetectionTaskResult) response.getValue()).getResults(); final List<DetectLanguageResult> detectLanguageResults = new ArrayList<>(); for (LanguageDetectionDocumentResult documentLanguage : languageResult.getDocuments()) { com.azure.ai.textanalytics.implementation.models.DetectedLanguage detectedLanguage = documentLanguage.getDetectedLanguage(); final List<TextAnalyticsWarning> warnings = documentLanguage.getWarnings() .stream() .map(warning -> toTextAnalyticsWarning(warning)) .collect(Collectors.toList()); detectLanguageResults.add(new DetectLanguageResult( documentLanguage.getId(), documentLanguage.getStatistics() == null ? null : toTextDocumentStatistics(documentLanguage.getStatistics()), null, new DetectedLanguage(detectedLanguage.getName(), detectedLanguage.getIso6391Name(), detectedLanguage.getConfidenceScore(), new IterableStream<>(warnings) ))); } for (DocumentError documentError : languageResult.getErrors()) { detectLanguageResults.add(new DetectLanguageResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new DetectLanguageResultCollection(detectLanguageResults, languageResult.getModelVersion(), languageResult.getStatistics() == null ? null : toBatchStatistics(languageResult.getStatistics()))); } public static Response<ExtractKeyPhrasesResultCollection> toExtractKeyPhrasesResultCollectionResponse( final Response<KeyPhraseResult> response) { final KeyPhraseResult keyPhraseResult = response.getValue(); final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? null : toBatchStatistics(keyPhraseResult.getStatistics()))); } public static Response<ExtractKeyPhrasesResultCollection> toExtractKeyPhrasesResultCollectionResponse2( final Response<AnalyzeTextTaskResult> response) { final KeyPhraseResult keyPhraseResult = ((KeyPhraseTaskResult) response.getValue()).getResults(); final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? 
null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new SimpleResponse<>(response, new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? null : toBatchStatistics(keyPhraseResult.getStatistics()))); } public static RecognizeEntitiesResultCollection toRecognizeEntitiesResultCollectionResponse( final EntitiesResult entitiesResult) { List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); entitiesResult.getDocuments().forEach(documentEntities -> recognizeEntitiesResults.add(toRecognizeEntitiesResult(documentEntities))); for (DocumentError documentError : entitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizeEntitiesResultCollection(recognizeEntitiesResults, entitiesResult.getModelVersion(), entitiesResult.getStatistics() == null ? null : toBatchStatistics(entitiesResult.getStatistics())); } public static Response<RecognizeEntitiesResultCollection> toRecognizeEntitiesResultCollection( final Response<EntitiesResult> response) { EntitiesResult entitiesResult = response.getValue(); return new SimpleResponse<>(response, new RecognizeEntitiesResultCollection( toRecognizeEntitiesResults(entitiesResult), entitiesResult.getModelVersion(), entitiesResult.getStatistics() == null ? null : toBatchStatistics(entitiesResult.getStatistics()))); } public static Response<RecognizeEntitiesResultCollection> toRecognizeEntitiesResultCollection2( final Response<AnalyzeTextTaskResult> response) { EntitiesTaskResult entitiesTaskResult = (EntitiesTaskResult) response.getValue(); final EntitiesResult results = entitiesTaskResult.getResults(); return new SimpleResponse<>(response, new RecognizeEntitiesResultCollection( toRecognizeEntitiesResults(results), results.getModelVersion(), results.getStatistics() == null ? null : toBatchStatistics(results.getStatistics()))); } public static List<RecognizeEntitiesResult> toRecognizeEntitiesResults(EntitiesResult results) { List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); results.getDocuments().forEach( documentEntities -> recognizeEntitiesResults.add(new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>( documentEntities.getWarnings().stream() .map(warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))))); for (DocumentError documentError : results.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return recognizeEntitiesResults; } public static RecognizeEntitiesResult toRecognizeEntitiesResult(EntitiesResultDocumentsItem documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static RecognizeEntitiesResult toRecognizeEntitiesResult(CustomEntitiesResultDocumentsItem documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse( final Response<PiiResult> response) { final PiiResult piiEntitiesResult = response.getValue(); return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection( toRecognizePiiEntitiesResults(piiEntitiesResult), piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? 
null : toBatchStatistics(piiEntitiesResult.getStatistics()) )); } public static Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse2( final Response<AnalyzeTextTaskResult> response) { final PiiResult piiEntitiesResult = ((PiiTaskResult) response.getValue()).getResults(); return new SimpleResponse<>(response, new RecognizePiiEntitiesResultCollection( toRecognizePiiEntitiesResults(piiEntitiesResult), piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics()) )); } public static List<RecognizePiiEntitiesResult> toRecognizePiiEntitiesResults(PiiResult piiEntitiesResult) { final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); piiEntitiesResult.getDocuments().forEach(documentEntities -> { final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map( entity -> { final PiiEntity piiEntity = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity, entity.getText()); PiiEntityPropertiesHelper.setCategory(piiEntity, PiiEntityCategory.fromString(entity.getCategory())); PiiEntityPropertiesHelper.setSubcategory(piiEntity, entity.getSubcategory()); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity, entity.getConfidenceScore()); PiiEntityPropertiesHelper.setOffset(piiEntity, entity.getOffset()); PiiEntityPropertiesHelper.setLength(piiEntity, entity.getLength()); return piiEntity; }) .collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); recognizeEntitiesResults.add(new RecognizePiiEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new PiiEntityCollection(new IterableStream<>(piiEntities), documentEntities.getRedactedText(), new IterableStream<>(warnings)) )); }); for (DocumentError documentError : piiEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return recognizeEntitiesResults; } public static RecognizeEntitiesResult toRecognizeEntitiesResult(DocumentEntities documentEntities) { return new RecognizeEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new CategorizedEntityCollection( new IterableStream<>(documentEntities.getEntities().stream().map(entity -> { final CategorizedEntity categorizedEntity = new CategorizedEntity(entity.getText(), EntityCategory.fromString(entity.getCategory()), entity.getSubcategory(), entity.getConfidenceScore()); CategorizedEntityPropertiesHelper.setLength(categorizedEntity, entity.getLength()); CategorizedEntityPropertiesHelper.setOffset(categorizedEntity, entity.getOffset()); return categorizedEntity; }).collect(Collectors.toList())), new IterableStream<>(documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))); } public static RecognizePiiEntitiesResultCollection toRecognizePiiEntitiesResultCollection( final PiiResult piiEntitiesResult) { final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); piiEntitiesResult.getDocuments().forEach(documentEntities -> { final List<PiiEntity> piiEntities = documentEntities.getEntities().stream().map(entity -> { final PiiEntity piiEntity = new PiiEntity(); PiiEntityPropertiesHelper.setText(piiEntity, entity.getText()); PiiEntityPropertiesHelper.setCategory(piiEntity, PiiEntityCategory.fromString(entity.getCategory())); PiiEntityPropertiesHelper.setSubcategory(piiEntity, entity.getSubcategory()); PiiEntityPropertiesHelper.setConfidenceScore(piiEntity, entity.getConfidenceScore()); PiiEntityPropertiesHelper.setOffset(piiEntity, entity.getOffset()); return piiEntity; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); recognizeEntitiesResults.add(new RecognizePiiEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null, new PiiEntityCollection(new IterableStream<>(piiEntities), documentEntities.getRedactedText(), new IterableStream<>(warnings)) )); }); for (DocumentError documentError : piiEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizePiiEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizePiiEntitiesResultCollection(recognizeEntitiesResults, piiEntitiesResult.getModelVersion(), piiEntitiesResult.getStatistics() == null ? null : toBatchStatistics(piiEntitiesResult.getStatistics())); } public static ExtractKeyPhrasesResultCollection toExtractKeyPhrasesResultCollection( final KeyPhraseResult keyPhraseResult) { final List<ExtractKeyPhraseResult> keyPhraseResultList = new ArrayList<>(); for (KeyPhraseResultDocumentsItem documentKeyPhrases : keyPhraseResult.getDocuments()) { final String documentId = documentKeyPhrases.getId(); keyPhraseResultList.add(new ExtractKeyPhraseResult( documentId, documentKeyPhrases.getStatistics() == null ? 
null : toTextDocumentStatistics(documentKeyPhrases.getStatistics()), null, new KeyPhrasesCollection( new IterableStream<>(documentKeyPhrases.getKeyPhrases()), new IterableStream<>(documentKeyPhrases.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()))))); } for (DocumentError documentError : keyPhraseResult.getErrors()) { keyPhraseResultList.add(new ExtractKeyPhraseResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new ExtractKeyPhrasesResultCollection(keyPhraseResultList, keyPhraseResult.getModelVersion(), keyPhraseResult.getStatistics() == null ? null : toBatchStatistics(keyPhraseResult.getStatistics())); } public static Response<RecognizeLinkedEntitiesResultCollection> toRecognizeLinkedEntitiesResultCollectionResponse( final Response<EntityLinkingResult> response) { final EntityLinkingResult entityLinkingResult = response.getValue(); return new SimpleResponse<>(response, new RecognizeLinkedEntitiesResultCollection(toRecognizeLinkedEntitiesResultCollection(entityLinkingResult), entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? null : toBatchStatistics(entityLinkingResult.getStatistics()))); } public static Response<RecognizeLinkedEntitiesResultCollection> toRecognizeLinkedEntitiesResultCollection( final Response<AnalyzeTextTaskResult> response) { final EntityLinkingResult entityLinkingResult = ((EntityLinkingTaskResult) response.getValue()).getResults(); return new SimpleResponse<>(response, new RecognizeLinkedEntitiesResultCollection(toRecognizeLinkedEntitiesResultCollection(entityLinkingResult), entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? null : toBatchStatistics(entityLinkingResult.getStatistics()))); } public static RecognizeLinkedEntitiesResultCollection toRecognizeLinkedEntitiesResultCollection( final EntityLinkingResult entityLinkingResult) { final List<RecognizeLinkedEntitiesResult> linkedEntitiesResults = entityLinkingResult.getDocuments().stream().map( documentLinkedEntities -> new RecognizeLinkedEntitiesResult( documentLinkedEntities.getId(), documentLinkedEntities.getStatistics() == null ? 
null : toTextDocumentStatistics(documentLinkedEntities.getStatistics()), null, new LinkedEntityCollection(new IterableStream<>( documentLinkedEntities.getEntities().stream().map( linkedEntity -> { final LinkedEntity entity = new LinkedEntity( linkedEntity.getName(), new IterableStream<>( linkedEntity.getMatches().stream().map( match -> { final LinkedEntityMatch linkedEntityMatch = new LinkedEntityMatch( match.getText(), match.getConfidenceScore()); LinkedEntityMatchPropertiesHelper.setOffset(linkedEntityMatch, match.getOffset()); LinkedEntityMatchPropertiesHelper.setLength(linkedEntityMatch, match.getLength()); return linkedEntityMatch; }).collect(Collectors.toList())), linkedEntity.getLanguage(), linkedEntity.getId(), linkedEntity.getUrl(), linkedEntity.getDataSource()); LinkedEntityPropertiesHelper.setBingEntitySearchApiId(entity, linkedEntity.getBingId()); return entity; }).collect(Collectors.toList())), new IterableStream<>(documentLinkedEntities.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList())))) ).collect(Collectors.toList()); for (DocumentError documentError : entityLinkingResult.getErrors()) { linkedEntitiesResults.add(new RecognizeLinkedEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new RecognizeLinkedEntitiesResultCollection(linkedEntitiesResults, entityLinkingResult.getModelVersion(), entityLinkingResult.getStatistics() == null ? null : toBatchStatistics(entityLinkingResult.getStatistics())); } /** * Helper method to convert {@link SentimentResponse} to {@link AnalyzeSentimentResultCollection}. * * @param sentimentResponse The {@link SentimentResponse}. * * @return A {@link AnalyzeSentimentResultCollection}. */ public static AnalyzeSentimentResultCollection toAnalyzeSentimentResultCollection( SentimentResponse sentimentResponse) { final List<AnalyzeSentimentResult> analyzeSentimentResults = new ArrayList<>(); final List<SentimentResponseDocumentsItem> documentSentiments = sentimentResponse.getDocuments(); for (SentimentResponseDocumentsItem documentSentiment : documentSentiments) { analyzeSentimentResults.add(toAnalyzeSentimentResult(documentSentiment, documentSentiments)); } for (DocumentError documentError : sentimentResponse.getErrors()) { analyzeSentimentResults.add(new AnalyzeSentimentResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } return new AnalyzeSentimentResultCollection(analyzeSentimentResults, sentimentResponse.getModelVersion(), sentimentResponse.getStatistics() == null ? null : toBatchStatistics(sentimentResponse.getStatistics())); } /** * Helper method to convert {@link ExtractiveSummarizationResult} to {@link ExtractSummaryResultCollection}. * * @param extractiveSummarizationResult The {@link ExtractiveSummarizationResult}. * * @return A {@link ExtractSummaryResultCollection}. 
*/ public static ExtractSummaryResultCollection toExtractSummaryResultCollection( ExtractiveSummarizationResult extractiveSummarizationResult) { final List<ExtractSummaryResult> extractSummaryResults = new ArrayList<>(); final List<ExtractiveSummarizationResultDocumentsItem> extractedDocumentSummaries = extractiveSummarizationResult.getDocuments(); for (ExtractiveSummarizationResultDocumentsItem documentSummary : extractedDocumentSummaries) { extractSummaryResults.add(toExtractSummaryResult(documentSummary)); } for (DocumentError documentError : extractiveSummarizationResult.getErrors()) { extractSummaryResults.add(new ExtractSummaryResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } return new ExtractSummaryResultCollection(extractSummaryResults, extractiveSummarizationResult.getModelVersion(), extractiveSummarizationResult.getStatistics() == null ? null : toBatchStatistics(extractiveSummarizationResult.getStatistics())); } /** * Transfer {@link HealthcareResult} into {@link AnalyzeHealthcareEntitiesResultCollection}. * * @param healthcareResult the service side raw data, HealthcareResult. * * @return the client side explored model, AnalyzeHealthcareEntitiesResultCollection. */ public static AnalyzeHealthcareEntitiesResultCollection toAnalyzeHealthcareEntitiesResultCollection( HealthcareResult healthcareResult) { List<AnalyzeHealthcareEntitiesResult> analyzeHealthcareEntitiesResults = new ArrayList<>(); healthcareResult.getDocuments().forEach( documentEntities -> { final AnalyzeHealthcareEntitiesResult analyzeHealthcareEntitiesResult = new AnalyzeHealthcareEntitiesResult( documentEntities.getId(), documentEntities.getStatistics() == null ? null : toTextDocumentStatistics(documentEntities.getStatistics()), null); final List<TextAnalyticsWarning> warnings = documentEntities.getWarnings().stream().map( textAnalyticsWarning -> new TextAnalyticsWarning( Optional.ofNullable(textAnalyticsWarning.getCode()) .map(warningCodeValue -> WarningCode.fromString(warningCodeValue.toString())) .orElse(null), textAnalyticsWarning.getMessage()) ).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setWarnings(analyzeHealthcareEntitiesResult, IterableStream.of(warnings)); final List<HealthcareEntity> healthcareEntities = documentEntities.getEntities().stream().map( entity -> { final HealthcareEntity healthcareEntity = new HealthcareEntity(); HealthcareEntityPropertiesHelper.setText(healthcareEntity, entity.getText()); HealthcareEntityPropertiesHelper.setNormalizedText(healthcareEntity, entity.getName()); if (entity.getCategory() != null) { HealthcareEntityPropertiesHelper.setCategory(healthcareEntity, HealthcareEntityCategory.fromString(entity.getCategory().toString())); } HealthcareEntityPropertiesHelper.setConfidenceScore(healthcareEntity, entity.getConfidenceScore()); HealthcareEntityPropertiesHelper.setOffset(healthcareEntity, entity.getOffset()); HealthcareEntityPropertiesHelper.setLength(healthcareEntity, entity.getLength()); final List<EntityDataSource> entityDataSources = Optional.ofNullable(entity.getLinks()).map( links -> links.stream().map( link -> { final EntityDataSource dataSource = new EntityDataSource(); EntityDataSourcePropertiesHelper.setName(dataSource, link.getDataSource()); EntityDataSourcePropertiesHelper.setEntityId(dataSource, link.getId()); return dataSource; } ).collect(Collectors.toList())) .orElse(new ArrayList<>()); HealthcareEntityPropertiesHelper.setDataSources(healthcareEntity, IterableStream.of(entityDataSources)); 
final HealthcareAssertion assertion = entity.getAssertion(); if (assertion != null) { HealthcareEntityPropertiesHelper.setAssertion(healthcareEntity, toHealthcareEntityAssertion(assertion)); } return healthcareEntity; }).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntities(analyzeHealthcareEntitiesResult, IterableStream.of(healthcareEntities)); final List<HealthcareEntityRelation> healthcareEntityRelations = documentEntities.getRelations().stream().map( healthcareRelation -> { final HealthcareEntityRelation entityRelation = new HealthcareEntityRelation(); final RelationType relationType = healthcareRelation.getRelationType(); if (relationType != null) { HealthcareEntityRelationPropertiesHelper.setRelationType(entityRelation, HealthcareEntityRelationType.fromString(relationType.toString())); } final List<HealthcareEntityRelationRole> relationRoles = healthcareRelation.getEntities().stream().map( relationEntity -> { final HealthcareEntityRelationRole relationRole = new HealthcareEntityRelationRole(); HealthcareEntityRelationRolePropertiesHelper.setName(relationRole, relationEntity.getRole()); HealthcareEntityRelationRolePropertiesHelper.setEntity(relationRole, healthcareEntities.get(getHealthcareEntityIndex(relationEntity.getRef()))); return relationRole; }).collect(Collectors.toList()); HealthcareEntityRelationPropertiesHelper.setRoles(entityRelation, IterableStream.of(relationRoles)); return entityRelation; }).collect(Collectors.toList()); AnalyzeHealthcareEntitiesResultPropertiesHelper.setEntityRelations(analyzeHealthcareEntitiesResult, IterableStream.of(healthcareEntityRelations)); analyzeHealthcareEntitiesResults.add(analyzeHealthcareEntitiesResult); }); healthcareResult.getErrors().forEach(documentError -> analyzeHealthcareEntitiesResults.add(new AnalyzeHealthcareEntitiesResult( documentError.getId(), null, toTextAnalyticsError(documentError.getError()))) ); return new AnalyzeHealthcareEntitiesResultCollection(IterableStream.of(analyzeHealthcareEntitiesResults)); } public static HealthcareEntityAssertion toHealthcareEntityAssertion(HealthcareAssertion healthcareAssertion) { final Association association = healthcareAssertion.getAssociation(); final Certainty certainty = healthcareAssertion.getCertainty(); final Conditionality conditionality = healthcareAssertion.getConditionality(); final HealthcareEntityAssertion entityAssertion = new HealthcareEntityAssertion(); if (association != null) { HealthcareEntityAssertionPropertiesHelper.setAssociation(entityAssertion, EntityAssociation.fromString(association.toString())); } if (certainty != null) { HealthcareEntityAssertionPropertiesHelper.setCertainty(entityAssertion, toCertainty(certainty)); } if (conditionality != null) { HealthcareEntityAssertionPropertiesHelper.setConditionality(entityAssertion, toConditionality(conditionality)); } return entityAssertion; } private static EntityCertainty toCertainty(Certainty certainty) { EntityCertainty entityCertainty1 = null; switch (certainty) { case POSITIVE: entityCertainty1 = EntityCertainty.POSITIVE; break; case POSITIVE_POSSIBLE: entityCertainty1 = EntityCertainty.POSITIVE_POSSIBLE; break; case NEUTRAL_POSSIBLE: entityCertainty1 = EntityCertainty.NEUTRAL_POSSIBLE; break; case NEGATIVE_POSSIBLE: entityCertainty1 = EntityCertainty.NEGATIVE_POSSIBLE; break; case NEGATIVE: entityCertainty1 = EntityCertainty.NEGATIVE; break; default: break; } return entityCertainty1; } private static EntityConditionality toConditionality(Conditionality conditionality) { 
EntityConditionality conditionality1 = null; switch (conditionality) { case HYPOTHETICAL: conditionality1 = EntityConditionality.HYPOTHETICAL; break; case CONDITIONAL: conditionality1 = EntityConditionality.CONDITIONAL; break; default: break; } return conditionality1; } /** * Helper function that parse healthcare entity index from the given entity reference string. * The entity reference format is " * * @param entityReference the given healthcare entity reference string. * * @return the healthcare entity index. */ private static Integer getHealthcareEntityIndex(String entityReference) { if (!CoreUtils.isNullOrEmpty(entityReference)) { int lastIndex = entityReference.lastIndexOf('/'); if (lastIndex != -1) { return Integer.parseInt(entityReference.substring(lastIndex + 1)); } } throw LOGGER.logExceptionAsError( new RuntimeException("Failed to parse healthcare entity index from: " + entityReference)); } /** * Get the non-null {@link Context}. The default value is {@link Context * * @param context It offers a means of passing arbitrary data (key-value pairs) to pipeline policies. * Most applications do not need to pass arbitrary data to the pipeline and can pass Context.NONE or null. * * @return The Context. */ public static Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } /** * Helper function which retrieves the size of an {@link Iterable}. * * @param documents The iterable of documents. * @return Count of documents in the iterable. */ public static int getDocumentCount(Iterable<?> documents) { if (documents instanceof Collection) { return ((Collection<?>) documents).size(); } else { final int[] count = new int[] { 0 }; documents.forEach(ignored -> count[0] += 1); return count[0]; } } /** * Helper function which convert the {@code Iterable<PiiEntityCategory>} to {@code List<PiiCategory>}. * * @param categoriesFilter the iterable of {@link PiiEntityCategory}. * @return the list of {@link PiiCategory}. */ public static List<PiiCategory> toCategoriesFilter(Iterable<PiiEntityCategory> categoriesFilter) { if (categoriesFilter == null) { return null; } final List<PiiCategory> piiCategories = new ArrayList<>(); categoriesFilter.forEach(category -> piiCategories.add(PiiCategory.fromString(category.toString()))); return piiCategories; } /** * Helper method to convert the service response of {@link DocumentSentiment} to {@link AnalyzeSentimentResult}. * * @param documentSentiment The {@link SentimentResponseDocumentsItem} returned by the service. * @param documentSentimentList The document sentiment list returned by the service. * * @return The {@link AnalyzeSentimentResult} to be returned by the SDK. */ private static AnalyzeSentimentResult toAnalyzeSentimentResult(SentimentResponseDocumentsItem documentSentiment, List<SentimentResponseDocumentsItem> documentSentimentList) { final SentimentConfidenceScorePerLabel confidenceScorePerLabel = documentSentiment.getConfidenceScores(); final List<SentenceSentiment> sentenceSentiments = documentSentiment.getSentences().stream() .map(sentenceSentiment -> { final SentimentConfidenceScorePerLabel confidenceScorePerSentence = sentenceSentiment.getConfidenceScores(); final SentenceSentimentValue sentenceSentimentValue = sentenceSentiment.getSentiment(); final SentenceSentiment sentenceSentiment1 = new SentenceSentiment(sentenceSentiment.getText(), TextSentiment.fromString(sentenceSentimentValue == null ? 
null : sentenceSentimentValue.toString()), new SentimentConfidenceScores(confidenceScorePerSentence.getNegative(), confidenceScorePerSentence.getNeutral(), confidenceScorePerSentence.getPositive())); SentenceSentimentPropertiesHelper.setOpinions(sentenceSentiment1, toSentenceOpinionList(sentenceSentiment, documentSentimentList)); SentenceSentimentPropertiesHelper.setOffset(sentenceSentiment1, sentenceSentiment.getOffset()); SentenceSentimentPropertiesHelper.setLength(sentenceSentiment1, sentenceSentiment.getLength()); return sentenceSentiment1; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentSentiment.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final DocumentSentimentValue documentSentimentValue = documentSentiment.getSentiment(); return new AnalyzeSentimentResult( documentSentiment.getId(), documentSentiment.getStatistics() == null ? null : toTextDocumentStatistics(documentSentiment.getStatistics()), null, new com.azure.ai.textanalytics.models.DocumentSentiment( TextSentiment.fromString(documentSentimentValue == null ? null : documentSentimentValue.toString()), new SentimentConfidenceScores( confidenceScorePerLabel.getNegative(), confidenceScorePerLabel.getNeutral(), confidenceScorePerLabel.getPositive()), new IterableStream<>(sentenceSentiments), new IterableStream<>(warnings) )); } /* * Transform SentenceSentiment's opinion mining to output that user can use. */ private static IterableStream<SentenceOpinion> toSentenceOpinionList( com.azure.ai.textanalytics.implementation.models.SentenceSentiment sentenceSentiment, List<SentimentResponseDocumentsItem> documentSentimentList) { final List<SentenceTarget> sentenceTargets = sentenceSentiment.getTargets(); if (sentenceTargets == null) { return null; } final List<SentenceOpinion> sentenceOpinions = new ArrayList<>(); sentenceTargets.forEach(sentenceTarget -> { final List<AssessmentSentiment> assessmentSentiments = new ArrayList<>(); sentenceTarget.getRelations().forEach(targetRelation -> { final TargetRelationType targetRelationType = targetRelation.getRelationType(); final String opinionPointer = targetRelation.getRef(); if (TargetRelationType.ASSESSMENT == targetRelationType) { assessmentSentiments.add(toAssessmentSentiment( findSentimentAssessment(opinionPointer, documentSentimentList))); } }); final TargetSentiment targetSentiment = new TargetSentiment(); TargetSentimentPropertiesHelper.setText(targetSentiment, sentenceTarget.getText()); TargetSentimentPropertiesHelper.setSentiment(targetSentiment, TextSentiment.fromString(sentenceTarget.getSentiment().toString())); TargetSentimentPropertiesHelper.setConfidenceScores(targetSentiment, toSentimentConfidenceScores(sentenceTarget.getConfidenceScores())); TargetSentimentPropertiesHelper.setOffset(targetSentiment, sentenceTarget.getOffset()); TargetSentimentPropertiesHelper.setLength(targetSentiment, sentenceTarget.getLength()); final SentenceOpinion sentenceOpinion = new SentenceOpinion(); SentenceOpinionPropertiesHelper.setTarget(sentenceOpinion, targetSentiment); SentenceOpinionPropertiesHelper.setAssessments(sentenceOpinion, new IterableStream<>(assessmentSentiments)); sentenceOpinions.add(sentenceOpinion); }); return new IterableStream<>(sentenceOpinions); } /* * Transform type TargetConfidenceScoreLabel to SentimentConfidenceScores. 
*/ private static SentimentConfidenceScores toSentimentConfidenceScores( TargetConfidenceScoreLabel targetConfidenceScoreLabel) { return new SentimentConfidenceScores(targetConfidenceScoreLabel.getNegative(), NEUTRAL_SCORE_ZERO, targetConfidenceScoreLabel.getPositive()); } /* * Transform type SentenceOpinion to OpinionSentiment. */ private static AssessmentSentiment toAssessmentSentiment(SentenceAssessment sentenceAssessment) { final AssessmentSentiment assessmentSentiment = new AssessmentSentiment(); AssessmentSentimentPropertiesHelper.setText(assessmentSentiment, sentenceAssessment.getText()); AssessmentSentimentPropertiesHelper.setSentiment(assessmentSentiment, TextSentiment.fromString(sentenceAssessment.getSentiment().toString())); AssessmentSentimentPropertiesHelper.setConfidenceScores(assessmentSentiment, toSentimentConfidenceScores(sentenceAssessment.getConfidenceScores())); AssessmentSentimentPropertiesHelper.setNegated(assessmentSentiment, sentenceAssessment.isNegated()); AssessmentSentimentPropertiesHelper.setOffset(assessmentSentiment, sentenceAssessment.getOffset()); AssessmentSentimentPropertiesHelper.setLength(assessmentSentiment, sentenceAssessment.getLength()); return assessmentSentiment; } private static ExtractSummaryResult toExtractSummaryResult( ExtractiveSummarizationResultDocumentsItem documentSummary) { final List<ExtractedSummarySentence> sentences = documentSummary.getSentences(); final List<SummarySentence> summarySentences = sentences.stream().map(sentence -> { final SummarySentence summarySentence = new SummarySentence(); SummarySentencePropertiesHelper.setText(summarySentence, sentence.getText()); SummarySentencePropertiesHelper.setRankScore(summarySentence, sentence.getRankScore()); SummarySentencePropertiesHelper.setLength(summarySentence, sentence.getLength()); SummarySentencePropertiesHelper.setOffset(summarySentence, sentence.getOffset()); return summarySentence; }).collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = documentSummary.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final SummarySentenceCollection summarySentenceCollection = new SummarySentenceCollection( new IterableStream<>(summarySentences), new IterableStream<>(warnings) ); final ExtractSummaryResult extractSummaryResult = new ExtractSummaryResult(documentSummary.getId(), documentSummary.getStatistics() == null ? null : toTextDocumentStatistics(documentSummary.getStatistics()), null ); ExtractSummaryResultPropertiesHelper.setSentences(extractSummaryResult, summarySentenceCollection); return extractSummaryResult; } /** * Helper method to convert {@link CustomEntitiesResult} to {@link RecognizeCustomEntitiesResultCollection}. * * @param customEntitiesResult The {@link CustomEntitiesResult}. * * @return A {@link RecognizeCustomEntitiesResultCollection}. 
*/ public static RecognizeCustomEntitiesResultCollection toRecognizeCustomEntitiesResultCollection( CustomEntitiesResult customEntitiesResult) { final List<RecognizeEntitiesResult> recognizeEntitiesResults = new ArrayList<>(); final List<CustomEntitiesResultDocumentsItem> customEntitiesResultDocuments = customEntitiesResult.getDocuments(); for (CustomEntitiesResultDocumentsItem documentSummary : customEntitiesResultDocuments) { recognizeEntitiesResults.add(toRecognizeEntitiesResult(documentSummary)); } for (DocumentError documentError : customEntitiesResult.getErrors()) { recognizeEntitiesResults.add(new RecognizeEntitiesResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()), null)); } final RecognizeCustomEntitiesResultCollection resultCollection = new RecognizeCustomEntitiesResultCollection(recognizeEntitiesResults); RecognizeCustomEntitiesResultCollectionPropertiesHelper.setProjectName(resultCollection, customEntitiesResult.getProjectName()); RecognizeCustomEntitiesResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customEntitiesResult.getDeploymentName()); if (customEntitiesResult.getStatistics() != null) { RecognizeCustomEntitiesResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customEntitiesResult.getStatistics())); } return resultCollection; } /** * Helper method to convert {@link CustomSingleClassificationResult} to * {@link SingleCategoryClassifyResultCollection}. * * @param customSingleClassificationResult The {@link CustomSingleClassificationResult}. * * @return A {@link SingleCategoryClassifyResultCollection}. */ public static SingleCategoryClassifyResultCollection toSingleCategoryClassifyResultCollection( CustomSingleLabelClassificationResult customSingleClassificationResult) { final List<SingleCategoryClassifyResult> singleCategoryClassifyResults = new ArrayList<>(); final List<CustomSingleLabelClassificationResultDocumentsItem> singleClassificationDocuments = customSingleClassificationResult.getDocuments(); for (CustomSingleLabelClassificationResultDocumentsItem documentSummary : singleClassificationDocuments) { singleCategoryClassifyResults.add(toSingleCategoryClassifyResult(documentSummary)); } for (DocumentError documentError : customSingleClassificationResult.getErrors()) { singleCategoryClassifyResults.add(new SingleCategoryClassifyResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } final SingleCategoryClassifyResultCollection resultCollection = new SingleCategoryClassifyResultCollection(singleCategoryClassifyResults); SingleCategoryClassifyResultCollectionPropertiesHelper.setProjectName(resultCollection, customSingleClassificationResult.getProjectName()); SingleCategoryClassifyResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customSingleClassificationResult.getDeploymentName()); if (customSingleClassificationResult.getStatistics() != null) { SingleCategoryClassifyResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customSingleClassificationResult.getStatistics())); } return resultCollection; } private static SingleCategoryClassifyResult toSingleCategoryClassifyResult( CustomSingleLabelClassificationResultDocumentsItem singleClassificationDocument) { final ClassificationResult classificationResult = singleClassificationDocument.getClassProperty(); final List<TextAnalyticsWarning> warnings = singleClassificationDocument.getWarnings().stream().map( warning -> 
toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final SingleCategoryClassifyResult singleCategoryClassifyResult = new SingleCategoryClassifyResult( singleClassificationDocument.getId(), singleClassificationDocument.getStatistics() == null ? null : toTextDocumentStatistics(singleClassificationDocument.getStatistics()), null); SingleCategoryClassifyResultPropertiesHelper.setClassification(singleCategoryClassifyResult, toDocumentClassification(classificationResult)); SingleCategoryClassifyResultPropertiesHelper.setWarnings(singleCategoryClassifyResult, new IterableStream<>(warnings)); return singleCategoryClassifyResult; } private static ClassificationCategory toDocumentClassification(ClassificationResult classificationResult) { final ClassificationCategory classificationCategory = new ClassificationCategory(); ClassificationCategoryPropertiesHelper.setCategory(classificationCategory, classificationResult.getCategory()); ClassificationCategoryPropertiesHelper.setConfidenceScore(classificationCategory, classificationResult.getConfidenceScore()); return classificationCategory; } /** * Helper method to convert {@link CustomMultiClassificationResult} to * {@link MultiCategoryClassifyResultCollection}. * * @param customMultiClassificationResult The {@link CustomMultiClassificationResult}. * * @return A {@link SingleCategoryClassifyResultCollection}. */ public static MultiCategoryClassifyResultCollection toMultiCategoryClassifyResultCollection( CustomMultiLabelClassificationResult customMultiClassificationResult) { final List<MultiCategoryClassifyResult> multiCategoryClassifyResults = new ArrayList<>(); final List<CustomMultiLabelClassificationResultDocumentsItem> multiClassificationDocuments = customMultiClassificationResult.getDocuments(); for (CustomMultiLabelClassificationResultDocumentsItem multiClassificationDocument : multiClassificationDocuments) { multiCategoryClassifyResults.add(toMultiCategoryClassifyResult(multiClassificationDocument)); } for (DocumentError documentError : customMultiClassificationResult.getErrors()) { multiCategoryClassifyResults.add(new MultiCategoryClassifyResult(documentError.getId(), null, toTextAnalyticsError(documentError.getError()))); } final MultiCategoryClassifyResultCollection resultCollection = new MultiCategoryClassifyResultCollection(multiCategoryClassifyResults); MultiCategoryClassifyResultCollectionPropertiesHelper.setProjectName(resultCollection, customMultiClassificationResult.getProjectName()); MultiCategoryClassifyResultCollectionPropertiesHelper.setDeploymentName(resultCollection, customMultiClassificationResult.getDeploymentName()); if (customMultiClassificationResult.getStatistics() != null) { MultiCategoryClassifyResultCollectionPropertiesHelper.setStatistics(resultCollection, toBatchStatistics(customMultiClassificationResult.getStatistics())); } return resultCollection; } private static MultiCategoryClassifyResult toMultiCategoryClassifyResult( CustomMultiLabelClassificationResultDocumentsItem multiClassificationDocument) { final List<ClassificationCategory> classificationCategories = multiClassificationDocument .getClassProperty() .stream() .map(classificationResult -> toDocumentClassification(classificationResult)) .collect(Collectors.toList()); final List<TextAnalyticsWarning> warnings = multiClassificationDocument.getWarnings().stream().map( warning -> toTextAnalyticsWarning(warning)).collect(Collectors.toList()); final MultiCategoryClassifyResult classifySingleCategoryResult = new MultiCategoryClassifyResult( 
multiClassificationDocument.getId(), multiClassificationDocument.getStatistics() == null ? null : toTextDocumentStatistics(multiClassificationDocument.getStatistics()), null); final ClassificationCategoryCollection classifications = new ClassificationCategoryCollection( new IterableStream<>(classificationCategories)); ClassificationCategoryCollectionPropertiesHelper.setWarnings(classifications, new IterableStream<>(warnings)); MultiCategoryClassifyResultPropertiesHelper.setClassifications(classifySingleCategoryResult, classifications); return classifySingleCategoryResult; } /* * Parses the reference pointer to an index array that contains document, sentence, and opinion indexes. */ public static int[] parseRefPointerToIndexArray(String assessmentPointer) { final Matcher matcher = PATTERN.matcher(assessmentPointer); final boolean isMatched = matcher.find(); final int[] result = new int[3]; if (isMatched) { result[0] = Integer.parseInt(matcher.group(1)); result[1] = Integer.parseInt(matcher.group(2)); result[2] = Integer.parseInt(matcher.group(3)); } else { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("'%s' is not a valid assessment pointer.", assessmentPointer))); } return result; } /* * Find the specific sentence assessment in the document sentiment list by given the assessment reference pointer. */ public static SentenceAssessment findSentimentAssessment(String assessmentPointer, List<SentimentResponseDocumentsItem> documentSentiments) { final int[] assessmentIndexes = parseRefPointerToIndexArray(assessmentPointer); final int documentIndex = assessmentIndexes[0]; final int sentenceIndex = assessmentIndexes[1]; final int assessmentIndex = assessmentIndexes[2]; if (documentIndex >= documentSentiments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid document index '%s' in '%s'.", documentIndex, assessmentPointer))); } final SentimentResponseDocumentsItem documentsentiment = documentSentiments.get(documentIndex); final List<com.azure.ai.textanalytics.implementation.models.SentenceSentiment> sentenceSentiments = documentsentiment.getSentences(); if (sentenceIndex >= sentenceSentiments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid sentence index '%s' in '%s'.", sentenceIndex, assessmentPointer))); } final List<SentenceAssessment> assessments = sentenceSentiments.get(sentenceIndex).getAssessments(); if (assessmentIndex >= assessments.size()) { throw LOGGER.logExceptionAsError(new IllegalStateException( String.format("Invalid assessment index '%s' in '%s'.", assessmentIndex, assessmentPointer))); } return assessments.get(assessmentIndex); } }
We should also make a note of the service version default in the README.
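A minimal sketch of what such a README note could show (the pinned version below is only an illustration, and it assumes the builder exposes the usual serviceVersion(...) setter):

// By default the builder targets the newest service version; callers who need the
// stable v3.1 REST API can pin it explicitly instead of relying on the default.
TextAnalyticsClient client = new TextAnalyticsClientBuilder()
    .credential(new AzureKeyCredential("{key}"))
    .endpoint("{endpoint}")
    .serviceVersion(TextAnalyticsServiceVersion.V3_1)
    .buildClient();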
private boolean isConsolidatedServiceVersion(TextAnalyticsServiceVersion serviceVersion) { if (serviceVersion == null) { serviceVersion = TextAnalyticsServiceVersion.V2022_04_01_PREVIEW; } return !(TextAnalyticsServiceVersion.V3_0 == serviceVersion || TextAnalyticsServiceVersion.V3_1 == serviceVersion); }
serviceVersion = TextAnalyticsServiceVersion.V2022_04_01_PREVIEW;
private boolean isConsolidatedServiceVersion(TextAnalyticsServiceVersion serviceVersion) { if (serviceVersion == null) { serviceVersion = TextAnalyticsServiceVersion.V2022_04_01_PREVIEW; } return !(TextAnalyticsServiceVersion.V3_0 == serviceVersion || TextAnalyticsServiceVersion.V3_1 == serviceVersion); }
class that implements this HttpTrait * interface. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline}
class that implements this HttpTrait * interface. * * <p><strong>Note:</strong> It is important to understand the precedence order of the HttpTrait APIs. In * particular, if a {@link HttpPipeline}
Is `setModelVersion()` no longer supported?
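A hedged illustration of what the reviewer appears to be asking about; it assumes ExtractKeyPhrasesAction still exposes a setModelVersion(String) setter, which is exactly what the question puts in doubt:

// If the action still supports pinning a model version, the sample could set it explicitly:
TextAnalyticsActions actions = new TextAnalyticsActions()
    .setDisplayName("{tasks_display_name}")
    .setExtractKeyPhrasesActions(new ExtractKeyPhrasesAction().setModelVersion("latest"));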
public static void main(String[] args) { TextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder() .credential(new AzureKeyCredential("{key}")) .endpoint("{endpoint}") .buildAsyncClient(); List<TextDocumentInput> documents = new ArrayList<>(); for (int i = 0; i < 21; i++) { documents.add(new TextDocumentInput(Integer.toString(i), "We went to Contoso Steakhouse located at midtown NYC last week for a dinner party, and we adore" + " the spot! They provide marvelous food and they have a great menu. The chief cook happens to be" + " the owner (I think his name is John Doe) and he is super nice, coming out of the kitchen and " + "greeted us all. We enjoyed very much dining in the place! The Sirloin steak I ordered was tender" + " and juicy, and the place was impeccably clean. You can even pre-order from their online menu at" + " www.contososteakhouse.com, call 312-555-0176 or send email to order@contososteakhouse.com! The" + " only complaint I have is the food didn't come fast enough. Overall I highly recommend it!" )); } client.beginAnalyzeActions(documents, new TextAnalyticsActions() .setDisplayName("{tasks_display_name}") .setExtractKeyPhrasesActions(new ExtractKeyPhrasesAction()), new AnalyzeActionsOptions()) .flatMap(result -> { AnalyzeActionsOperationDetail operationDetail = result.getValue(); System.out.printf("Action display name: %s, Successfully completed actions: %d, in-process actions: %d," + " failed actions: %d, total actions: %d%n", operationDetail.getDisplayName(), operationDetail.getSucceededCount(), operationDetail.getInProgressCount(), operationDetail.getFailedCount(), operationDetail.getTotalCount()); return result.getFinalResult(); }) .flatMap(analyzeActionsResultPagedFlux -> analyzeActionsResultPagedFlux.byPage()) .subscribe( perPage -> processAnalyzeActionsResult(perPage), ex -> System.out.println("Error listing pages: " + ex.getMessage()), () -> System.out.println("Successfully listed all pages")); try { TimeUnit.MINUTES.sleep(5); } catch (InterruptedException e) { e.printStackTrace(); } }
.setExtractKeyPhrasesActions(new ExtractKeyPhrasesAction()),
public static void main(String[] args) { TextAnalyticsAsyncClient client = new TextAnalyticsClientBuilder() .credential(new AzureKeyCredential("{key}")) .endpoint("{endpoint}") .buildAsyncClient(); List<TextDocumentInput> documents = new ArrayList<>(); for (int i = 0; i < 21; i++) { documents.add(new TextDocumentInput(Integer.toString(i), "We went to Contoso Steakhouse located at midtown NYC last week for a dinner party, and we adore" + " the spot! They provide marvelous food and they have a great menu. The chief cook happens to be" + " the owner (I think his name is John Doe) and he is super nice, coming out of the kitchen and " + "greeted us all. We enjoyed very much dining in the place! The Sirloin steak I ordered was tender" + " and juicy, and the place was impeccably clean. You can even pre-order from their online menu at" + " www.contososteakhouse.com, call 312-555-0176 or send email to order@contososteakhouse.com! The" + " only complaint I have is the food didn't come fast enough. Overall I highly recommend it!" )); } client.beginAnalyzeActions(documents, new TextAnalyticsActions() .setDisplayName("{tasks_display_name}") .setRecognizeEntitiesActions(new RecognizeEntitiesAction()) .setExtractKeyPhrasesActions(new ExtractKeyPhrasesAction()), new AnalyzeActionsOptions()) .flatMap(result -> { AnalyzeActionsOperationDetail operationDetail = result.getValue(); System.out.printf("Action display name: %s, Successfully completed actions: %d, in-process actions: %d," + " failed actions: %d, total actions: %d%n", operationDetail.getDisplayName(), operationDetail.getSucceededCount(), operationDetail.getInProgressCount(), operationDetail.getFailedCount(), operationDetail.getTotalCount()); return result.getFinalResult(); }) .flatMap(analyzeActionsResultPagedFlux -> analyzeActionsResultPagedFlux.byPage()) .subscribe( perPage -> processAnalyzeActionsResult(perPage), ex -> System.out.println("Error listing pages: " + ex.getMessage()), () -> System.out.println("Successfully listed all pages")); try { TimeUnit.MINUTES.sleep(5); } catch (InterruptedException e) { e.printStackTrace(); } }
class AnalyzeActionsAsync { /** * Main method to invoke this demo about how to analyze a batch of tasks. * * @param args Unused arguments to the program. */ private static void processAnalyzeActionsResult(PagedResponse<AnalyzeActionsResult> perPage) { System.out.printf("Response code: %d, Continuation Token: %s.%n", perPage.getStatusCode(), perPage.getContinuationToken()); for (AnalyzeActionsResult actionsResult : perPage.getElements()) { System.out.println("Entities recognition action results:"); for (RecognizeEntitiesActionResult actionResult : actionsResult.getRecognizeEntitiesResults()) { if (!actionResult.isError()) { for (RecognizeEntitiesResult documentResult : actionResult.getDocumentsResults()) { if (!documentResult.isError()) { for (CategorizedEntity entity : documentResult.getEntities()) { System.out.printf("\tText: %s, category: %s, confidence score: %f.%n", entity.getText(), entity.getCategory(), entity.getConfidenceScore()); } } else { System.out.printf("\tCannot recognize entities. Error: %s%n", documentResult.getError().getMessage()); } } } else { System.out.printf("\tCannot execute Entities Recognition action. Error: %s%n", actionResult.getError().getMessage()); } } System.out.println("Key phrases extraction action results:"); for (ExtractKeyPhrasesActionResult actionResult : actionsResult.getExtractKeyPhrasesResults()) { if (!actionResult.isError()) { for (ExtractKeyPhraseResult documentResult : actionResult.getDocumentsResults()) { if (!documentResult.isError()) { System.out.println("\tExtracted phrases:"); for (String keyPhrases : documentResult.getKeyPhrases()) { System.out.printf("\t\t%s.%n", keyPhrases); } } else { System.out.printf("\tCannot extract key phrases. Error: %s%n", documentResult.getError().getMessage()); } } } else { System.out.printf("\tCannot execute Key Phrases Extraction action. Error: %s%n", actionResult.getError().getMessage()); } } } } }
class AnalyzeActionsAsync { /** * Main method to invoke this demo about how to analyze a batch of tasks. * * @param args Unused arguments to the program. */ private static void processAnalyzeActionsResult(PagedResponse<AnalyzeActionsResult> perPage) { System.out.printf("Response code: %d, Continuation Token: %s.%n", perPage.getStatusCode(), perPage.getContinuationToken()); for (AnalyzeActionsResult actionsResult : perPage.getElements()) { System.out.println("Entities recognition action results:"); for (RecognizeEntitiesActionResult actionResult : actionsResult.getRecognizeEntitiesResults()) { if (!actionResult.isError()) { for (RecognizeEntitiesResult documentResult : actionResult.getDocumentsResults()) { if (!documentResult.isError()) { for (CategorizedEntity entity : documentResult.getEntities()) { System.out.printf("\tText: %s, category: %s, confidence score: %f.%n", entity.getText(), entity.getCategory(), entity.getConfidenceScore()); } } else { System.out.printf("\tCannot recognize entities. Error: %s%n", documentResult.getError().getMessage()); } } } else { System.out.printf("\tCannot execute Entities Recognition action. Error: %s%n", actionResult.getError().getMessage()); } } System.out.println("Key phrases extraction action results:"); for (ExtractKeyPhrasesActionResult actionResult : actionsResult.getExtractKeyPhrasesResults()) { if (!actionResult.isError()) { for (ExtractKeyPhraseResult documentResult : actionResult.getDocumentsResults()) { if (!documentResult.isError()) { System.out.println("\tExtracted phrases:"); for (String keyPhrases : documentResult.getKeyPhrases()) { System.out.printf("\t\t%s.%n", keyPhrases); } } else { System.out.printf("\tCannot extract key phrases. Error: %s%n", documentResult.getError().getMessage()); } } } else { System.out.printf("\tCannot execute Key Phrases Extraction action. Error: %s%n", actionResult.getError().getMessage()); } } } } }
Should we check whether this is null to avoid an NPE?
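A minimal sketch of the kind of guard the comment suggests; the helper name and exception message are hypothetical and only illustrate validating the parsed target pair before boxing the index:

// Hypothetical guard: fail with a descriptive error instead of an NPE/NumberFormatException
// when the action error target could not be parsed into a task name and index.
private static int toTaskIndex(String[] targetPair) {
    if (targetPair == null || targetPair.length < 2 || targetPair[1] == null) {
        throw new IllegalStateException("Malformed action error target; cannot resolve the task index.");
    }
    return Integer.parseInt(targetPair[1]);
}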
private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } 
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult = (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); 
extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if 
(CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? null : error.getCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; }
final Integer taskIndex = Integer.valueOf(targetPair[1]);
private AnalyzeActionsResult toAnalyzeActionsResultLanguageApi(AnalyzeTextJobState analyzeJobState) { final TasksStateTasks tasksStateTasks = analyzeJobState.getTasks(); final List<AnalyzeTextLROResult> tasksResults = tasksStateTasks.getItems(); final List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); final List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); final List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); final List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); final List<AnalyzeHealthcareEntitiesActionResult> analyzeHealthcareEntitiesActionResults = new ArrayList<>(); final List<AnalyzeSentimentActionResult> analyzeSentimentActionResults = new ArrayList<>(); final List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); final List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); final List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); final List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(tasksResults)) { for (int i = 0; i < tasksResults.size(); i++) { final AnalyzeTextLROResult taskResult = tasksResults.get(i); if (taskResult instanceof EntityRecognitionLROResult) { final EntityRecognitionLROResult entityTaskResult = (EntityRecognitionLROResult) taskResult; final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = entityTaskResult.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityTaskResult.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomEntityRecognitionLROResult) { final CustomEntityRecognitionLROResult customEntityTaskResult = (CustomEntityRecognitionLROResult) taskResult; final RecognizeCustomEntitiesActionResult actionResult = new RecognizeCustomEntitiesActionResult(); final CustomEntitiesResult results = customEntityTaskResult.getResults(); if (results != null) { RecognizeCustomEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeCustomEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customEntityTaskResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customEntityTaskResult.getLastUpdateDateTime()); recognizeCustomEntitiesActionResults.add(actionResult); } else if (taskResult instanceof CustomSingleLabelClassificationLROResult) { final CustomSingleLabelClassificationLROResult customSingleLabelClassificationResult = (CustomSingleLabelClassificationLROResult) taskResult; final SingleCategoryClassifyActionResult actionResult = new SingleCategoryClassifyActionResult(); final CustomSingleLabelClassificationResult results = customSingleLabelClassificationResult.getResults(); if (results != null) { SingleCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toSingleCategoryClassifyResultCollection(results)); } 
TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customSingleLabelClassificationResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customSingleLabelClassificationResult.getLastUpdateDateTime()); singleCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof CustomMultiLabelClassificationLROResult) { final CustomMultiLabelClassificationLROResult customMultiLabelClassificationLROResult = (CustomMultiLabelClassificationLROResult) taskResult; final MultiCategoryClassifyActionResult actionResult = new MultiCategoryClassifyActionResult(); final CustomMultiLabelClassificationResult results = customMultiLabelClassificationLROResult.getResults(); if (results != null) { MultiCategoryClassifyActionResultPropertiesHelper.setDocumentsResults(actionResult, toMultiCategoryClassifyResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, customMultiLabelClassificationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, customMultiLabelClassificationLROResult.getLastUpdateDateTime()); multiCategoryClassifyActionResults.add(actionResult); } else if (taskResult instanceof EntityLinkingLROResult) { final EntityLinkingLROResult entityLinkingLROResult = (EntityLinkingLROResult) taskResult; final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = entityLinkingLROResult.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, entityLinkingLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, entityLinkingLROResult.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } else if (taskResult instanceof PiiEntityRecognitionLROResult) { final PiiEntityRecognitionLROResult piiEntityRecognitionLROResult = (PiiEntityRecognitionLROResult) taskResult; final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = piiEntityRecognitionLROResult.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, piiEntityRecognitionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, piiEntityRecognitionLROResult.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } else if (taskResult instanceof ExtractiveSummarizationLROResult) { final ExtractiveSummarizationLROResult extractiveSummarizationLROResult = (ExtractiveSummarizationLROResult) taskResult; final ExtractSummaryActionResult actionResult = new ExtractSummaryActionResult(); final ExtractiveSummarizationResult results = extractiveSummarizationLROResult.getResults(); if (results != null) { ExtractSummaryActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractSummaryResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, extractiveSummarizationLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, extractiveSummarizationLROResult.getLastUpdateDateTime()); 
extractSummaryActionResults.add(actionResult); } else if (taskResult instanceof HealthcareLROResult) { final HealthcareLROResult healthcareLROResult = (HealthcareLROResult) taskResult; final AnalyzeHealthcareEntitiesActionResult actionResult = new AnalyzeHealthcareEntitiesActionResult(); final HealthcareResult results = healthcareLROResult.getResults(); if (results != null) { AnalyzeHealthcareEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeHealthcareEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, healthcareLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, healthcareLROResult.getLastUpdateDateTime()); analyzeHealthcareEntitiesActionResults.add(actionResult); } else if (taskResult instanceof SentimentLROResult) { final SentimentLROResult sentimentLROResult = (SentimentLROResult) taskResult; final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = sentimentLROResult.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, sentimentLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, sentimentLROResult.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } else if (taskResult instanceof KeyPhraseExtractionLROResult) { final KeyPhraseExtractionLROResult keyPhraseExtractionLROResult = (KeyPhraseExtractionLROResult) taskResult; final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = keyPhraseExtractionLROResult.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, keyPhraseExtractionLROResult.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, keyPhraseExtractionLROResult.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid Long running operation task result: " + taskResult.getClass())); } } } final List<Error> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (Error error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if 
(CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getCode() == null ? null : error.getCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeHealthcareEntitiesResults(analyzeActionsResult, IterableStream.of(analyzeHealthcareEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; }
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) 
.setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), 
finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { 
jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setOpinionMining(action.isIncludeOpinionMining()) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> 
analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if 
(!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), 
tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; case PARTIALLY_SUCCEEDED: status = LongRunningOperationStatus.fromString("partiallySucceeded", true); break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference, String errorMessage) { if (CoreUtils.isNullOrEmpty(targetReference)) { if (CoreUtils.isNullOrEmpty(errorMessage)) { errorMessage = "Expected an error with a target field referencing an action but did not get one"; } throw logger.logExceptionAsError(new RuntimeException(errorMessage)); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
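Both polling handlers in the class above collapse the service's job status into an azure-core LongRunningOperationStatus with the same switch shape. A compact sketch of that reduction, assuming the literal status strings shown here (the real code switches on the generated status enum rather than raw strings, and the wrapper class is illustrative):

import com.azure.core.util.polling.LongRunningOperationStatus;

// Illustrative reducer with the same shape as processAnalyzedModelResponseLanguageApi:
// non-terminal states keep the poller in progress, everything else becomes a terminal status.
final class JobStatusMapper {
    static LongRunningOperationStatus toLroStatus(String jobStatus) {
        switch (jobStatus) {
            case "notStarted":
            case "running":
                return LongRunningOperationStatus.IN_PROGRESS;
            case "succeeded":
                return LongRunningOperationStatus.SUCCESSFULLY_COMPLETED;
            case "cancelled":
                return LongRunningOperationStatus.USER_CANCELLED;
            case "partiallySucceeded":
                return LongRunningOperationStatus.fromString("partiallySucceeded", true);
            default:
                // Any other service value is surfaced as a custom terminal status.
                return LongRunningOperationStatus.fromString(jobStatus, true);
        }
    }
}

The same mapping appears twice in the class because the legacy /analyze endpoint and the newer Language API return different generated state models while sharing the same job lifecycle semantics.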
class AnalyzeActionsAsyncClient { private static final String ENTITY_RECOGNITION_TASKS = "entityRecognitionTasks"; private static final String ENTITY_RECOGNITION_PII_TASKS = "entityRecognitionPiiTasks"; private static final String KEY_PHRASE_EXTRACTION_TASKS = "keyPhraseExtractionTasks"; private static final String ENTITY_LINKING_TASKS = "entityLinkingTasks"; private static final String SENTIMENT_ANALYSIS_TASKS = "sentimentAnalysisTasks"; private static final String EXTRACTIVE_SUMMARIZATION_TASKS = "extractiveSummarizationTasks"; private static final String CUSTOM_ENTITY_RECOGNITION_TASKS = "customEntityRecognitionTasks"; private static final String CUSTOM_SINGLE_CLASSIFICATION_TASKS = "customClassificationTasks"; private static final String CUSTOM_MULTI_CLASSIFICATION_TASKS = "customMultiClassificationTasks"; private static final String REGEX_ACTION_ERROR_TARGET = String.format(" ENTITY_RECOGNITION_PII_TASKS, ENTITY_RECOGNITION_TASKS, ENTITY_LINKING_TASKS, SENTIMENT_ANALYSIS_TASKS, EXTRACTIVE_SUMMARIZATION_TASKS, CUSTOM_ENTITY_RECOGNITION_TASKS, CUSTOM_SINGLE_CLASSIFICATION_TASKS, CUSTOM_MULTI_CLASSIFICATION_TASKS); private final ClientLogger logger = new ClientLogger(AnalyzeActionsAsyncClient.class); private final TextAnalyticsClientImpl legacyService; private final AnalyzeTextsImpl service; private static final Pattern PATTERN; static { PATTERN = Pattern.compile(REGEX_ACTION_ERROR_TARGET, Pattern.MULTILINE); } AnalyzeActionsAsyncClient(TextAnalyticsClientImpl legacyService) { this.legacyService = legacyService; this.service = null; } AnalyzeActionsAsyncClient(AnalyzeTextsImpl service) { this.legacyService = null; this.service = service; } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> beginAnalyzeActions( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { final AnalyzeTextJobsInput analyzeTextJobsInput = new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput( new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync(analyzeTextJobsInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId( analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (pollingContext, pollResponse) -> Mono.just(pollingContext.getLatestResponse().getValue()), fetchingOperation( operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) 
.setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail textAnalyticsOperationResult = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper .setOperationId(textAnalyticsOperationResult, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return textAnalyticsOperationResult; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), finalIncludeStatistics, null, null, finalContext)), (pollingContext, activationResponse) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperation(operationId -> Mono.just(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } PollerFlux<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedIterable> beginAnalyzeActionsIterable( Iterable<TextDocumentInput> documents, TextAnalyticsActions actions, AnalyzeActionsOptions options, Context context) { try { Objects.requireNonNull(actions, "'actions' cannot be null."); inputDocumentsValidation(documents); options = getNotNullAnalyzeActionsOptions(options); final Context finalContext = getNotNullContext(context) .addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE); final AnalyzeBatchInput analyzeBatchInput = new AnalyzeBatchInput() .setAnalysisInput(new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getJobManifestTasks(actions)); analyzeBatchInput.setDisplayName(actions.getDisplayName()); final boolean finalIncludeStatistics = options.isIncludeStatistics(); if (service != null) { return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( service.submitJobWithResponseAsync( new AnalyzeTextJobsInput() .setDisplayName(actions.getDisplayName()) .setAnalysisInput(new MultiLanguageAnalysisInput().setDocuments(toMultiLanguageInput(documents))) .setTasks(getAnalyzeTextLROTasks(actions)), finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperationLanguageApi(operationId -> service.jobStatusWithResponseAsync(operationId, finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } return new PollerFlux<>( DEFAULT_POLL_INTERVAL, activationOperation( legacyService.analyzeWithResponseAsync(analyzeBatchInput, finalContext) .map(analyzeResponse -> { final AnalyzeActionsOperationDetail operationDetail = new AnalyzeActionsOperationDetail(); AnalyzeActionsOperationDetailPropertiesHelper.setOperationId(operationDetail, parseOperationId(analyzeResponse.getDeserializedHeaders().getOperationLocation())); return operationDetail; })), pollingOperation(operationId -> legacyService.analyzeStatusWithResponseAsync(operationId.toString(), 
finalIncludeStatistics, null, null, finalContext)), (activationResponse, pollingContext) -> Mono.error(new RuntimeException("Cancellation is not supported.")), fetchingOperationIterable( operationId -> Mono.just(new AnalyzeActionsResultPagedIterable(getAnalyzeOperationFluxPage( operationId, null, null, finalIncludeStatistics, finalContext)))) ); } catch (RuntimeException ex) { return PollerFlux.error(ex); } } private List<AnalyzeTextLROTask> getAnalyzeTextLROTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final List<AnalyzeTextLROTask> tasks = new ArrayList<>(); final Iterable<RecognizeEntitiesAction> recognizeEntitiesActions = actions.getRecognizeEntitiesActions(); final Iterable<RecognizePiiEntitiesAction> recognizePiiEntitiesActions = actions.getRecognizePiiEntitiesActions(); final Iterable<ExtractKeyPhrasesAction> extractKeyPhrasesActions = actions.getExtractKeyPhrasesActions(); final Iterable<RecognizeLinkedEntitiesAction> recognizeLinkedEntitiesActions = actions.getRecognizeLinkedEntitiesActions(); final Iterable<AnalyzeHealthcareEntitiesAction> analyzeHealthcareEntitiesActions = actions.getAnalyzeHealthcareEntitiesActions(); final Iterable<AnalyzeSentimentAction> analyzeSentimentActions = actions.getAnalyzeSentimentActions(); final Iterable<ExtractSummaryAction> extractSummaryActions = actions.getExtractSummaryActions(); final Iterable<RecognizeCustomEntitiesAction> recognizeCustomEntitiesActions = actions.getRecognizeCustomEntitiesActions(); final Iterable<SingleCategoryClassifyAction> singleCategoryClassifyActions = actions.getSingleCategoryClassifyActions(); final Iterable<MultiCategoryClassifyAction> multiCategoryClassifyActions = actions.getMultiCategoryClassifyActions(); if (recognizeEntitiesActions != null) { recognizeEntitiesActions.forEach(action -> tasks.add(toEntitiesLROTask(action))); } if (recognizePiiEntitiesActions != null) { recognizePiiEntitiesActions.forEach(action -> tasks.add(toPiiLROTask(action))); } if (analyzeHealthcareEntitiesActions != null) { analyzeHealthcareEntitiesActions.forEach(action -> tasks.add(toHealthcareLROTask(action))); } if (extractKeyPhrasesActions != null) { extractKeyPhrasesActions.forEach(action -> tasks.add(toKeyPhraseLROTask(action))); } if (recognizeLinkedEntitiesActions != null) { recognizeLinkedEntitiesActions.forEach(action -> tasks.add(toEntityLinkingLROTask(action))); } if (analyzeSentimentActions != null) { analyzeSentimentActions.forEach(action -> tasks.add(toSentimentAnalysisLROTask(action))); } if (extractSummaryActions != null) { extractSummaryActions.forEach(action -> tasks.add(toExtractiveSummarizationLROTask(action))); } if (recognizeCustomEntitiesActions != null) { recognizeCustomEntitiesActions.forEach(action -> tasks.add(toCustomEntitiesLROTask(action))); } if (singleCategoryClassifyActions != null) { singleCategoryClassifyActions.forEach(action -> tasks.add( toCustomSingleLabelClassificationLROTask(action))); } if (multiCategoryClassifyActions != null) { multiCategoryClassifyActions.forEach(action -> tasks.add(toCustomMultiLabelClassificationLROTask(action))); } return tasks; } private JobManifestTasks getJobManifestTasks(TextAnalyticsActions actions) { if (actions == null) { return null; } final JobManifestTasks jobManifestTasks = new JobManifestTasks(); if (actions.getRecognizeEntitiesActions() != null) { jobManifestTasks.setEntityRecognitionTasks(toEntitiesTasks(actions)); } if (actions.getRecognizePiiEntitiesActions() != null) { 
jobManifestTasks.setEntityRecognitionPiiTasks(toPiiTasks(actions)); } if (actions.getExtractKeyPhrasesActions() != null) { jobManifestTasks.setKeyPhraseExtractionTasks(toKeyPhrasesTasks(actions)); } if (actions.getRecognizeLinkedEntitiesActions() != null) { jobManifestTasks.setEntityLinkingTasks(toEntityLinkingTasks(actions)); } if (actions.getAnalyzeSentimentActions() != null) { jobManifestTasks.setSentimentAnalysisTasks(toSentimentAnalysisTasks(actions)); } if (actions.getExtractSummaryActions() != null) { jobManifestTasks.setExtractiveSummarizationTasks(toExtractiveSummarizationTask(actions)); } if (actions.getRecognizeCustomEntitiesActions() != null) { jobManifestTasks.setCustomEntityRecognitionTasks(toCustomEntitiesTask(actions)); } if (actions.getSingleCategoryClassifyActions() != null) { jobManifestTasks.setCustomSingleClassificationTasks(toCustomSingleClassificationTask(actions)); } if (actions.getMultiCategoryClassifyActions() != null) { jobManifestTasks.setCustomMultiClassificationTasks(toCustomMultiClassificationTask(actions)); } return jobManifestTasks; } private EntitiesLROTask toEntitiesLROTask(RecognizeEntitiesAction action) { if (action == null) { return null; } final EntitiesLROTask task = new EntitiesLROTask(); task.setParameters(getEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntitiesTask> toEntitiesTasks(TextAnalyticsActions actions) { final List<EntitiesTask> entitiesTasks = new ArrayList<>(); for (RecognizeEntitiesAction action : actions.getRecognizeEntitiesActions()) { entitiesTasks.add( action == null ? null : new EntitiesTask() .setTaskName(action.getActionName()) .setParameters(getEntitiesTaskParameters(action))); } return entitiesTasks; } private EntitiesTaskParameters getEntitiesTaskParameters(RecognizeEntitiesAction action) { return (EntitiesTaskParameters) new EntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private PiiLROTask toPiiLROTask(RecognizePiiEntitiesAction action) { if (action == null) { return null; } final PiiLROTask task = new PiiLROTask(); task.setParameters(getPiiTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<PiiTask> toPiiTasks(TextAnalyticsActions actions) { final List<PiiTask> piiTasks = new ArrayList<>(); for (RecognizePiiEntitiesAction action : actions.getRecognizePiiEntitiesActions()) { piiTasks.add( action == null ? null : new PiiTask() .setTaskName(action.getActionName()) .setParameters(getPiiTaskParameters(action))); } return piiTasks; } private PiiTaskParameters getPiiTaskParameters(RecognizePiiEntitiesAction action) { return (PiiTaskParameters) new PiiTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setDomain(PiiDomain.fromString( action.getDomainFilter() == null ? 
null : action.getDomainFilter().toString())) .setPiiCategories(toCategoriesFilter(action.getCategoriesFilter())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private HealthcareLROTask toHealthcareLROTask(AnalyzeHealthcareEntitiesAction action) { if (action == null) { return null; } final HealthcareLROTask task = new HealthcareLROTask(); task.setParameters(getHealthcareTaskParameters(action)).setTaskName(action.getActionName()); return task; } private HealthcareTaskParameters getHealthcareTaskParameters(AnalyzeHealthcareEntitiesAction action) { return (HealthcareTaskParameters) new HealthcareTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private KeyPhraseLROTask toKeyPhraseLROTask(ExtractKeyPhrasesAction action) { if (action == null) { return null; } final KeyPhraseLROTask task = new KeyPhraseLROTask(); task.setParameters(getKeyPhraseTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<KeyPhrasesTask> toKeyPhrasesTasks(TextAnalyticsActions actions) { final List<KeyPhrasesTask> keyPhrasesTasks = new ArrayList<>(); for (ExtractKeyPhrasesAction action : actions.getExtractKeyPhrasesActions()) { keyPhrasesTasks.add( action == null ? null : new KeyPhrasesTask() .setTaskName(action.getActionName()) .setParameters(getKeyPhraseTaskParameters(action))); } return keyPhrasesTasks; } private KeyPhraseTaskParameters getKeyPhraseTaskParameters(ExtractKeyPhrasesAction action) { return (KeyPhraseTaskParameters) new KeyPhraseTaskParameters() .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private EntityLinkingLROTask toEntityLinkingLROTask(RecognizeLinkedEntitiesAction action) { if (action == null) { return null; } final EntityLinkingLROTask task = new EntityLinkingLROTask(); task.setParameters(getEntityLinkingTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<EntityLinkingTask> toEntityLinkingTasks(TextAnalyticsActions actions) { final List<EntityLinkingTask> tasks = new ArrayList<>(); for (RecognizeLinkedEntitiesAction action : actions.getRecognizeLinkedEntitiesActions()) { tasks.add( action == null ? null : new EntityLinkingTask() .setTaskName(action.getActionName()) .setParameters(getEntityLinkingTaskParameters(action))); } return tasks; } private EntityLinkingTaskParameters getEntityLinkingTaskParameters(RecognizeLinkedEntitiesAction action) { return (EntityLinkingTaskParameters) new EntityLinkingTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private SentimentAnalysisLROTask toSentimentAnalysisLROTask(AnalyzeSentimentAction action) { if (action == null) { return null; } final SentimentAnalysisLROTask task = new SentimentAnalysisLROTask(); task.setParameters(getSentimentAnalysisTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<SentimentAnalysisTask> toSentimentAnalysisTasks(TextAnalyticsActions actions) { final List<SentimentAnalysisTask> tasks = new ArrayList<>(); for (AnalyzeSentimentAction action : actions.getAnalyzeSentimentActions()) { tasks.add( action == null ? 
null : new SentimentAnalysisTask() .setTaskName(action.getActionName()) .setParameters(getSentimentAnalysisTaskParameters(action))); } return tasks; } private SentimentAnalysisTaskParameters getSentimentAnalysisTaskParameters(AnalyzeSentimentAction action) { return (SentimentAnalysisTaskParameters) new SentimentAnalysisTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setOpinionMining(action.isIncludeOpinionMining()) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private ExtractiveSummarizationLROTask toExtractiveSummarizationLROTask(ExtractSummaryAction action) { if (action == null) { return null; } final ExtractiveSummarizationLROTask task = new ExtractiveSummarizationLROTask(); task.setParameters(getExtractiveSummarizationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<ExtractiveSummarizationTask> toExtractiveSummarizationTask(TextAnalyticsActions actions) { final List<ExtractiveSummarizationTask> extractiveSummarizationTasks = new ArrayList<>(); for (ExtractSummaryAction action : actions.getExtractSummaryActions()) { extractiveSummarizationTasks.add( action == null ? null : new ExtractiveSummarizationTask() .setTaskName(action.getActionName()) .setParameters(getExtractiveSummarizationTaskParameters(action))); } return extractiveSummarizationTasks; } private ExtractiveSummarizationTaskParameters getExtractiveSummarizationTaskParameters( ExtractSummaryAction action) { return (ExtractiveSummarizationTaskParameters) new ExtractiveSummarizationTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setSentenceCount(action.getMaxSentenceCount()) .setSortBy(action.getOrderBy() == null ? null : ExtractiveSummarizationSortingCriteria .fromString(action.getOrderBy().toString())) .setModelVersion(action.getModelVersion()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomEntitiesLROTask toCustomEntitiesLROTask(RecognizeCustomEntitiesAction action) { if (action == null) { return null; } final CustomEntitiesLROTask task = new CustomEntitiesLROTask(); task.setParameters(getCustomEntitiesTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomEntitiesTask> toCustomEntitiesTask(TextAnalyticsActions actions) { final List<CustomEntitiesTask> tasks = new ArrayList<>(); for (RecognizeCustomEntitiesAction action : actions.getRecognizeCustomEntitiesActions()) { tasks.add( action == null ? 
null : new CustomEntitiesTask() .setTaskName(action.getActionName()) .setParameters(getCustomEntitiesTaskParameters(action))); } return tasks; } private CustomEntitiesTaskParameters getCustomEntitiesTaskParameters(RecognizeCustomEntitiesAction action) { return (CustomEntitiesTaskParameters) new CustomEntitiesTaskParameters() .setStringIndexType(StringIndexType.UTF16CODE_UNIT) .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomSingleLabelClassificationLROTask toCustomSingleLabelClassificationLROTask( SingleCategoryClassifyAction action) { if (action == null) { return null; } final CustomSingleLabelClassificationLROTask task = new CustomSingleLabelClassificationLROTask(); task.setParameters(getCustomSingleClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomSingleClassificationTask> toCustomSingleClassificationTask(TextAnalyticsActions actions) { final List<CustomSingleClassificationTask> tasks = new ArrayList<>(); for (SingleCategoryClassifyAction action : actions.getSingleCategoryClassifyActions()) { tasks.add( action == null ? null : new CustomSingleClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomSingleClassificationTaskParameters(action))); } return tasks; } private CustomSingleLabelClassificationTaskParameters getCustomSingleClassificationTaskParameters( SingleCategoryClassifyAction action) { return (CustomSingleLabelClassificationTaskParameters) new CustomSingleLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private CustomMultiLabelClassificationLROTask toCustomMultiLabelClassificationLROTask( MultiCategoryClassifyAction action) { if (action == null) { return null; } final CustomMultiLabelClassificationLROTask task = new CustomMultiLabelClassificationLROTask(); task.setParameters(getCustomMultiLabelClassificationTaskParameters(action)).setTaskName(action.getActionName()); return task; } private List<CustomMultiClassificationTask> toCustomMultiClassificationTask(TextAnalyticsActions actions) { final List<CustomMultiClassificationTask> tasks = new ArrayList<>(); for (MultiCategoryClassifyAction action : actions.getMultiCategoryClassifyActions()) { tasks.add( action == null ? 
null : new CustomMultiClassificationTask() .setTaskName(action.getActionName()) .setParameters(getCustomMultiLabelClassificationTaskParameters(action))); } return tasks; } private CustomMultiLabelClassificationTaskParameters getCustomMultiLabelClassificationTaskParameters( MultiCategoryClassifyAction action) { return (CustomMultiLabelClassificationTaskParameters) new CustomMultiLabelClassificationTaskParameters() .setProjectName(action.getProjectName()) .setDeploymentName(action.getDeploymentName()) .setLoggingOptOut(action.isServiceLogsDisabled()); } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsOperationDetail>> activationOperation(Mono<AnalyzeActionsOperationDetail> operationResult) { return pollingContext -> { try { return operationResult.onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperation(Function<UUID, Mono<Response<AnalyzeJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponse(modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<PollResponse<AnalyzeActionsOperationDetail>>> pollingOperationLanguageApi(Function<UUID, Mono<Response<AnalyzeTextJobState>>> pollingFunction) { return pollingContext -> { try { final PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse = pollingContext.getLatestResponse(); final UUID operationId = UUID.fromString(operationResultPollResponse.getValue().getOperationId()); return pollingFunction.apply(operationId) .flatMap(modelResponse -> processAnalyzedModelResponseLanguageApi( modelResponse, operationResultPollResponse)) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedFlux>> fetchingOperation(Function<UUID, Mono<AnalyzeActionsResultPagedFlux>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } private Function<PollingContext<AnalyzeActionsOperationDetail>, Mono<AnalyzeActionsResultPagedIterable>> fetchingOperationIterable(Function<UUID, Mono<AnalyzeActionsResultPagedIterable>> fetchingFunction) { return pollingContext -> { try { final UUID operationId = UUID.fromString(pollingContext.getLatestResponse().getValue().getOperationId()); return fetchingFunction.apply(operationId); } catch (RuntimeException ex) { return monoError(logger, ex); } }; } AnalyzeActionsResultPagedFlux getAnalyzeOperationFluxPage(UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { return new AnalyzeActionsResultPagedFlux( () -> (continuationToken, pageSize) -> getPage(continuationToken, operationId, top, skip, 
showStats, context).flux()); } Mono<PagedResponse<AnalyzeActionsResult>> getPage(String continuationToken, UUID operationId, Integer top, Integer skip, boolean showStats, Context context) { if (continuationToken != null) { final Map<String, Object> continuationTokenMap = parseNextLink(continuationToken); final Integer topValue = (Integer) continuationTokenMap.getOrDefault("$top", null); final Integer skipValue = (Integer) continuationTokenMap.getOrDefault("$skip", null); final Boolean showStatsValue = (Boolean) continuationTokenMap.getOrDefault(showStats, false); if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStatsValue, topValue, skipValue, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } else { if (service != null) { return service.jobStatusWithResponseAsync(operationId, showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponseLanguageApi) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } return legacyService.analyzeStatusWithResponseAsync(operationId.toString(), showStats, top, skip, context) .map(this::toAnalyzeActionsResultPagedResponse) .onErrorMap(Utility::mapToHttpResponseExceptionIfExists); } } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponse(Response<AnalyzeJobState> response) { final AnalyzeJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResult(analyzeJobState)), analyzeJobState.getNextLink(), null); } private PagedResponse<AnalyzeActionsResult> toAnalyzeActionsResultPagedResponseLanguageApi(Response<AnalyzeTextJobState> response) { final AnalyzeTextJobState analyzeJobState = response.getValue(); return new PagedResponseBase<Void, AnalyzeActionsResult>( response.getRequest(), response.getStatusCode(), response.getHeaders(), Arrays.asList(toAnalyzeActionsResultLanguageApi(analyzeJobState)), analyzeJobState.getNextLink(), null); } private AnalyzeActionsResult toAnalyzeActionsResult(AnalyzeJobState analyzeJobState) { TasksStateTasksOld tasksStateTasks = analyzeJobState.getTasks(); final List<TasksStateTasksEntityRecognitionPiiTasksItem> piiTasksItems = tasksStateTasks.getEntityRecognitionPiiTasks(); final List<TasksStateTasksEntityRecognitionTasksItem> entityRecognitionTasksItems = tasksStateTasks.getEntityRecognitionTasks(); final List<TasksStateTasksKeyPhraseExtractionTasksItem> keyPhraseExtractionTasks = tasksStateTasks.getKeyPhraseExtractionTasks(); final List<TasksStateTasksEntityLinkingTasksItem> linkedEntityRecognitionTasksItems = tasksStateTasks.getEntityLinkingTasks(); final List<TasksStateTasksSentimentAnalysisTasksItem> sentimentAnalysisTasksItems = tasksStateTasks.getSentimentAnalysisTasks(); List<RecognizeEntitiesActionResult> recognizeEntitiesActionResults = new ArrayList<>(); List<RecognizePiiEntitiesActionResult> recognizePiiEntitiesActionResults = new ArrayList<>(); List<ExtractKeyPhrasesActionResult> extractKeyPhrasesActionResults = new ArrayList<>(); List<RecognizeLinkedEntitiesActionResult> recognizeLinkedEntitiesActionResults = new ArrayList<>(); List<AnalyzeSentimentActionResult> 
analyzeSentimentActionResults = new ArrayList<>(); List<ExtractSummaryActionResult> extractSummaryActionResults = new ArrayList<>(); List<RecognizeCustomEntitiesActionResult> recognizeCustomEntitiesActionResults = new ArrayList<>(); List<SingleCategoryClassifyActionResult> singleCategoryClassifyActionResults = new ArrayList<>(); List<MultiCategoryClassifyActionResult> multiCategoryClassifyActionResults = new ArrayList<>(); if (!CoreUtils.isNullOrEmpty(entityRecognitionTasksItems)) { for (int i = 0; i < entityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionTasksItem taskItem = entityRecognitionTasksItems.get(i); final RecognizeEntitiesActionResult actionResult = new RecognizeEntitiesActionResult(); final EntitiesResult results = taskItem.getResults(); if (results != null) { RecognizeEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeEntitiesResultCollectionResponse(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(piiTasksItems)) { for (int i = 0; i < piiTasksItems.size(); i++) { final TasksStateTasksEntityRecognitionPiiTasksItem taskItem = piiTasksItems.get(i); final RecognizePiiEntitiesActionResult actionResult = new RecognizePiiEntitiesActionResult(); final PiiResult results = taskItem.getResults(); if (results != null) { RecognizePiiEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizePiiEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizePiiEntitiesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(keyPhraseExtractionTasks)) { for (int i = 0; i < keyPhraseExtractionTasks.size(); i++) { final TasksStateTasksKeyPhraseExtractionTasksItem taskItem = keyPhraseExtractionTasks.get(i); final ExtractKeyPhrasesActionResult actionResult = new ExtractKeyPhrasesActionResult(); final KeyPhraseResult results = taskItem.getResults(); if (results != null) { ExtractKeyPhrasesActionResultPropertiesHelper.setDocumentsResults(actionResult, toExtractKeyPhrasesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); extractKeyPhrasesActionResults.add(actionResult); } } if (!CoreUtils.isNullOrEmpty(linkedEntityRecognitionTasksItems)) { for (int i = 0; i < linkedEntityRecognitionTasksItems.size(); i++) { final TasksStateTasksEntityLinkingTasksItem taskItem = linkedEntityRecognitionTasksItems.get(i); final RecognizeLinkedEntitiesActionResult actionResult = new RecognizeLinkedEntitiesActionResult(); final EntityLinkingResult results = taskItem.getResults(); if (results != null) { RecognizeLinkedEntitiesActionResultPropertiesHelper.setDocumentsResults(actionResult, toRecognizeLinkedEntitiesResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); recognizeLinkedEntitiesActionResults.add(actionResult); } } if 
(!CoreUtils.isNullOrEmpty(sentimentAnalysisTasksItems)) { for (int i = 0; i < sentimentAnalysisTasksItems.size(); i++) { final TasksStateTasksSentimentAnalysisTasksItem taskItem = sentimentAnalysisTasksItems.get(i); final AnalyzeSentimentActionResult actionResult = new AnalyzeSentimentActionResult(); final SentimentResponse results = taskItem.getResults(); if (results != null) { AnalyzeSentimentActionResultPropertiesHelper.setDocumentsResults(actionResult, toAnalyzeSentimentResultCollection(results)); } TextAnalyticsActionResultPropertiesHelper.setActionName(actionResult, taskItem.getTaskName()); TextAnalyticsActionResultPropertiesHelper.setCompletedAt(actionResult, taskItem.getLastUpdateDateTime()); analyzeSentimentActionResults.add(actionResult); } } final List<TextAnalyticsError> errors = analyzeJobState.getErrors(); if (!CoreUtils.isNullOrEmpty(errors)) { for (TextAnalyticsError error : errors) { if (error != null) { final String[] targetPair = parseActionErrorTarget(error.getTarget(), error.getMessage()); final String taskName = targetPair[0]; final Integer taskIndex = Integer.valueOf(targetPair[1]); final TextAnalyticsActionResult actionResult; if (ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeEntitiesActionResults.get(taskIndex); } else if (ENTITY_RECOGNITION_PII_TASKS.equals(taskName)) { actionResult = recognizePiiEntitiesActionResults.get(taskIndex); } else if (KEY_PHRASE_EXTRACTION_TASKS.equals(taskName)) { actionResult = extractKeyPhrasesActionResults.get(taskIndex); } else if (ENTITY_LINKING_TASKS.equals(taskName)) { actionResult = recognizeLinkedEntitiesActionResults.get(taskIndex); } else if (SENTIMENT_ANALYSIS_TASKS.equals(taskName)) { actionResult = analyzeSentimentActionResults.get(taskIndex); } else if (EXTRACTIVE_SUMMARIZATION_TASKS.equals(taskName)) { actionResult = extractSummaryActionResults.get(taskIndex); } else if (CUSTOM_ENTITY_RECOGNITION_TASKS.equals(taskName)) { actionResult = recognizeCustomEntitiesActionResults.get(taskIndex); } else if (CUSTOM_SINGLE_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = singleCategoryClassifyActionResults.get(taskIndex); } else if (CUSTOM_MULTI_CLASSIFICATION_TASKS.equals(taskName)) { actionResult = multiCategoryClassifyActionResults.get(taskIndex); } else { throw logger.logExceptionAsError(new RuntimeException( "Invalid task name in target reference, " + taskName)); } TextAnalyticsActionResultPropertiesHelper.setIsError(actionResult, true); TextAnalyticsActionResultPropertiesHelper.setError(actionResult, new com.azure.ai.textanalytics.models.TextAnalyticsError( TextAnalyticsErrorCode.fromString( error.getErrorCode() == null ? 
null : error.getErrorCode().toString()), error.getMessage(), null)); } } } final AnalyzeActionsResult analyzeActionsResult = new AnalyzeActionsResult(); AnalyzeActionsResultPropertiesHelper.setRecognizeEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizePiiEntitiesResults(analyzeActionsResult, IterableStream.of(recognizePiiEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractKeyPhrasesResults(analyzeActionsResult, IterableStream.of(extractKeyPhrasesActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeLinkedEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeLinkedEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setAnalyzeSentimentResults(analyzeActionsResult, IterableStream.of(analyzeSentimentActionResults)); AnalyzeActionsResultPropertiesHelper.setExtractSummaryResults(analyzeActionsResult, IterableStream.of(extractSummaryActionResults)); AnalyzeActionsResultPropertiesHelper.setRecognizeCustomEntitiesResults(analyzeActionsResult, IterableStream.of(recognizeCustomEntitiesActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifySingleCategoryResults(analyzeActionsResult, IterableStream.of(singleCategoryClassifyActionResults)); AnalyzeActionsResultPropertiesHelper.setClassifyMultiCategoryResults(analyzeActionsResult, IterableStream.of(multiCategoryClassifyActionResults)); return analyzeActionsResult; } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponse( Response<AnalyzeJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasksOld tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), 
tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Mono<PollResponse<AnalyzeActionsOperationDetail>> processAnalyzedModelResponseLanguageApi( Response<AnalyzeTextJobState> analyzeJobStateResponse, PollResponse<AnalyzeActionsOperationDetail> operationResultPollResponse) { LongRunningOperationStatus status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; if (analyzeJobStateResponse.getValue() != null && analyzeJobStateResponse.getValue().getStatus() != null) { switch (analyzeJobStateResponse.getValue().getStatus()) { case NOT_STARTED: case RUNNING: status = LongRunningOperationStatus.IN_PROGRESS; break; case SUCCEEDED: status = LongRunningOperationStatus.SUCCESSFULLY_COMPLETED; break; case CANCELLED: status = LongRunningOperationStatus.USER_CANCELLED; break; case PARTIALLY_SUCCEEDED: status = LongRunningOperationStatus.fromString("partiallySucceeded", true); break; default: status = LongRunningOperationStatus.fromString( analyzeJobStateResponse.getValue().getStatus().toString(), true); break; } } AnalyzeActionsOperationDetailPropertiesHelper.setDisplayName(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getDisplayName()); AnalyzeActionsOperationDetailPropertiesHelper.setCreatedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getCreatedDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setExpiresAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getExpirationDateTime()); AnalyzeActionsOperationDetailPropertiesHelper.setLastModifiedAt(operationResultPollResponse.getValue(), analyzeJobStateResponse.getValue().getLastUpdateDateTime()); final TasksStateTasks tasksResult = analyzeJobStateResponse.getValue().getTasks(); AnalyzeActionsOperationDetailPropertiesHelper.setActionsFailed(operationResultPollResponse.getValue(), tasksResult.getFailed()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInProgress(operationResultPollResponse.getValue(), tasksResult.getInProgress()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsSucceeded( operationResultPollResponse.getValue(), tasksResult.getCompleted()); AnalyzeActionsOperationDetailPropertiesHelper.setActionsInTotal(operationResultPollResponse.getValue(), tasksResult.getTotal()); return Mono.just(new PollResponse<>(status, operationResultPollResponse.getValue())); } private Context getNotNullContext(Context context) { return context == null ? Context.NONE : context; } private AnalyzeActionsOptions getNotNullAnalyzeActionsOptions(AnalyzeActionsOptions options) { return options == null ? new AnalyzeActionsOptions() : options; } private String[] parseActionErrorTarget(String targetReference, String errorMessage) { if (CoreUtils.isNullOrEmpty(targetReference)) { if (CoreUtils.isNullOrEmpty(errorMessage)) { errorMessage = "Expected an error with a target field referencing an action but did not get one"; } throw logger.logExceptionAsError(new RuntimeException(errorMessage)); } final Matcher matcher = PATTERN.matcher(targetReference); String[] taskNameIdPair = new String[2]; while (matcher.find()) { taskNameIdPair[0] = matcher.group(1); taskNameIdPair[1] = matcher.group(2); } return taskNameIdPair; } }
Could this be done with a Swagger transform?
public void customize(LibraryCustomization customization, Logger logger) { PackageCustomization implementationModels = customization.getPackage("com.azure.storage.blob.implementation.models"); implementationModels.getClass("BlobHierarchyListSegment").addAnnotation("@JsonDeserialize(using = com.azure.storage.blob.implementation.util.CustomHierarchicalListingDeserializer.class)"); implementationModels.getClass("BlobPrefix").rename("BlobPrefixInternal"); PackageCustomization models = customization.getPackage("com.azure.storage.blob.models"); models.getClass("PageList").addAnnotation("@JsonDeserialize(using = PageListDeserializer.class)"); models.getClass("BlobCopySourceTags").rename("BlobCopySourceTagsMode"); models.getClass("BlobErrorCode").getProperty("SNAPHOT_OPERATION_RATE_EXCEEDED") .addAnnotation("Deprecated"); ClassCustomization blobHttpHeaders = models.getClass("BlobHttpHeaders"); blobHttpHeaders.getMethod("getContentMd5").getJavadoc().setDescription("Get the contentMd5 property: " + "Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for " + "the individual blocks were validated when each was uploaded. The value does not need to be base64 " + "encoded as the SDK will perform the encoding."); blobHttpHeaders.getMethod("setContentMd5").getJavadoc().setDescription("Set the contentMd5 property: " + "Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for " + "the individual blocks were validated when each was uploaded. The value does not need to be base64 " + "encoded as the SDK will perform the encoding."); ClassCustomization blobContainerEncryptionScope = models.getClass("BlobContainerEncryptionScope"); blobContainerEncryptionScope.getMethod("isEncryptionScopeOverridePrevented") .setReturnType("boolean", "return Boolean.TRUE.equals(%s);", true); blobHttpHeaders.removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"blob-http-headers\")"); blobContainerEncryptionScope.removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"blob-container-encryption-scope\")"); models.getClass("CpkInfo").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"cpk-info\")"); models.getClass("BlobMetrics").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"Metrics\")"); models.getClass("BlobAnalyticsLogging").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"Logging\")"); models.getClass("BlobRetentionPolicy").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"RetentionPolicy\")"); models.getClass("BlobServiceStatistics").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"StorageServiceStats\")"); models.getClass("BlobSignedIdentifier").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"SignedIdentifier\")"); models.getClass("BlobAccessPolicy").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"AccessPolicy\")"); ClassCustomization blobContainerItemProperties = models.getClass("BlobContainerItemProperties"); blobContainerItemProperties.getMethod("isEncryptionScopeOverridePrevented") .setReturnType("boolean", "return Boolean.TRUE.equals(%s);", true); blobContainerItemProperties.getMethod("setIsImmutableStorageWithVersioningEnabled") 
.rename("setImmutableStorageWithVersioningEnabled"); blobContainerItemProperties.getMethod("setEncryptionScopeOverridePrevented") .replaceParameters("boolean encryptionScopeOverridePrevented"); ClassCustomization block = models.getClass("Block"); block.getMethod("getSizeInt") .rename("getSize") .addAnnotation("@Deprecated") .setReturnType("int", "return (int) this.sizeLong; .getJavadoc() .setDeprecated("Use {@link block.getMethod("setSizeInt") .rename("setSize") .addAnnotation("@Deprecated") .setReturnType("Block", "return %s.setSizeLong((long) sizeInt);", true) .getJavadoc() .setDeprecated("Use {@link ClassCustomization listBlobsIncludeItem = models.getClass("ListBlobsIncludeItem"); listBlobsIncludeItem.renameEnumMember("IMMUTABILITYPOLICY", "IMMUTABILITY_POLICY") .renameEnumMember("LEGALHOLD", "LEGAL_HOLD") .renameEnumMember("DELETEDWITHVERSIONS", "DELETED_WITH_VERSIONS"); }
models.getClass("BlobCopySourceTags").rename("BlobCopySourceTagsMode");
public void customize(LibraryCustomization customization, Logger logger) { PackageCustomization implementationModels = customization.getPackage("com.azure.storage.blob.implementation.models"); implementationModels.getClass("BlobHierarchyListSegment").addAnnotation("@JsonDeserialize(using = com.azure.storage.blob.implementation.util.CustomHierarchicalListingDeserializer.class)"); implementationModels.getClass("BlobPrefix").rename("BlobPrefixInternal"); PackageCustomization models = customization.getPackage("com.azure.storage.blob.models"); models.getClass("PageList").addAnnotation("@JsonDeserialize(using = PageListDeserializer.class)"); models.getClass("BlobCopySourceTags").rename("BlobCopySourceTagsMode"); ClassCustomization blobHttpHeaders = models.getClass("BlobHttpHeaders"); blobHttpHeaders.getMethod("getContentMd5").getJavadoc().setDescription("Get the contentMd5 property: " + "Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for " + "the individual blocks were validated when each was uploaded. The value does not need to be base64 " + "encoded as the SDK will perform the encoding."); blobHttpHeaders.getMethod("setContentMd5").getJavadoc().setDescription("Set the contentMd5 property: " + "Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for " + "the individual blocks were validated when each was uploaded. The value does not need to be base64 " + "encoded as the SDK will perform the encoding."); ClassCustomization blobContainerEncryptionScope = models.getClass("BlobContainerEncryptionScope"); blobContainerEncryptionScope.getMethod("isEncryptionScopeOverridePrevented") .setReturnType("boolean", "return Boolean.TRUE.equals(%s);", true); blobHttpHeaders.removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"blob-http-headers\")"); blobContainerEncryptionScope.removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"blob-container-encryption-scope\")"); models.getClass("CpkInfo").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"cpk-info\")"); models.getClass("BlobMetrics").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"Metrics\")"); models.getClass("BlobAnalyticsLogging").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"Logging\")"); models.getClass("BlobRetentionPolicy").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"RetentionPolicy\")"); models.getClass("BlobServiceStatistics").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"StorageServiceStats\")"); models.getClass("BlobSignedIdentifier").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"SignedIdentifier\")"); models.getClass("BlobAccessPolicy").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"AccessPolicy\")"); ClassCustomization blobContainerItemProperties = models.getClass("BlobContainerItemProperties"); blobContainerItemProperties.getMethod("isEncryptionScopeOverridePrevented") .setReturnType("boolean", "return Boolean.TRUE.equals(%s);", true); blobContainerItemProperties.getMethod("setIsImmutableStorageWithVersioningEnabled") .rename("setImmutableStorageWithVersioningEnabled"); 
blobContainerItemProperties.getMethod("setEncryptionScopeOverridePrevented") .replaceParameters("boolean encryptionScopeOverridePrevented"); ClassCustomization block = models.getClass("Block"); block.getMethod("getSizeInt") .rename("getSize") .addAnnotation("@Deprecated") .setReturnType("int", "return (int) this.sizeLong; .getJavadoc() .setDeprecated("Use {@link block.getMethod("setSizeInt") .rename("setSize") .addAnnotation("@Deprecated") .setReturnType("Block", "return %s.setSizeLong((long) sizeInt);", true) .getJavadoc() .setDeprecated("Use {@link ClassCustomization listBlobsIncludeItem = models.getClass("ListBlobsIncludeItem"); listBlobsIncludeItem.renameEnumMember("IMMUTABILITYPOLICY", "IMMUTABILITY_POLICY") .renameEnumMember("LEGALHOLD", "LEGAL_HOLD") .renameEnumMember("DELETEDWITHVERSIONS", "DELETED_WITH_VERSIONS"); }
class BlobStorageCustomization extends Customization { @Override }
class BlobStorageCustomization extends Customization { @Override }
Do we want to keep this, knowing that the method will throw based on testing it? Maybe comment it out for now until it is fixed?
public void customize(LibraryCustomization customization, Logger logger) { PackageCustomization implementationModels = customization.getPackage("com.azure.storage.blob.implementation.models"); implementationModels.getClass("BlobHierarchyListSegment").addAnnotation("@JsonDeserialize(using = com.azure.storage.blob.implementation.util.CustomHierarchicalListingDeserializer.class)"); implementationModels.getClass("BlobPrefix").rename("BlobPrefixInternal"); PackageCustomization models = customization.getPackage("com.azure.storage.blob.models"); models.getClass("PageList").addAnnotation("@JsonDeserialize(using = PageListDeserializer.class)"); models.getClass("BlobCopySourceTags").rename("BlobCopySourceTagsMode"); models.getClass("BlobErrorCode").getProperty("SNAPHOT_OPERATION_RATE_EXCEEDED") .addAnnotation("Deprecated"); ClassCustomization blobHttpHeaders = models.getClass("BlobHttpHeaders"); blobHttpHeaders.getMethod("getContentMd5").getJavadoc().setDescription("Get the contentMd5 property: " + "Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for " + "the individual blocks were validated when each was uploaded. The value does not need to be base64 " + "encoded as the SDK will perform the encoding."); blobHttpHeaders.getMethod("setContentMd5").getJavadoc().setDescription("Set the contentMd5 property: " + "Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for " + "the individual blocks were validated when each was uploaded. The value does not need to be base64 " + "encoded as the SDK will perform the encoding."); ClassCustomization blobContainerEncryptionScope = models.getClass("BlobContainerEncryptionScope"); blobContainerEncryptionScope.getMethod("isEncryptionScopeOverridePrevented") .setReturnType("boolean", "return Boolean.TRUE.equals(%s);", true); blobHttpHeaders.removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"blob-http-headers\")"); blobContainerEncryptionScope.removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"blob-container-encryption-scope\")"); models.getClass("CpkInfo").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"cpk-info\")"); models.getClass("BlobMetrics").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"Metrics\")"); models.getClass("BlobAnalyticsLogging").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"Logging\")"); models.getClass("BlobRetentionPolicy").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"RetentionPolicy\")"); models.getClass("BlobServiceStatistics").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"StorageServiceStats\")"); models.getClass("BlobSignedIdentifier").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"SignedIdentifier\")"); models.getClass("BlobAccessPolicy").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"AccessPolicy\")"); ClassCustomization blobContainerItemProperties = models.getClass("BlobContainerItemProperties"); blobContainerItemProperties.getMethod("isEncryptionScopeOverridePrevented") .setReturnType("boolean", "return Boolean.TRUE.equals(%s);", true); blobContainerItemProperties.getMethod("setIsImmutableStorageWithVersioningEnabled") 
.rename("setImmutableStorageWithVersioningEnabled"); blobContainerItemProperties.getMethod("setEncryptionScopeOverridePrevented") .replaceParameters("boolean encryptionScopeOverridePrevented"); ClassCustomization block = models.getClass("Block"); block.getMethod("getSizeInt") .rename("getSize") .addAnnotation("@Deprecated") .setReturnType("int", "return (int) this.sizeLong; .getJavadoc() .setDeprecated("Use {@link block.getMethod("setSizeInt") .rename("setSize") .addAnnotation("@Deprecated") .setReturnType("Block", "return %s.setSizeLong((long) sizeInt);", true) .getJavadoc() .setDeprecated("Use {@link ClassCustomization listBlobsIncludeItem = models.getClass("ListBlobsIncludeItem"); listBlobsIncludeItem.renameEnumMember("IMMUTABILITYPOLICY", "IMMUTABILITY_POLICY") .renameEnumMember("LEGALHOLD", "LEGAL_HOLD") .renameEnumMember("DELETEDWITHVERSIONS", "DELETED_WITH_VERSIONS"); }
models.getClass("BlobErrorCode").getProperty("SNAPHOT_OPERATION_RATE_EXCEEDED")
public void customize(LibraryCustomization customization, Logger logger) { PackageCustomization implementationModels = customization.getPackage("com.azure.storage.blob.implementation.models"); implementationModels.getClass("BlobHierarchyListSegment").addAnnotation("@JsonDeserialize(using = com.azure.storage.blob.implementation.util.CustomHierarchicalListingDeserializer.class)"); implementationModels.getClass("BlobPrefix").rename("BlobPrefixInternal"); PackageCustomization models = customization.getPackage("com.azure.storage.blob.models"); models.getClass("PageList").addAnnotation("@JsonDeserialize(using = PageListDeserializer.class)"); models.getClass("BlobCopySourceTags").rename("BlobCopySourceTagsMode"); ClassCustomization blobHttpHeaders = models.getClass("BlobHttpHeaders"); blobHttpHeaders.getMethod("getContentMd5").getJavadoc().setDescription("Get the contentMd5 property: " + "Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for " + "the individual blocks were validated when each was uploaded. The value does not need to be base64 " + "encoded as the SDK will perform the encoding."); blobHttpHeaders.getMethod("setContentMd5").getJavadoc().setDescription("Set the contentMd5 property: " + "Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for " + "the individual blocks were validated when each was uploaded. The value does not need to be base64 " + "encoded as the SDK will perform the encoding."); ClassCustomization blobContainerEncryptionScope = models.getClass("BlobContainerEncryptionScope"); blobContainerEncryptionScope.getMethod("isEncryptionScopeOverridePrevented") .setReturnType("boolean", "return Boolean.TRUE.equals(%s);", true); blobHttpHeaders.removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"blob-http-headers\")"); blobContainerEncryptionScope.removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"blob-container-encryption-scope\")"); models.getClass("CpkInfo").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"cpk-info\")"); models.getClass("BlobMetrics").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"Metrics\")"); models.getClass("BlobAnalyticsLogging").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"Logging\")"); models.getClass("BlobRetentionPolicy").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"RetentionPolicy\")"); models.getClass("BlobServiceStatistics").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"StorageServiceStats\")"); models.getClass("BlobSignedIdentifier").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"SignedIdentifier\")"); models.getClass("BlobAccessPolicy").removeAnnotation("@JacksonXmlRootElement") .addAnnotation("@JacksonXmlRootElement(localName = \"AccessPolicy\")"); ClassCustomization blobContainerItemProperties = models.getClass("BlobContainerItemProperties"); blobContainerItemProperties.getMethod("isEncryptionScopeOverridePrevented") .setReturnType("boolean", "return Boolean.TRUE.equals(%s);", true); blobContainerItemProperties.getMethod("setIsImmutableStorageWithVersioningEnabled") .rename("setImmutableStorageWithVersioningEnabled"); 
blobContainerItemProperties.getMethod("setEncryptionScopeOverridePrevented") .replaceParameters("boolean encryptionScopeOverridePrevented"); ClassCustomization block = models.getClass("Block"); block.getMethod("getSizeInt") .rename("getSize") .addAnnotation("@Deprecated") .setReturnType("int", "return (int) this.sizeLong; .getJavadoc() .setDeprecated("Use {@link block.getMethod("setSizeInt") .rename("setSize") .addAnnotation("@Deprecated") .setReturnType("Block", "return %s.setSizeLong((long) sizeInt);", true) .getJavadoc() .setDeprecated("Use {@link ClassCustomization listBlobsIncludeItem = models.getClass("ListBlobsIncludeItem"); listBlobsIncludeItem.renameEnumMember("IMMUTABILITYPOLICY", "IMMUTABILITY_POLICY") .renameEnumMember("LEGALHOLD", "LEGAL_HOLD") .renameEnumMember("DELETEDWITHVERSIONS", "DELETED_WITH_VERSIONS"); }
class BlobStorageCustomization extends Customization { @Override }
class BlobStorageCustomization extends Customization { @Override }
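The comment above suggests disabling the deprecation customization until the underlying issue is fixed, since testing shows the getProperty(...).addAnnotation(...) call currently throws. A minimal sketch of that suggestion, reusing only the customization types that already appear in the surrounding snippets (the TODO wording is illustrative, not part of the original change):

@Override
public void customize(LibraryCustomization customization, Logger logger) {
    PackageCustomization models = customization.getPackage("com.azure.storage.blob.models");
    // This call is known to throw today when applied to the enum property, so it is
    // kept commented out for now; restore it once the customization tooling is fixed.
    // TODO (illustrative): re-enable the SNAPHOT_OPERATION_RATE_EXCEEDED deprecation below.
    // models.getClass("BlobErrorCode").getProperty("SNAPHOT_OPERATION_RATE_EXCEEDED")
    //     .addAnnotation("Deprecated");
}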
nit: Do we have to use a method overload? Might it be easier to just use a different name?
public SpringServiceImpl withoutGitConfig() { if (isEnterpriseTier()) { return withGitConfig((ConfigurationServiceGitProperty) null); } else { return withGitConfig((ConfigServerGitProperty) null); } }
return withGitConfig((ConfigServerGitProperty) null);
public SpringServiceImpl withoutGitConfig() { return withGitConfig(null); }
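The nit above concerns the withGitConfig overloads: because withoutGitConfig passes a literal null, Java overload resolution needs the explicit casts seen in the method body, whereas distinctly named methods would not. A small self-contained sketch of that trade-off; every class and method name below is an invented stand-in for illustration, not the SDK's actual API:

public class OverloadVsName {
    static class ConfigServerGit { }            // stand-in for ConfigServerGitProperty
    static class ConfigurationServiceGit { }    // stand-in for ConfigurationServiceGitProperty

    // Overloaded API: clearing the config with null requires a cast to pick an overload.
    void withGitConfig(ConfigServerGit config) { }
    void withGitConfig(ConfigurationServiceGit config) { }

    void withoutGitConfigViaOverload(boolean enterprise) {
        if (enterprise) {
            withGitConfig((ConfigurationServiceGit) null);
        } else {
            withGitConfig((ConfigServerGit) null);
        }
    }

    // Distinctly named API: no casts, and each call site states which config it clears.
    void withConfigServerGitConfig(ConfigServerGit config) { }
    void withConfigurationServiceGitConfig(ConfigurationServiceGit config) { }

    void withoutGitConfigViaNames(boolean enterprise) {
        if (enterprise) {
            withConfigurationServiceGitConfig(null);
        } else {
            withConfigServerGitConfig(null);
        }
    }
}

The method_body_after above appears to settle the question the other way, leaving a single withGitConfig signature so the bare null compiles without any cast.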
class SpringServiceImpl extends GroupableResourceImpl<SpringService, ServiceResourceInner, SpringServiceImpl, AppPlatformManager> implements SpringService, SpringService.Definition, SpringService.Update { private final SpringServiceCertificatesImpl certificates = new SpringServiceCertificatesImpl(this); private final SpringAppsImpl apps = new SpringAppsImpl(this); private final SpringConfigurationServicesImpl configurationServices = new SpringConfigurationServicesImpl(this); private FunctionalTaskItem configServerTask = null; private FunctionalTaskItem monitoringSettingTask = null; private ServiceResourceInner patchToUpdate = new ServiceResourceInner(); private boolean updated; private boolean updateConfigurationServiceTask = true; private final Map<String, ConfigurationServiceGitRepository> gitRepositoryMap = new ConcurrentHashMap<>(); SpringServiceImpl(String name, ServiceResourceInner innerObject, AppPlatformManager manager) { super(name, innerObject, manager); } @Override public SpringServiceImpl update() { return super.update(); } @Override public Sku sku() { return innerModel().sku(); } @Override public SpringApps apps() { return apps; } @Override public SpringServiceCertificates certificates() { return certificates; } @Override public MonitoringSettingProperties getMonitoringSetting() { return getMonitoringSettingAsync().block(); } @Override public Mono<MonitoringSettingProperties> getMonitoringSettingAsync() { return manager().serviceClient().getMonitoringSettings().getAsync(resourceGroupName(), name()) .map(MonitoringSettingResourceInner::properties); } @Override public ConfigServerProperties getServerProperties() { return getServerPropertiesAsync().block(); } @Override public Mono<ConfigServerProperties> getServerPropertiesAsync() { return manager().serviceClient().getConfigServers().getAsync(resourceGroupName(), name()) .map(ConfigServerResourceInner::properties); } @Override public TestKeys listTestKeys() { return listTestKeysAsync().block(); } @Override public Mono<TestKeys> listTestKeysAsync() { return manager().serviceClient().getServices().listTestKeysAsync(resourceGroupName(), name()); } @Override public TestKeys regenerateTestKeys(TestKeyType keyType) { return regenerateTestKeysAsync(keyType).block(); } @Override public Mono<TestKeys> regenerateTestKeysAsync(TestKeyType keyType) { return manager().serviceClient().getServices().regenerateTestKeyAsync(resourceGroupName(), name(), new RegenerateTestKeyRequestPayload().withKeyType(keyType)); } @Override public void disableTestEndpoint() { disableTestEndpointAsync().block(); } @Override public Mono<Void> disableTestEndpointAsync() { return manager().serviceClient().getServices().disableTestEndpointAsync(resourceGroupName(), name()); } @Override public TestKeys enableTestEndpoint() { return enableTestEndpointAsync().block(); } @Override public Mono<TestKeys> enableTestEndpointAsync() { return manager().serviceClient().getServices().enableTestEndpointAsync(resourceGroupName(), name()); } @Override public SpringConfigurationService getDefaultConfigurationService() { return manager().serviceClient().getConfigurationServices().getAsync(resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME) .switchIfEmpty(Mono.empty()) .map(inner -> new SpringConfigurationServiceImpl(inner.name(), this, inner)) .block(); } @Override public SpringConfigurationServices configurationServices() { return this.configurationServices; } @Override public SpringServiceImpl withSku(String skuName) { return withSku(new Sku().withName(skuName)); } 
@Override public SpringServiceImpl withSku(SkuName skuName) { return withSku(skuName.toString()); } @Override public SpringServiceImpl withSku(String skuName, int capacity) { return withSku(new Sku().withName(skuName).withCapacity(capacity)); } @Override public SpringServiceImpl withSku(Sku sku) { innerModel().withSku(sku); if (isInUpdateMode()) { patchToUpdate.withSku(sku); updated = true; } return this; } @Override public SpringServiceImpl withEnterpriseTierSku() { withSku(SkuName.E0); return this; } @Override public SpringServiceImpl withTracing(String appInsightInstrumentationKey) { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync(resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties() .withAppInsightsInstrumentationKey(appInsightInstrumentationKey) .withTraceEnabled(true))) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutTracing() { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync( resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties().withTraceEnabled(false) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUri(String uri) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty().withUri(uri) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUriAndCredential(String uri, String username, String password) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty() .withUri(uri) .withUsername(username) .withPassword(password) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitConfig(ConfigServerGitProperty gitConfig) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty(gitConfig)) )) .then(context.voidMono()); return this; } @Override @Override public void beforeGroupCreateOrUpdate() { if (configServerTask != null) { this.addPostRunDependent(configServerTask); } if (monitoringSettingTask != null) { this.addPostRunDependent(monitoringSettingTask); } if (isEnterpriseTier()) { if (updateConfigurationServiceTask) { prepareCreateOrUpdateConfigurationService(); } updateConfigurationServiceTask = false; } configServerTask = null; monitoringSettingTask = null; } @Override public Mono<SpringService> createResourceAsync() { Mono<ServiceResourceInner> createOrUpdate; if (isInCreateMode()) { createOrUpdate = manager().serviceClient().getServices() .createOrUpdateAsync(resourceGroupName(), name(), innerModel()); if (isEnterpriseTier()) { createOrUpdate = createOrUpdate .flatMap(inner -> manager().serviceClient().getBuildServiceAgentPools().updatePutAsync( resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME, 
new BuildServiceAgentPoolResourceInner() .withProperties( new BuildServiceAgentPoolProperties() .withPoolSize( new BuildServiceAgentPoolSizeProperties() .withName("S1"))) ).then(Mono.just(inner))); } } else if (updated) { createOrUpdate = manager().serviceClient().getServices().updateAsync( resourceGroupName(), name(), patchToUpdate); patchToUpdate = new ServiceResourceInner(); updated = false; } else { return Mono.just(this); } return createOrUpdate .map(inner -> { this.setInner(inner); return this; }); } @Override public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) { clearCache(); if (isGroupFaulted) { return Mono.empty(); } return refreshAsync().then(); } @Override protected Mono<ServiceResourceInner> getInnerAsync() { return manager().serviceClient().getServices().getByResourceGroupAsync(resourceGroupName(), name()) .map(inner -> { clearCache(); return inner; }); } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties().withVaultUri(keyVaultUri).withKeyVaultCertName(certNameInKeyVault) ); return this; } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault, String certVersion) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties() .withVaultUri(keyVaultUri) .withKeyVaultCertName(certNameInKeyVault) .withCertVersion(certVersion) ); return this; } @Override public SpringServiceImpl withoutCertificate(String name) { certificates.prepareDelete(name); return this; } @Override public SpringServiceImpl withGitConfig(String uri, String branch, List<String> filePatterns) { return withGitConfigRepository(Constants.DEFAULT_TANZU_COMPONENT_NAME, uri, branch, filePatterns); } @Override public SpringServiceImpl withGitConfigRepository(String name, String uri, String branch, List<String> filePatterns) { if (CoreUtils.isNullOrEmpty(name)) { return this; } this.gitRepositoryMap.computeIfAbsent(name, key -> new ConfigurationServiceGitRepository() .withName(name) .withUri(uri) .withPatterns(filePatterns) .withLabel(branch) ); updateConfigurationServiceTask = true; return this; } @Override public SpringServiceImpl withGitConfig(ConfigurationServiceGitProperty gitConfig) { gitRepositoryMap.clear(); if (gitConfig != null && CoreUtils.isNullOrEmpty(gitConfig.repositories())) { for (ConfigurationServiceGitRepository repository : gitConfig.repositories()) { this.gitRepositoryMap.put(repository.name(), repository); } } updateConfigurationServiceTask = true; return this; } private void prepareCreateOrUpdateConfigurationService() { List<ConfigurationServiceGitRepository> repositories = new ArrayList<>(this.gitRepositoryMap.values()); this.configurationServices.prepareCreateOrUpdate(new ConfigurationServiceGitProperty().withRepositories(repositories)); } private boolean isInUpdateMode() { return !isInCreateMode(); } boolean isEnterpriseTier() { return innerModel().sku() != null && SkuName.E0.toString().equals(innerModel().sku().name()); } private void clearCache() { this.gitRepositoryMap.clear(); this.configurationServices.clear(); } }
class SpringServiceImpl extends GroupableResourceImpl<SpringService, ServiceResourceInner, SpringServiceImpl, AppPlatformManager> implements SpringService, SpringService.Definition, SpringService.Update { private final SpringServiceCertificatesImpl certificates = new SpringServiceCertificatesImpl(this); private final SpringAppsImpl apps = new SpringAppsImpl(this); private final SpringConfigurationServicesImpl configurationServices = new SpringConfigurationServicesImpl(this); private FunctionalTaskItem configServerTask = null; private FunctionalTaskItem monitoringSettingTask = null; private ServiceResourceInner patchToUpdate = new ServiceResourceInner(); private boolean updated; private final ConfigurationServiceConfig configurationServiceConfig = new ConfigurationServiceConfig(); SpringServiceImpl(String name, ServiceResourceInner innerObject, AppPlatformManager manager) { super(name, innerObject, manager); } @Override public SpringServiceImpl update() { return super.update(); } @Override public Sku sku() { return innerModel().sku(); } @Override public SpringApps apps() { return apps; } @Override public SpringServiceCertificates certificates() { return certificates; } @Override public MonitoringSettingProperties getMonitoringSetting() { return getMonitoringSettingAsync().block(); } @Override public Mono<MonitoringSettingProperties> getMonitoringSettingAsync() { return manager().serviceClient().getMonitoringSettings().getAsync(resourceGroupName(), name()) .map(MonitoringSettingResourceInner::properties); } @Override public ConfigServerProperties getServerProperties() { return getServerPropertiesAsync().block(); } @Override public Mono<ConfigServerProperties> getServerPropertiesAsync() { return manager().serviceClient().getConfigServers().getAsync(resourceGroupName(), name()) .map(ConfigServerResourceInner::properties); } @Override public TestKeys listTestKeys() { return listTestKeysAsync().block(); } @Override public Mono<TestKeys> listTestKeysAsync() { return manager().serviceClient().getServices().listTestKeysAsync(resourceGroupName(), name()); } @Override public TestKeys regenerateTestKeys(TestKeyType keyType) { return regenerateTestKeysAsync(keyType).block(); } @Override public Mono<TestKeys> regenerateTestKeysAsync(TestKeyType keyType) { return manager().serviceClient().getServices().regenerateTestKeyAsync(resourceGroupName(), name(), new RegenerateTestKeyRequestPayload().withKeyType(keyType)); } @Override public void disableTestEndpoint() { disableTestEndpointAsync().block(); } @Override public Mono<Void> disableTestEndpointAsync() { return manager().serviceClient().getServices().disableTestEndpointAsync(resourceGroupName(), name()); } @Override public TestKeys enableTestEndpoint() { return enableTestEndpointAsync().block(); } @Override public Mono<TestKeys> enableTestEndpointAsync() { return manager().serviceClient().getServices().enableTestEndpointAsync(resourceGroupName(), name()); } @Override public SpringConfigurationService getDefaultConfigurationService() { return manager().serviceClient().getConfigurationServices().getAsync(resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME) .switchIfEmpty(Mono.empty()) .map(inner -> new SpringConfigurationServiceImpl(inner.name(), this, inner)) .block(); } @Override public SpringServiceImpl withSku(String skuName) { return withSku(new Sku().withName(skuName)); } @Override public SpringServiceImpl withSku(SkuName skuName) { return withSku(skuName.toString()); } @Override public SpringServiceImpl withSku(String skuName, int 
capacity) { return withSku(new Sku().withName(skuName).withCapacity(capacity)); } @Override public SpringServiceImpl withSku(Sku sku) { innerModel().withSku(sku); if (isInUpdateMode()) { patchToUpdate.withSku(sku); updated = true; } return this; } @Override public SpringServiceImpl withEnterpriseTierSku() { withSku(SkuName.E0); return this; } @Override public SpringServiceImpl withTracing(String appInsightInstrumentationKey) { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync(resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties() .withAppInsightsInstrumentationKey(appInsightInstrumentationKey) .withTraceEnabled(true))) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutTracing() { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync( resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties().withTraceEnabled(false) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUri(String uri) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty().withUri(uri) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUriAndCredential(String uri, String username, String password) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty() .withUri(uri) .withUsername(username) .withPassword(password) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitConfig(ConfigServerGitProperty gitConfig) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty(gitConfig)) )) .then(context.voidMono()); return this; } @Override @Override public void beforeGroupCreateOrUpdate() { if (configServerTask != null) { this.addPostRunDependent(configServerTask); } if (monitoringSettingTask != null) { this.addPostRunDependent(monitoringSettingTask); } if (isEnterpriseTier()) { if (isInCreateMode() || configurationServiceConfig.needUpdate()) { prepareCreateOrUpdateConfigurationService(); configurationServiceConfig.clearUpdate(); } } configServerTask = null; monitoringSettingTask = null; } @Override public Mono<SpringService> createResourceAsync() { Mono<ServiceResourceInner> createOrUpdate; if (isInCreateMode()) { createOrUpdate = manager().serviceClient().getServices() .createOrUpdateAsync(resourceGroupName(), name(), innerModel()); if (isEnterpriseTier()) { createOrUpdate = createOrUpdate .flatMap(inner -> manager().serviceClient().getBuildServiceAgentPools().updatePutAsync( resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME, new BuildServiceAgentPoolResourceInner() .withProperties( new BuildServiceAgentPoolProperties() .withPoolSize( new 
BuildServiceAgentPoolSizeProperties() .withName("S1"))) ).then(Mono.just(inner))); } } else if (updated) { createOrUpdate = manager().serviceClient().getServices().updateAsync( resourceGroupName(), name(), patchToUpdate); patchToUpdate = new ServiceResourceInner(); updated = false; } else { return Mono.just(this); } return createOrUpdate .map(inner -> { this.setInner(inner); return this; }); } @Override public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) { return Mono .just(true) .map( ignored -> { clearCache(); return ignored; }) .then(); } @Override protected Mono<ServiceResourceInner> getInnerAsync() { return manager().serviceClient().getServices().getByResourceGroupAsync(resourceGroupName(), name()) .map(inner -> { clearCache(); return inner; }); } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties().withVaultUri(keyVaultUri).withKeyVaultCertName(certNameInKeyVault) ); return this; } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault, String certVersion) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties() .withVaultUri(keyVaultUri) .withKeyVaultCertName(certNameInKeyVault) .withCertVersion(certVersion) ); return this; } @Override public SpringServiceImpl withoutCertificate(String name) { certificates.prepareDelete(name); return this; } @Override public SpringServiceImpl withDefaultGitRepository(String uri, String branch, List<String> filePatterns) { return withGitRepository(Constants.DEFAULT_TANZU_COMPONENT_NAME, uri, branch, filePatterns); } @Override public SpringServiceImpl withGitRepository(String name, String uri, String branch, List<String> filePatterns) { if (CoreUtils.isNullOrEmpty(name)) { return this; } this.configurationServiceConfig.addRepository( new ConfigurationServiceGitRepository() .withName(name) .withUri(uri) .withPatterns(filePatterns) .withLabel(branch)); return this; } @Override public SpringServiceImpl withGitRepositoryConfig(ConfigurationServiceGitProperty gitConfig) { this.configurationServiceConfig.clearRepositories(); if (gitConfig != null && !CoreUtils.isNullOrEmpty(gitConfig.repositories())) { for (ConfigurationServiceGitRepository repository : gitConfig.repositories()) { this.configurationServiceConfig.addRepository(repository); } } return this; } @Override public SpringServiceImpl withoutGitRepository(String name) { this.configurationServiceConfig.removeRepository(name); return this; } @Override public SpringServiceImpl withoutGitRepositories() { this.configurationServiceConfig.clearRepositories(); return this; } private void prepareCreateOrUpdateConfigurationService() { List<ConfigurationServiceGitRepository> repositories = this.configurationServiceConfig.mergeRepositories(); this.configurationServices.prepareCreateOrUpdate(new ConfigurationServiceGitProperty().withRepositories(repositories)); } private boolean isInUpdateMode() { return !isInCreateMode(); } boolean isEnterpriseTier() { return innerModel().sku() != null && SkuName.E0.toString().equals(innerModel().sku().name()); } private void clearCache() { this.configurationServices.clear(); this.configurationServiceConfig.reset(); } private class ConfigurationServiceConfig { private final Map<String, ConfigurationServiceGitRepository> gitRepositoryMap = new ConcurrentHashMap<>(); private final Set<String> repositoriesToDelete = new HashSet<>(); private boolean update; 
private boolean clearRepositories; boolean needUpdate() { return update; } public void clearUpdate() { this.update = false; } void reset() { this.gitRepositoryMap.clear(); this.update = false; this.repositoriesToDelete.clear(); this.clearRepositories = false; } public void addRepository(ConfigurationServiceGitRepository repository) { this.gitRepositoryMap.putIfAbsent(repository.name(), repository); this.update = true; } public void clearRepositories() { this.gitRepositoryMap.clear(); this.clearRepositories = true; this.update = true; } public void removeRepository(String name) { this.repositoriesToDelete.add(name); this.update = true; } public List<ConfigurationServiceGitRepository> mergeRepositories() { if (this.clearRepositories) { return new ArrayList<>(this.gitRepositoryMap.values()); } else { Map<String, ConfigurationServiceGitRepository> existingGitRepositories = new HashMap<>(); if (isInUpdateMode()) { SpringConfigurationService configurationService = getDefaultConfigurationService(); if (configurationService != null) { List<ConfigurationServiceGitRepository> repositoryList = configurationService.innerModel().properties().settings() == null ? Collections.emptyList() : configurationService.innerModel().properties().settings().gitProperty().repositories(); if (repositoryList != null) { repositoryList.forEach(repository -> existingGitRepositories.put(repository.name(), repository)); } } } existingGitRepositories.putAll(gitRepositoryMap); for (String repositoryToDelete : repositoriesToDelete) { existingGitRepositories.remove(repositoryToDelete); } return new ArrayList<>(existingGitRepositories.values()); } } } }
do we need to support different serialization rules (e.g. on null values) at all?
public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject().writeStringField("op", op.toString()); if (from != null) { jsonWriter.writeStringField("from", from); } jsonWriter.writeStringField("path", path); if (value.isInitialized()) { String val = value.getValue(); if (val == null) { jsonWriter.writeNullField("value"); } else { jsonWriter.writeRawField("value", val); } } return jsonWriter.writeEndObject().flush(); }
if (value.isInitialized()) {
public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject().writeStringField("op", op.toString()); if (from != null) { jsonWriter.writeStringField("from", from); } jsonWriter.writeStringField("path", path); if (value.isInitialized()) { String val = value.getValue(); if (val == null) { jsonWriter.writeNullField("value"); } else { jsonWriter.writeRawField("value", val); } } return jsonWriter.writeEndObject().flush(); }
class JsonPatchOperation implements JsonCapable<JsonPatchOperation> { private final JsonPatchOperationKind op; private final String from; private final String path; private final Option<String> value; /** * Creates a JSON Patch operation. * <p> * When {@code optionalValue} is null the value won't be included in the JSON request, use {@link Optional * to indicate a JSON null. * * @param op The kind of operation. * @param from Optional from target path. * @param path Operation target path. * @param value Optional value. */ public JsonPatchOperation(JsonPatchOperationKind op, String from, String path, Option<String> value) { this.op = op; this.from = from; this.path = path; this.value = value; } /** * Gets the operation kind. * * @return The kind of operation. */ public JsonPatchOperationKind getOp() { return op; } /** * Gets the operation from target path. * * @return The operation from target path. */ public String getFrom() { return from; } /** * Gets the operation target path. * * @return The operation target path. */ public String getPath() { return path; } /** * Gets the operation value. * <p> * If the operation doesn't take a value {@link Option * * @return The operation value. */ public Option<String> getValue() { return value; } @Override public int hashCode() { return Objects.hash(op.toString(), from, path, (value == null) ? null : value.getValue()); } @Override public boolean equals(Object obj) { if (!(obj instanceof JsonPatchOperation)) { return false; } if (this == obj) { return true; } JsonPatchOperation other = (JsonPatchOperation) obj; return Objects.equals(op, other.op) && Objects.equals(from, other.from) && Objects.equals(path, other.path) && Objects.equals(value, other.value); } @Override public String toString() { return toJson(new StringBuilder()).toString(); } @Override public StringBuilder toJson(StringBuilder stringBuilder) { stringBuilder.append("{\"op\":\"") .append(op.toString()) .append("\""); if (from != null) { stringBuilder.append(",\"from\":\"") .append(from) .append("\""); } stringBuilder.append(",\"path\":\"") .append(path) .append("\""); if (value.isInitialized()) { stringBuilder.append(",\"value\":") .append(value.getValue()); } return stringBuilder.append("}"); } @Override /** * Creates an instance of {@link JsonPatchOperation} by reading the {@link JsonReader}. * <p> * null will be returned if the {@link JsonReader} points to {@link JsonToken * <p> * {@link IllegalStateException} will be thrown if the {@link JsonReader} doesn't point to either {@link * JsonToken * * @param jsonReader The {@link JsonReader} that will be read. 
* @return An instance of {@link JsonPatchOperation} if the {@link JsonReader} is pointing to {@link * JsonPatchOperation} JSON content, or null if it's pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to either {@link JsonToken * {@link JsonToken */ public static JsonPatchOperation fromJson(JsonReader jsonReader) { return JsonUtils.deserializeObject(jsonReader, (reader, token) -> { JsonPatchOperationKind op = null; String from = null; String path = null; Option<String> value = Option.uninitialized(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); token = jsonReader.nextToken(); switch (fieldName) { case "op": op = JsonPatchOperationKind.fromString(jsonReader.getStringValue()); break; case "from": from = jsonReader.getStringValue(); break; case "path": path = jsonReader.getStringValue(); break; case "value": if (token == JsonToken.START_ARRAY || token == JsonToken.START_OBJECT) { value = Option.of(jsonReader.readChildren()); } else if (token == JsonToken.NULL) { value = Option.empty(); } else { value = Option.of(jsonReader.getTextValue()); } break; default: break; } } return new JsonPatchOperation(op, from, path, value); }); } }
class JsonPatchOperation implements JsonCapable<JsonPatchOperation> { private final JsonPatchOperationKind op; private final String from; private final String path; private final Option<String> value; /** * Creates a JSON Patch operation. * <p> * When {@code optionalValue} is null the value won't be included in the JSON request, use {@link Optional * to indicate a JSON null. * * @param op The kind of operation. * @param from Optional from target path. * @param path Operation target path. * @param value Optional value. */ public JsonPatchOperation(JsonPatchOperationKind op, String from, String path, Option<String> value) { this.op = op; this.from = from; this.path = path; this.value = value; } /** * Gets the operation kind. * * @return The kind of operation. */ public JsonPatchOperationKind getOp() { return op; } /** * Gets the operation from target path. * * @return The operation from target path. */ public String getFrom() { return from; } /** * Gets the operation target path. * * @return The operation target path. */ public String getPath() { return path; } /** * Gets the operation value. * <p> * If the operation doesn't take a value {@link Option * * @return The operation value. */ public Option<String> getValue() { return value; } @Override public int hashCode() { return Objects.hash(op.toString(), from, path, (value == null) ? null : value.getValue()); } @Override public boolean equals(Object obj) { if (!(obj instanceof JsonPatchOperation)) { return false; } if (this == obj) { return true; } JsonPatchOperation other = (JsonPatchOperation) obj; return Objects.equals(op, other.op) && Objects.equals(from, other.from) && Objects.equals(path, other.path) && Objects.equals(value, other.value); } @Override public String toString() { return toJson(new StringBuilder()).toString(); } @Override public StringBuilder toJson(StringBuilder stringBuilder) { stringBuilder.append("{\"op\":\"") .append(op.toString()) .append("\""); if (from != null) { stringBuilder.append(",\"from\":\"") .append(from) .append("\""); } stringBuilder.append(",\"path\":\"") .append(path) .append("\""); if (value.isInitialized()) { stringBuilder.append(",\"value\":") .append(value.getValue()); } return stringBuilder.append("}"); } @Override /** * Creates an instance of {@link JsonPatchOperation} by reading the {@link JsonReader}. * <p> * null will be returned if the {@link JsonReader} points to {@link JsonToken * <p> * {@link IllegalStateException} will be thrown if the {@link JsonReader} doesn't point to either {@link * JsonToken * * @param jsonReader The {@link JsonReader} that will be read. 
* @return An instance of {@link JsonPatchOperation} if the {@link JsonReader} is pointing to {@link * JsonPatchOperation} JSON content, or null if it's pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to either {@link JsonToken * {@link JsonToken */ public static JsonPatchOperation fromJson(JsonReader jsonReader) { return JsonUtils.deserializeObject(jsonReader, (reader, token) -> { JsonPatchOperationKind op = null; String from = null; String path = null; Option<String> value = Option.uninitialized(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); token = jsonReader.nextToken(); switch (fieldName) { case "op": op = JsonPatchOperationKind.fromString(jsonReader.getStringValue()); break; case "from": from = jsonReader.getStringValue(); break; case "path": path = jsonReader.getStringValue(); break; case "value": if (token == JsonToken.START_ARRAY || token == JsonToken.START_OBJECT) { value = Option.of(jsonReader.readChildren()); } else if (token == JsonToken.NULL) { value = Option.empty(); } else { value = Option.of(jsonReader.getTextValue()); } break; default: break; } } return new JsonPatchOperation(op, from, path, value); }); } }
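As a footnote to the serialization question above, the three states of the Option<String> value field produce three different JSON shapes. A minimal sketch, assuming the JsonPatchOperation and Option types shown above are in scope and using hypothetical paths and values:

// Sketch only: the expected outputs in the comments follow from the toJson logic
// above (raw value written as-is, explicit JSON null, or the "value" field omitted).
JsonPatchOperation withValue = new JsonPatchOperation(
    JsonPatchOperationKind.fromString("add"), null, "/foo", Option.of("\"bar\""));
// withValue.toString() -> {"op":"add","path":"/foo","value":"bar"}

JsonPatchOperation withJsonNull = new JsonPatchOperation(
    JsonPatchOperationKind.fromString("replace"), null, "/foo", Option.empty());
// withJsonNull.toString() -> {"op":"replace","path":"/foo","value":null}

JsonPatchOperation withoutValue = new JsonPatchOperation(
    JsonPatchOperationKind.fromString("remove"), null, "/foo", Option.uninitialized());
// withoutValue.toString() -> {"op":"remove","path":"/foo"}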
will there be a readme.md file? please help to add a more specific description on usage of user token, keyvault, mysql flexible, and network features
public static void main(String[] args) throws Exception { TokenCredential credential = new DefaultAzureCredentialBuilder() .authorityHost(AzureAuthorityHosts.AZURE_PUBLIC_CLOUD) .build(); AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE); HttpPipelinePolicy userTokenPolicy = (context, next) -> { Mono<String> token = null; String bearerTokenPrefix = "bearer "; String authorization = context.getHttpRequest().getHeaders().getValue("Authorization"); if (authorization != null && authorization.toLowerCase(Locale.ROOT).startsWith(bearerTokenPrefix)) { token = Mono.just(authorization.substring(bearerTokenPrefix.length())); } else { token = credential .getToken(new TokenRequestContext().addScopes(profile.getEnvironment().getResourceManagerEndpoint() + "/.default")) .map(AccessToken::getToken); } return token .flatMap(accessToken -> { context.getHttpRequest().getHeaders().set(USER_TOKEN_HEADER, accessToken); return next.process(); }); }; AzureResourceManager azureResourceManager = AzureResourceManager.authenticate(credential, profile).withDefaultSubscription(); ServiceLinkerManager serviceLinkerManager = ServiceLinkerManager.authenticate(credential, profile); ServiceLinkerManager serviceLinkerManagerWithUserToken = ServiceLinkerManager.configure().withPolicy(userTokenPolicy).authenticate(credential, profile); creatSpringCloudAndSQLConnection(azureResourceManager, serviceLinkerManager); createWebAppAndKeyVaultConnectionWithUserIdentity(azureResourceManager, serviceLinkerManagerWithUserToken); }
public static void main(String[] args) throws Exception { TokenCredential credential = new DefaultAzureCredentialBuilder() .authorityHost(AzureAuthorityHosts.AZURE_PUBLIC_CLOUD) .build(); AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE); HttpPipelinePolicy userTokenPolicy = new UserTokenPolicy(credential, profile.getEnvironment()); AzureResourceManager azureResourceManager = AzureResourceManager.authenticate(credential, profile).withDefaultSubscription(); ServiceLinkerManager serviceLinkerManager = ServiceLinkerManager.authenticate(credential, profile); ServiceLinkerManager serviceLinkerManagerWithUserToken = ServiceLinkerManager.configure().withPolicy(userTokenPolicy).authenticate(credential, profile); createSpringCloudAndSQLConnection(azureResourceManager, serviceLinkerManager); createWebAppAndKeyVaultConnectionWithUserIdentity(azureResourceManager, serviceLinkerManagerWithUserToken); }
class CreateServiceLinker { private static final String USER_TOKEN_HEADER = "x-ms-serviceconnector-user-token"; /** * Main entry point. * * @param args the parameters */ private static void creatSpringCloudAndSQLConnection(AzureResourceManager azureResourceManager, ServiceLinkerManager serviceLinkerManager) { String resourceGroupName = "rg" + randomString(8); Region region = Region.US_EAST; String springServiceName = "spring" + randomString(8); String springAppName = "app" + randomString(8); String sqlServerName = "sqlserver" + randomString(8); String sqlDatabaseName = "sqldb" + randomString(8); String sqlUserName = "sql" + randomString(8); String sqlPassword = "5$Ql" + randomString(8); SpringService springService = azureResourceManager.springServices().define(springServiceName) .withRegion(region) .withNewResourceGroup(resourceGroupName) .withSku(SkuName.B0) .create(); SpringApp springApp = springService.apps().define(springAppName) .withDefaultActiveDeployment() .create(); SqlServer sqlServer = azureResourceManager.sqlServers().define(sqlServerName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .withAdministratorLogin(sqlUserName) .withAdministratorPassword(sqlPassword) .create(); SqlDatabase sqlDatabase = sqlServer.databases().define(sqlDatabaseName) .withBasicEdition() .create(); LinkerResource linker = serviceLinkerManager.linkers().define("sql") .withExistingResourceUri(springApp.getActiveDeployment().id()) .withTargetService( new AzureResource() .withId(sqlDatabase.id()) ) .withAuthInfo( new SecretAuthInfo() .withName(sqlUserName) .withSecretInfo( new ValueSecretInfo() .withValue(sqlPassword) ) ) .withClientType(ClientType.SPRING_BOOT) .create(); System.out.println("Configurations:"); for (SourceConfiguration sourceConfiguration : linker.listConfigurations().configurations()) { System.out.printf("\t%s: %s%n", sourceConfiguration.name(), sourceConfiguration.value()); } } private static void createWebAppAndKeyVaultConnectionWithUserIdentity(AzureResourceManager azureResourceManager, ServiceLinkerManager serviceLinkerManager) { String resourceGroupName = "rg" + randomString(8); Region region = Region.US_EAST; String webAppName = "web" + randomString(8); String keyVaultName = "vault" + randomString(8); String identityName = "identity" + randomString(8); WebApp webApp = azureResourceManager.webApps().define(webAppName) .withRegion(region) .withNewResourceGroup(resourceGroupName) .withNewLinuxPlan(PricingTier.BASIC_B1) .withBuiltInImage(RuntimeStack.NODEJS_14_LTS) .create(); Vault vault = azureResourceManager.vaults().define(keyVaultName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .withEmptyAccessPolicy() .create(); Identity identity = azureResourceManager.identities().define(identityName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .create(); LinkerResource linker = serviceLinkerManager.linkers().define("keyvault") .withExistingResourceUri(webApp.id()) .withTargetService( new AzureResource() .withId(vault.id()) ) .withAuthInfo( new UserAssignedIdentityAuthInfo() .withSubscriptionId(azureResourceManager.subscriptionId()) .withClientId(identity.clientId()) ) .withClientType(ClientType.NODEJS) .create(); System.out.println("Configurations:"); for (SourceConfiguration sourceConfiguration : linker.listConfigurations().configurations()) { System.out.printf("\t%s: %s%n", sourceConfiguration.name(), sourceConfiguration.value()); } } private static String randomString(int length) { return UUID.randomUUID().toString().replace("-", 
"").substring(0, length); } }
class CreateServiceLinker { private static final String USER_TOKEN_HEADER = "x-ms-serviceconnector-user-token"; /** * Main entry point. * * @param args the parameters */ private static void createSpringCloudAndSQLConnection(AzureResourceManager azureResourceManager, ServiceLinkerManager serviceLinkerManager) { String resourceGroupName = "rg" + randomString(8); Region region = Region.US_EAST; String springServiceName = "spring" + randomString(8); String springAppName = "app" + randomString(8); String sqlServerName = "sqlserver" + randomString(8); String sqlDatabaseName = "sqldb" + randomString(8); String sqlUserName = "sql" + randomString(8); String sqlPassword = "5$Ql" + randomString(8); SpringService springService = azureResourceManager.springServices().define(springServiceName) .withRegion(region) .withNewResourceGroup(resourceGroupName) .withSku(SkuName.B0) .create(); SpringApp springApp = springService.apps().define(springAppName) .withDefaultActiveDeployment() .create(); SqlServer sqlServer = azureResourceManager.sqlServers().define(sqlServerName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .withAdministratorLogin(sqlUserName) .withAdministratorPassword(sqlPassword) .create(); SqlDatabase sqlDatabase = sqlServer.databases().define(sqlDatabaseName) .withBasicEdition() .create(); LinkerResource linker = serviceLinkerManager.linkers().define("sql") .withExistingResourceUri(springApp.getActiveDeployment().id()) .withTargetService( new AzureResource() .withId(sqlDatabase.id()) ) .withAuthInfo( new SecretAuthInfo() .withName(sqlUserName) .withSecretInfo( new ValueSecretInfo() .withValue(sqlPassword) ) ) .withClientType(ClientType.SPRING_BOOT) .create(); System.out.println("Configurations:"); for (SourceConfiguration sourceConfiguration : linker.listConfigurations().configurations()) { System.out.printf("\t%s: %s%n", sourceConfiguration.name(), sourceConfiguration.value()); } } private static void createWebAppAndKeyVaultConnectionWithUserIdentity(AzureResourceManager azureResourceManager, ServiceLinkerManager serviceLinkerManager) { String resourceGroupName = "rg" + randomString(8); Region region = Region.US_EAST; String webAppName = "web" + randomString(8); String keyVaultName = "vault" + randomString(8); String identityName = "identity" + randomString(8); WebApp webApp = azureResourceManager.webApps().define(webAppName) .withRegion(region) .withNewResourceGroup(resourceGroupName) .withNewLinuxPlan(PricingTier.BASIC_B1) .withBuiltInImage(RuntimeStack.NODEJS_14_LTS) .create(); Vault vault = azureResourceManager.vaults().define(keyVaultName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .withEmptyAccessPolicy() .create(); Identity identity = azureResourceManager.identities().define(identityName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .create(); LinkerResource linker = serviceLinkerManager.linkers().define("keyvault") .withExistingResourceUri(webApp.id()) .withTargetService( new AzureResource() .withId(vault.id()) ) .withAuthInfo( new UserAssignedIdentityAuthInfo() .withSubscriptionId(azureResourceManager.subscriptionId()) .withClientId(identity.clientId()) ) .withClientType(ClientType.NODEJS) .create(); System.out.println("Configurations:"); for (SourceConfiguration sourceConfiguration : linker.listConfigurations().configurations()) { System.out.printf("\t%s: %s%n", sourceConfiguration.name(), sourceConfiguration.value()); } } private static String randomString(int length) { return UUID.randomUUID().toString().replace("-", 
"").substring(0, length); } public static class UserTokenPolicy implements HttpPipelinePolicy { private final TokenCredential credential; private final AzureEnvironment environment; public UserTokenPolicy(TokenCredential credential, AzureEnvironment environment) { this.credential = credential; this.environment = environment; } @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { Mono<String> token = null; String bearerTokenPrefix = "bearer "; String authorization = context.getHttpRequest().getHeaders().getValue("Authorization"); if (authorization != null && authorization.toLowerCase(Locale.ROOT).startsWith(bearerTokenPrefix)) { token = Mono.just(authorization.substring(bearerTokenPrefix.length())); } else { token = credential .getToken(new TokenRequestContext().addScopes(environment.getResourceManagerEndpoint() + "/.default")) .map(AccessToken::getToken); } return token .flatMap(accessToken -> { context.getHttpRequest().getHeaders().set(USER_TOKEN_HEADER, accessToken); return next.process(); }); } } }
Do we reset `updateConfigurationServiceTask` as well?
private void clearCache() { this.gitRepositoryMap.clear(); this.configurationServices.clear(); }
this.configurationServices.clear();
private void clearCache() { this.configurationServices.clear(); this.configurationServiceConfig.reset(); }
class SpringServiceImpl extends GroupableResourceImpl<SpringService, ServiceResourceInner, SpringServiceImpl, AppPlatformManager> implements SpringService, SpringService.Definition, SpringService.Update { private final SpringServiceCertificatesImpl certificates = new SpringServiceCertificatesImpl(this); private final SpringAppsImpl apps = new SpringAppsImpl(this); private final SpringConfigurationServicesImpl configurationServices = new SpringConfigurationServicesImpl(this); private FunctionalTaskItem configServerTask = null; private FunctionalTaskItem monitoringSettingTask = null; private ServiceResourceInner patchToUpdate = new ServiceResourceInner(); private boolean updated; private boolean updateConfigurationServiceTask = true; private final Map<String, ConfigurationServiceGitRepository> gitRepositoryMap = new ConcurrentHashMap<>(); SpringServiceImpl(String name, ServiceResourceInner innerObject, AppPlatformManager manager) { super(name, innerObject, manager); } @Override public SpringServiceImpl update() { return super.update(); } @Override public Sku sku() { return innerModel().sku(); } @Override public SpringApps apps() { return apps; } @Override public SpringServiceCertificates certificates() { return certificates; } @Override public MonitoringSettingProperties getMonitoringSetting() { return getMonitoringSettingAsync().block(); } @Override public Mono<MonitoringSettingProperties> getMonitoringSettingAsync() { return manager().serviceClient().getMonitoringSettings().getAsync(resourceGroupName(), name()) .map(MonitoringSettingResourceInner::properties); } @Override public ConfigServerProperties getServerProperties() { return getServerPropertiesAsync().block(); } @Override public Mono<ConfigServerProperties> getServerPropertiesAsync() { return manager().serviceClient().getConfigServers().getAsync(resourceGroupName(), name()) .map(ConfigServerResourceInner::properties); } @Override public TestKeys listTestKeys() { return listTestKeysAsync().block(); } @Override public Mono<TestKeys> listTestKeysAsync() { return manager().serviceClient().getServices().listTestKeysAsync(resourceGroupName(), name()); } @Override public TestKeys regenerateTestKeys(TestKeyType keyType) { return regenerateTestKeysAsync(keyType).block(); } @Override public Mono<TestKeys> regenerateTestKeysAsync(TestKeyType keyType) { return manager().serviceClient().getServices().regenerateTestKeyAsync(resourceGroupName(), name(), new RegenerateTestKeyRequestPayload().withKeyType(keyType)); } @Override public void disableTestEndpoint() { disableTestEndpointAsync().block(); } @Override public Mono<Void> disableTestEndpointAsync() { return manager().serviceClient().getServices().disableTestEndpointAsync(resourceGroupName(), name()); } @Override public TestKeys enableTestEndpoint() { return enableTestEndpointAsync().block(); } @Override public Mono<TestKeys> enableTestEndpointAsync() { return manager().serviceClient().getServices().enableTestEndpointAsync(resourceGroupName(), name()); } @Override public SpringConfigurationService getDefaultConfigurationService() { return manager().serviceClient().getConfigurationServices().getAsync(resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME) .switchIfEmpty(Mono.empty()) .map(inner -> new SpringConfigurationServiceImpl(inner.name(), this, inner)) .block(); } @Override public SpringConfigurationServices configurationServices() { return this.configurationServices; } @Override public SpringServiceImpl withSku(String skuName) { return withSku(new Sku().withName(skuName)); } 
@Override public SpringServiceImpl withSku(SkuName skuName) { return withSku(skuName.toString()); } @Override public SpringServiceImpl withSku(String skuName, int capacity) { return withSku(new Sku().withName(skuName).withCapacity(capacity)); } @Override public SpringServiceImpl withSku(Sku sku) { innerModel().withSku(sku); if (isInUpdateMode()) { patchToUpdate.withSku(sku); updated = true; } return this; } @Override public SpringServiceImpl withEnterpriseTierSku() { withSku(SkuName.E0); return this; } @Override public SpringServiceImpl withTracing(String appInsightInstrumentationKey) { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync(resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties() .withAppInsightsInstrumentationKey(appInsightInstrumentationKey) .withTraceEnabled(true))) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutTracing() { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync( resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties().withTraceEnabled(false) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUri(String uri) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty().withUri(uri) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUriAndCredential(String uri, String username, String password) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty() .withUri(uri) .withUsername(username) .withPassword(password) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitConfig(ConfigServerGitProperty gitConfig) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty(gitConfig)) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutGitConfig() { if (isEnterpriseTier()) { return withGitConfig((ConfigurationServiceGitProperty) null); } else { return withGitConfig((ConfigServerGitProperty) null); } } @Override public void beforeGroupCreateOrUpdate() { if (configServerTask != null) { this.addPostRunDependent(configServerTask); } if (monitoringSettingTask != null) { this.addPostRunDependent(monitoringSettingTask); } if (isEnterpriseTier()) { if (updateConfigurationServiceTask) { prepareCreateOrUpdateConfigurationService(); } updateConfigurationServiceTask = false; } configServerTask = null; monitoringSettingTask = null; } @Override public Mono<SpringService> createResourceAsync() { Mono<ServiceResourceInner> createOrUpdate; if (isInCreateMode()) { createOrUpdate = manager().serviceClient().getServices() .createOrUpdateAsync(resourceGroupName(), name(), innerModel()); if (isEnterpriseTier()) { createOrUpdate = 
createOrUpdate .flatMap(inner -> manager().serviceClient().getBuildServiceAgentPools().updatePutAsync( resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME, new BuildServiceAgentPoolResourceInner() .withProperties( new BuildServiceAgentPoolProperties() .withPoolSize( new BuildServiceAgentPoolSizeProperties() .withName("S1"))) ).then(Mono.just(inner))); } } else if (updated) { createOrUpdate = manager().serviceClient().getServices().updateAsync( resourceGroupName(), name(), patchToUpdate); patchToUpdate = new ServiceResourceInner(); updated = false; } else { return Mono.just(this); } return createOrUpdate .map(inner -> { this.setInner(inner); return this; }); } @Override public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) { clearCache(); if (isGroupFaulted) { return Mono.empty(); } return refreshAsync().then(); } @Override protected Mono<ServiceResourceInner> getInnerAsync() { return manager().serviceClient().getServices().getByResourceGroupAsync(resourceGroupName(), name()) .map(inner -> { clearCache(); return inner; }); } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties().withVaultUri(keyVaultUri).withKeyVaultCertName(certNameInKeyVault) ); return this; } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault, String certVersion) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties() .withVaultUri(keyVaultUri) .withKeyVaultCertName(certNameInKeyVault) .withCertVersion(certVersion) ); return this; } @Override public SpringServiceImpl withoutCertificate(String name) { certificates.prepareDelete(name); return this; } @Override public SpringServiceImpl withGitConfig(String uri, String branch, List<String> filePatterns) { return withGitConfigRepository(Constants.DEFAULT_TANZU_COMPONENT_NAME, uri, branch, filePatterns); } @Override public SpringServiceImpl withGitConfigRepository(String name, String uri, String branch, List<String> filePatterns) { if (CoreUtils.isNullOrEmpty(name)) { return this; } this.gitRepositoryMap.computeIfAbsent(name, key -> new ConfigurationServiceGitRepository() .withName(name) .withUri(uri) .withPatterns(filePatterns) .withLabel(branch) ); updateConfigurationServiceTask = true; return this; } @Override public SpringServiceImpl withGitConfig(ConfigurationServiceGitProperty gitConfig) { gitRepositoryMap.clear(); if (gitConfig != null && CoreUtils.isNullOrEmpty(gitConfig.repositories())) { for (ConfigurationServiceGitRepository repository : gitConfig.repositories()) { this.gitRepositoryMap.put(repository.name(), repository); } } updateConfigurationServiceTask = true; return this; } private void prepareCreateOrUpdateConfigurationService() { List<ConfigurationServiceGitRepository> repositories = new ArrayList<>(this.gitRepositoryMap.values()); this.configurationServices.prepareCreateOrUpdate(new ConfigurationServiceGitProperty().withRepositories(repositories)); } private boolean isInUpdateMode() { return !isInCreateMode(); } boolean isEnterpriseTier() { return innerModel().sku() != null && SkuName.E0.toString().equals(innerModel().sku().name()); } }
class SpringServiceImpl extends GroupableResourceImpl<SpringService, ServiceResourceInner, SpringServiceImpl, AppPlatformManager> implements SpringService, SpringService.Definition, SpringService.Update { private final SpringServiceCertificatesImpl certificates = new SpringServiceCertificatesImpl(this); private final SpringAppsImpl apps = new SpringAppsImpl(this); private final SpringConfigurationServicesImpl configurationServices = new SpringConfigurationServicesImpl(this); private FunctionalTaskItem configServerTask = null; private FunctionalTaskItem monitoringSettingTask = null; private ServiceResourceInner patchToUpdate = new ServiceResourceInner(); private boolean updated; private final ConfigurationServiceConfig configurationServiceConfig = new ConfigurationServiceConfig(); SpringServiceImpl(String name, ServiceResourceInner innerObject, AppPlatformManager manager) { super(name, innerObject, manager); } @Override public SpringServiceImpl update() { return super.update(); } @Override public Sku sku() { return innerModel().sku(); } @Override public SpringApps apps() { return apps; } @Override public SpringServiceCertificates certificates() { return certificates; } @Override public MonitoringSettingProperties getMonitoringSetting() { return getMonitoringSettingAsync().block(); } @Override public Mono<MonitoringSettingProperties> getMonitoringSettingAsync() { return manager().serviceClient().getMonitoringSettings().getAsync(resourceGroupName(), name()) .map(MonitoringSettingResourceInner::properties); } @Override public ConfigServerProperties getServerProperties() { return getServerPropertiesAsync().block(); } @Override public Mono<ConfigServerProperties> getServerPropertiesAsync() { return manager().serviceClient().getConfigServers().getAsync(resourceGroupName(), name()) .map(ConfigServerResourceInner::properties); } @Override public TestKeys listTestKeys() { return listTestKeysAsync().block(); } @Override public Mono<TestKeys> listTestKeysAsync() { return manager().serviceClient().getServices().listTestKeysAsync(resourceGroupName(), name()); } @Override public TestKeys regenerateTestKeys(TestKeyType keyType) { return regenerateTestKeysAsync(keyType).block(); } @Override public Mono<TestKeys> regenerateTestKeysAsync(TestKeyType keyType) { return manager().serviceClient().getServices().regenerateTestKeyAsync(resourceGroupName(), name(), new RegenerateTestKeyRequestPayload().withKeyType(keyType)); } @Override public void disableTestEndpoint() { disableTestEndpointAsync().block(); } @Override public Mono<Void> disableTestEndpointAsync() { return manager().serviceClient().getServices().disableTestEndpointAsync(resourceGroupName(), name()); } @Override public TestKeys enableTestEndpoint() { return enableTestEndpointAsync().block(); } @Override public Mono<TestKeys> enableTestEndpointAsync() { return manager().serviceClient().getServices().enableTestEndpointAsync(resourceGroupName(), name()); } @Override public SpringConfigurationService getDefaultConfigurationService() { return manager().serviceClient().getConfigurationServices().getAsync(resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME) .switchIfEmpty(Mono.empty()) .map(inner -> new SpringConfigurationServiceImpl(inner.name(), this, inner)) .block(); } @Override public SpringServiceImpl withSku(String skuName) { return withSku(new Sku().withName(skuName)); } @Override public SpringServiceImpl withSku(SkuName skuName) { return withSku(skuName.toString()); } @Override public SpringServiceImpl withSku(String skuName, int 
capacity) { return withSku(new Sku().withName(skuName).withCapacity(capacity)); } @Override public SpringServiceImpl withSku(Sku sku) { innerModel().withSku(sku); if (isInUpdateMode()) { patchToUpdate.withSku(sku); updated = true; } return this; } @Override public SpringServiceImpl withEnterpriseTierSku() { withSku(SkuName.E0); return this; } @Override public SpringServiceImpl withTracing(String appInsightInstrumentationKey) { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync(resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties() .withAppInsightsInstrumentationKey(appInsightInstrumentationKey) .withTraceEnabled(true))) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutTracing() { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync( resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties().withTraceEnabled(false) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUri(String uri) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty().withUri(uri) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUriAndCredential(String uri, String username, String password) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty() .withUri(uri) .withUsername(username) .withPassword(password) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitConfig(ConfigServerGitProperty gitConfig) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty(gitConfig)) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutGitConfig() { return withGitConfig(null); } @Override public void beforeGroupCreateOrUpdate() { if (configServerTask != null) { this.addPostRunDependent(configServerTask); } if (monitoringSettingTask != null) { this.addPostRunDependent(monitoringSettingTask); } if (isEnterpriseTier()) { if (isInCreateMode() || configurationServiceConfig.needUpdate()) { prepareCreateOrUpdateConfigurationService(); configurationServiceConfig.clearUpdate(); } } configServerTask = null; monitoringSettingTask = null; } @Override public Mono<SpringService> createResourceAsync() { Mono<ServiceResourceInner> createOrUpdate; if (isInCreateMode()) { createOrUpdate = manager().serviceClient().getServices() .createOrUpdateAsync(resourceGroupName(), name(), innerModel()); if (isEnterpriseTier()) { createOrUpdate = createOrUpdate .flatMap(inner -> manager().serviceClient().getBuildServiceAgentPools().updatePutAsync( resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME, new BuildServiceAgentPoolResourceInner() 
.withProperties( new BuildServiceAgentPoolProperties() .withPoolSize( new BuildServiceAgentPoolSizeProperties() .withName("S1"))) ).then(Mono.just(inner))); } } else if (updated) { createOrUpdate = manager().serviceClient().getServices().updateAsync( resourceGroupName(), name(), patchToUpdate); patchToUpdate = new ServiceResourceInner(); updated = false; } else { return Mono.just(this); } return createOrUpdate .map(inner -> { this.setInner(inner); return this; }); } @Override public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) { return Mono .just(true) .map( ignored -> { clearCache(); return ignored; }) .then(); } @Override protected Mono<ServiceResourceInner> getInnerAsync() { return manager().serviceClient().getServices().getByResourceGroupAsync(resourceGroupName(), name()) .map(inner -> { clearCache(); return inner; }); } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties().withVaultUri(keyVaultUri).withKeyVaultCertName(certNameInKeyVault) ); return this; } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault, String certVersion) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties() .withVaultUri(keyVaultUri) .withKeyVaultCertName(certNameInKeyVault) .withCertVersion(certVersion) ); return this; } @Override public SpringServiceImpl withoutCertificate(String name) { certificates.prepareDelete(name); return this; } @Override public SpringServiceImpl withDefaultGitRepository(String uri, String branch, List<String> filePatterns) { return withGitRepository(Constants.DEFAULT_TANZU_COMPONENT_NAME, uri, branch, filePatterns); } @Override public SpringServiceImpl withGitRepository(String name, String uri, String branch, List<String> filePatterns) { if (CoreUtils.isNullOrEmpty(name)) { return this; } this.configurationServiceConfig.addRepository( new ConfigurationServiceGitRepository() .withName(name) .withUri(uri) .withPatterns(filePatterns) .withLabel(branch)); return this; } @Override public SpringServiceImpl withGitRepositoryConfig(ConfigurationServiceGitProperty gitConfig) { this.configurationServiceConfig.clearRepositories(); if (gitConfig != null && !CoreUtils.isNullOrEmpty(gitConfig.repositories())) { for (ConfigurationServiceGitRepository repository : gitConfig.repositories()) { this.configurationServiceConfig.addRepository(repository); } } return this; } @Override public SpringServiceImpl withoutGitRepository(String name) { this.configurationServiceConfig.removeRepository(name); return this; } @Override public SpringServiceImpl withoutGitRepositories() { this.configurationServiceConfig.clearRepositories(); return this; } private void prepareCreateOrUpdateConfigurationService() { List<ConfigurationServiceGitRepository> repositories = this.configurationServiceConfig.mergeRepositories(); this.configurationServices.prepareCreateOrUpdate(new ConfigurationServiceGitProperty().withRepositories(repositories)); } private boolean isInUpdateMode() { return !isInCreateMode(); } boolean isEnterpriseTier() { return innerModel().sku() != null && SkuName.E0.toString().equals(innerModel().sku().name()); } private class ConfigurationServiceConfig { private final Map<String, ConfigurationServiceGitRepository> gitRepositoryMap = new ConcurrentHashMap<>(); private final Set<String> repositoriesToDelete = new HashSet<>(); private boolean update; private boolean clearRepositories; 
boolean needUpdate() { return update; } public void clearUpdate() { this.update = false; } void reset() { this.gitRepositoryMap.clear(); this.update = false; this.repositoriesToDelete.clear(); this.clearRepositories = false; } public void addRepository(ConfigurationServiceGitRepository repository) { this.gitRepositoryMap.putIfAbsent(repository.name(), repository); this.update = true; } public void clearRepositories() { this.gitRepositoryMap.clear(); this.clearRepositories = true; this.update = true; } public void removeRepository(String name) { this.repositoriesToDelete.add(name); this.update = true; } public List<ConfigurationServiceGitRepository> mergeRepositories() { if (this.clearRepositories) { return new ArrayList<>(this.gitRepositoryMap.values()); } else { Map<String, ConfigurationServiceGitRepository> existingGitRepositories = new HashMap<>(); if (isInUpdateMode()) { SpringConfigurationService configurationService = getDefaultConfigurationService(); if (configurationService != null) { List<ConfigurationServiceGitRepository> repositoryList = configurationService.innerModel().properties().settings() == null ? Collections.emptyList() : configurationService.innerModel().properties().settings().gitProperty().repositories(); if (repositoryList != null) { repositoryList.forEach(repository -> existingGitRepositories.put(repository.name(), repository)); } } } existingGitRepositories.putAll(gitRepositoryMap); for (String repositoryToDelete : repositoriesToDelete) { existingGitRepositories.remove(repositoryToDelete); } return new ArrayList<>(existingGitRepositories.values()); } } } }
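For context on the update flow, a small sketch of the fluent calls that feed ConfigurationServiceConfig before beforeGroupCreateOrUpdate() merges the repositories and clears the update flag; the repository names, URI, branch and patterns are hypothetical, and the standard fluent apply() terminal is assumed:

// Sketch only (enterprise tier): each call marks the config as needing an update;
// merging with existing repositories and the reset happen in the implementation above.
springService.update()
    .withGitRepository("team-config", "https://example.com/org/config.git", "main",
        Collections.singletonList("application/*"))
    .withoutGitRepository("legacy-config")
    .apply();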
If this is typical and recommended, should you define it as a static class so that it is easier for users to reuse?
public static void main(String[] args) throws Exception { TokenCredential credential = new DefaultAzureCredentialBuilder() .authorityHost(AzureAuthorityHosts.AZURE_PUBLIC_CLOUD) .build(); AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE); HttpPipelinePolicy userTokenPolicy = (context, next) -> { Mono<String> token = null; String bearerTokenPrefix = "bearer "; String authorization = context.getHttpRequest().getHeaders().getValue("Authorization"); if (authorization != null && authorization.toLowerCase(Locale.ROOT).startsWith(bearerTokenPrefix)) { token = Mono.just(authorization.substring(bearerTokenPrefix.length())); } else { token = credential .getToken(new TokenRequestContext().addScopes(profile.getEnvironment().getResourceManagerEndpoint() + "/.default")) .map(AccessToken::getToken); } return token .flatMap(accessToken -> { context.getHttpRequest().getHeaders().set(USER_TOKEN_HEADER, accessToken); return next.process(); }); }; AzureResourceManager azureResourceManager = AzureResourceManager.authenticate(credential, profile).withDefaultSubscription(); ServiceLinkerManager serviceLinkerManager = ServiceLinkerManager.authenticate(credential, profile); ServiceLinkerManager serviceLinkerManagerWithUserToken = ServiceLinkerManager.configure().withPolicy(userTokenPolicy).authenticate(credential, profile); creatSpringCloudAndSQLConnection(azureResourceManager, serviceLinkerManager); createWebAppAndKeyVaultConnectionWithUserIdentity(azureResourceManager, serviceLinkerManagerWithUserToken); }
};
public static void main(String[] args) throws Exception { TokenCredential credential = new DefaultAzureCredentialBuilder() .authorityHost(AzureAuthorityHosts.AZURE_PUBLIC_CLOUD) .build(); AzureProfile profile = new AzureProfile(AzureEnvironment.AZURE); HttpPipelinePolicy userTokenPolicy = new UserTokenPolicy(credential, profile.getEnvironment()); AzureResourceManager azureResourceManager = AzureResourceManager.authenticate(credential, profile).withDefaultSubscription(); ServiceLinkerManager serviceLinkerManager = ServiceLinkerManager.authenticate(credential, profile); ServiceLinkerManager serviceLinkerManagerWithUserToken = ServiceLinkerManager.configure().withPolicy(userTokenPolicy).authenticate(credential, profile); createSpringCloudAndSQLConnection(azureResourceManager, serviceLinkerManager); createWebAppAndKeyVaultConnectionWithUserIdentity(azureResourceManager, serviceLinkerManagerWithUserToken); }
class CreateServiceLinker { private static final String USER_TOKEN_HEADER = "x-ms-serviceconnector-user-token"; /** * Main entry point. * * @param args the parameters */ private static void creatSpringCloudAndSQLConnection(AzureResourceManager azureResourceManager, ServiceLinkerManager serviceLinkerManager) { String resourceGroupName = "rg" + randomString(8); Region region = Region.US_EAST; String springServiceName = "spring" + randomString(8); String springAppName = "app" + randomString(8); String sqlServerName = "sqlserver" + randomString(8); String sqlDatabaseName = "sqldb" + randomString(8); String sqlUserName = "sql" + randomString(8); String sqlPassword = "5$Ql" + randomString(8); SpringService springService = azureResourceManager.springServices().define(springServiceName) .withRegion(region) .withNewResourceGroup(resourceGroupName) .withSku(SkuName.B0) .create(); SpringApp springApp = springService.apps().define(springAppName) .withDefaultActiveDeployment() .create(); SqlServer sqlServer = azureResourceManager.sqlServers().define(sqlServerName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .withAdministratorLogin(sqlUserName) .withAdministratorPassword(sqlPassword) .create(); SqlDatabase sqlDatabase = sqlServer.databases().define(sqlDatabaseName) .withBasicEdition() .create(); LinkerResource linker = serviceLinkerManager.linkers().define("sql") .withExistingResourceUri(springApp.getActiveDeployment().id()) .withTargetService( new AzureResource() .withId(sqlDatabase.id()) ) .withAuthInfo( new SecretAuthInfo() .withName(sqlUserName) .withSecretInfo( new ValueSecretInfo() .withValue(sqlPassword) ) ) .withClientType(ClientType.SPRING_BOOT) .create(); System.out.println("Configurations:"); for (SourceConfiguration sourceConfiguration : linker.listConfigurations().configurations()) { System.out.printf("\t%s: %s%n", sourceConfiguration.name(), sourceConfiguration.value()); } } private static void createWebAppAndKeyVaultConnectionWithUserIdentity(AzureResourceManager azureResourceManager, ServiceLinkerManager serviceLinkerManager) { String resourceGroupName = "rg" + randomString(8); Region region = Region.US_EAST; String webAppName = "web" + randomString(8); String keyVaultName = "vault" + randomString(8); String identityName = "identity" + randomString(8); WebApp webApp = azureResourceManager.webApps().define(webAppName) .withRegion(region) .withNewResourceGroup(resourceGroupName) .withNewLinuxPlan(PricingTier.BASIC_B1) .withBuiltInImage(RuntimeStack.NODEJS_14_LTS) .create(); Vault vault = azureResourceManager.vaults().define(keyVaultName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .withEmptyAccessPolicy() .create(); Identity identity = azureResourceManager.identities().define(identityName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .create(); LinkerResource linker = serviceLinkerManager.linkers().define("keyvault") .withExistingResourceUri(webApp.id()) .withTargetService( new AzureResource() .withId(vault.id()) ) .withAuthInfo( new UserAssignedIdentityAuthInfo() .withSubscriptionId(azureResourceManager.subscriptionId()) .withClientId(identity.clientId()) ) .withClientType(ClientType.NODEJS) .create(); System.out.println("Configurations:"); for (SourceConfiguration sourceConfiguration : linker.listConfigurations().configurations()) { System.out.printf("\t%s: %s%n", sourceConfiguration.name(), sourceConfiguration.value()); } } private static String randomString(int length) { return UUID.randomUUID().toString().replace("-", 
"").substring(0, length); } }
class CreateServiceLinker { private static final String USER_TOKEN_HEADER = "x-ms-serviceconnector-user-token"; /** * Main entry point. * * @param args the parameters */ private static void createSpringCloudAndSQLConnection(AzureResourceManager azureResourceManager, ServiceLinkerManager serviceLinkerManager) { String resourceGroupName = "rg" + randomString(8); Region region = Region.US_EAST; String springServiceName = "spring" + randomString(8); String springAppName = "app" + randomString(8); String sqlServerName = "sqlserver" + randomString(8); String sqlDatabaseName = "sqldb" + randomString(8); String sqlUserName = "sql" + randomString(8); String sqlPassword = "5$Ql" + randomString(8); SpringService springService = azureResourceManager.springServices().define(springServiceName) .withRegion(region) .withNewResourceGroup(resourceGroupName) .withSku(SkuName.B0) .create(); SpringApp springApp = springService.apps().define(springAppName) .withDefaultActiveDeployment() .create(); SqlServer sqlServer = azureResourceManager.sqlServers().define(sqlServerName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .withAdministratorLogin(sqlUserName) .withAdministratorPassword(sqlPassword) .create(); SqlDatabase sqlDatabase = sqlServer.databases().define(sqlDatabaseName) .withBasicEdition() .create(); LinkerResource linker = serviceLinkerManager.linkers().define("sql") .withExistingResourceUri(springApp.getActiveDeployment().id()) .withTargetService( new AzureResource() .withId(sqlDatabase.id()) ) .withAuthInfo( new SecretAuthInfo() .withName(sqlUserName) .withSecretInfo( new ValueSecretInfo() .withValue(sqlPassword) ) ) .withClientType(ClientType.SPRING_BOOT) .create(); System.out.println("Configurations:"); for (SourceConfiguration sourceConfiguration : linker.listConfigurations().configurations()) { System.out.printf("\t%s: %s%n", sourceConfiguration.name(), sourceConfiguration.value()); } } private static void createWebAppAndKeyVaultConnectionWithUserIdentity(AzureResourceManager azureResourceManager, ServiceLinkerManager serviceLinkerManager) { String resourceGroupName = "rg" + randomString(8); Region region = Region.US_EAST; String webAppName = "web" + randomString(8); String keyVaultName = "vault" + randomString(8); String identityName = "identity" + randomString(8); WebApp webApp = azureResourceManager.webApps().define(webAppName) .withRegion(region) .withNewResourceGroup(resourceGroupName) .withNewLinuxPlan(PricingTier.BASIC_B1) .withBuiltInImage(RuntimeStack.NODEJS_14_LTS) .create(); Vault vault = azureResourceManager.vaults().define(keyVaultName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .withEmptyAccessPolicy() .create(); Identity identity = azureResourceManager.identities().define(identityName) .withRegion(region) .withExistingResourceGroup(resourceGroupName) .create(); LinkerResource linker = serviceLinkerManager.linkers().define("keyvault") .withExistingResourceUri(webApp.id()) .withTargetService( new AzureResource() .withId(vault.id()) ) .withAuthInfo( new UserAssignedIdentityAuthInfo() .withSubscriptionId(azureResourceManager.subscriptionId()) .withClientId(identity.clientId()) ) .withClientType(ClientType.NODEJS) .create(); System.out.println("Configurations:"); for (SourceConfiguration sourceConfiguration : linker.listConfigurations().configurations()) { System.out.printf("\t%s: %s%n", sourceConfiguration.name(), sourceConfiguration.value()); } } private static String randomString(int length) { return UUID.randomUUID().toString().replace("-", 
"").substring(0, length); } public static class UserTokenPolicy implements HttpPipelinePolicy { private final TokenCredential credential; private final AzureEnvironment environment; public UserTokenPolicy(TokenCredential credential, AzureEnvironment environment) { this.credential = credential; this.environment = environment; } @Override public Mono<HttpResponse> process(HttpPipelineCallContext context, HttpPipelineNextPolicy next) { Mono<String> token = null; String bearerTokenPrefix = "bearer "; String authorization = context.getHttpRequest().getHeaders().getValue("Authorization"); if (authorization != null && authorization.toLowerCase(Locale.ROOT).startsWith(bearerTokenPrefix)) { token = Mono.just(authorization.substring(bearerTokenPrefix.length())); } else { token = credential .getToken(new TokenRequestContext().addScopes(environment.getResourceManagerEndpoint() + "/.default")) .map(AccessToken::getToken); } return token .flatMap(accessToken -> { context.getHttpRequest().getHeaders().set(USER_TOKEN_HEADER, accessToken); return next.process(); }); } } }
I wonder whether using a new model/class would make more sense. For example, instead of using StoreResult, use a new class ResponseStatistic that contains the following properties directly. ![image](https://user-images.githubusercontent.com/64233642/163652271-e88a47f1-db5d-41d6-80b8-756d6f63607c.png)
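A minimal sketch of what the suggested flattening might look like. The class name `ResponseStatistics` and its field set are assumptions made for illustration only; they are not the SDK's actual model, just one way the reviewer's idea could be expressed:

```java
// Hypothetical sketch of the reviewer's suggestion: instead of serializing a nested
// StoreResult, expose the interesting values as plain fields on a dedicated class.
// The class and field names here are assumptions for illustration only.
import java.time.Instant;

public final class ResponseStatistics {
    private final int statusCode;              // HTTP status of the store response
    private final int subStatusCode;           // Cosmos sub-status code
    private final double requestCharge;        // RU charge reported by the backend
    private final String partitionKeyRangeId;  // partition key range that served the request
    private final Instant requestResponseTimeUTC;

    public ResponseStatistics(int statusCode, int subStatusCode, double requestCharge,
                              String partitionKeyRangeId, Instant requestResponseTimeUTC) {
        this.statusCode = statusCode;
        this.subStatusCode = subStatusCode;
        this.requestCharge = requestCharge;
        this.partitionKeyRangeId = partitionKeyRangeId;
        this.requestResponseTimeUTC = requestResponseTimeUTC;
    }

    public int getStatusCode() { return statusCode; }
    public int getSubStatusCode() { return subStatusCode; }
    public double getRequestCharge() { return requestCharge; }
    public String getPartitionKeyRangeId() { return partitionKeyRangeId; }
    public Instant getRequestResponseTimeUTC() { return requestResponseTimeUTC; }
}
```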
public void recordResponse(RxDocumentServiceRequest request, StoreResult storeResult, GlobalEndpointManager globalEndpointManager) { Objects.requireNonNull(request, "request is required and cannot be null."); Instant responseTime = Instant.now(); StoreResponseStatistics storeResponseStatistics = new StoreResponseStatistics(); storeResponseStatistics.requestResponseTimeUTC = responseTime; storeResponseStatistics.storeResult = StoreResult.createSerializableStoreResult(storeResult); storeResponseStatistics.requestOperationType = request.getOperationType(); storeResponseStatistics.requestResourceType = request.getResourceType(); activityId = request.getActivityId().toString(); URI locationEndPoint = null; if (request.requestContext != null) { if (request.requestContext.locationEndpointToRoute != null) { locationEndPoint = request.requestContext.locationEndpointToRoute; } } synchronized (this) { if (responseTime.isAfter(this.requestEndTimeUTC)) { this.requestEndTimeUTC = responseTime; } if (locationEndPoint != null) { this.regionsContacted.add(globalEndpointManager.getRegionName(locationEndPoint, request.getOperationType())); this.locationEndpointsContacted.add(locationEndPoint); } if (storeResponseStatistics.requestOperationType == OperationType.Head || storeResponseStatistics.requestOperationType == OperationType.HeadFeed) { this.supplementalResponseStatisticsList.add(storeResponseStatistics); } else { this.responseStatisticsList.add(storeResponseStatistics); } } }
storeResponseStatistics.storeResult = StoreResult.createSerializableStoreResult(storeResult);
public void recordResponse(RxDocumentServiceRequest request, StoreResult storeResult, GlobalEndpointManager globalEndpointManager) { Objects.requireNonNull(request, "request is required and cannot be null."); Instant responseTime = Instant.now(); StoreResponseStatistics storeResponseStatistics = new StoreResponseStatistics(); storeResponseStatistics.requestResponseTimeUTC = responseTime; storeResponseStatistics.storeResult = StoreResult.createSerializableStoreResult(storeResult); storeResponseStatistics.requestOperationType = request.getOperationType(); storeResponseStatistics.requestResourceType = request.getResourceType(); activityId = request.getActivityId().toString(); URI locationEndPoint = null; if (request.requestContext != null) { if (request.requestContext.locationEndpointToRoute != null) { locationEndPoint = request.requestContext.locationEndpointToRoute; } } synchronized (this) { if (responseTime.isAfter(this.requestEndTimeUTC)) { this.requestEndTimeUTC = responseTime; } if (locationEndPoint != null && globalEndpointManager != null) { this.regionsContacted.add(globalEndpointManager.getRegionName(locationEndPoint, request.getOperationType())); this.locationEndpointsContacted.add(locationEndPoint); } if (storeResponseStatistics.requestOperationType == OperationType.Head || storeResponseStatistics.requestOperationType == OperationType.HeadFeed) { this.supplementalResponseStatisticsList.add(storeResponseStatistics); } else { this.responseStatisticsList.add(storeResponseStatistics); } } }
class ClientSideRequestStatistics { private static final int MAX_SUPPLEMENTAL_REQUESTS_FOR_TO_STRING = 10; private final DiagnosticsClientContext diagnosticsClientContext; private String activityId; private List<StoreResponseStatistics> responseStatisticsList; private List<StoreResponseStatistics> supplementalResponseStatisticsList; private Map<String, AddressResolutionStatistics> addressResolutionStatistics; private List<URI> contactedReplicas; private Set<URI> failedReplicas; private Instant requestStartTimeUTC; private Instant requestEndTimeUTC; private Set<String> regionsContacted; private Set<URI> locationEndpointsContacted; private RetryContext retryContext; private GatewayStatistics gatewayStatistics; private RequestTimeline gatewayRequestTimeline; private MetadataDiagnosticsContext metadataDiagnosticsContext; private SerializationDiagnosticsContext serializationDiagnosticsContext; public ClientSideRequestStatistics(DiagnosticsClientContext diagnosticsClientContext) { this.diagnosticsClientContext = diagnosticsClientContext; this.requestStartTimeUTC = Instant.now(); this.requestEndTimeUTC = Instant.now(); this.responseStatisticsList = new ArrayList<>(); this.supplementalResponseStatisticsList = new ArrayList<>(); this.addressResolutionStatistics = new HashMap<>(); this.contactedReplicas = Collections.synchronizedList(new ArrayList<>()); this.failedReplicas = Collections.synchronizedSet(new HashSet<>()); this.regionsContacted = Collections.synchronizedSet(new HashSet<>()); this.locationEndpointsContacted = Collections.synchronizedSet(new HashSet<>()); this.metadataDiagnosticsContext = new MetadataDiagnosticsContext(); this.serializationDiagnosticsContext = new SerializationDiagnosticsContext(); this.retryContext = new RetryContext(); } public ClientSideRequestStatistics(ClientSideRequestStatistics toBeCloned) { this.diagnosticsClientContext = toBeCloned.diagnosticsClientContext; this.requestStartTimeUTC = toBeCloned.requestStartTimeUTC; this.requestEndTimeUTC = toBeCloned.requestEndTimeUTC; this.responseStatisticsList = new ArrayList<>(toBeCloned.responseStatisticsList); this.supplementalResponseStatisticsList = new ArrayList<>(toBeCloned.supplementalResponseStatisticsList); this.addressResolutionStatistics = new HashMap<>(toBeCloned.addressResolutionStatistics); this.contactedReplicas = Collections.synchronizedList(new ArrayList<>(toBeCloned.contactedReplicas)); this.failedReplicas = Collections.synchronizedSet(new HashSet<>(toBeCloned.failedReplicas)); this.regionsContacted = Collections.synchronizedSet(new HashSet<>(toBeCloned.regionsContacted)); this.locationEndpointsContacted = Collections.synchronizedSet( new HashSet<>(toBeCloned.locationEndpointsContacted)); this.metadataDiagnosticsContext = new MetadataDiagnosticsContext(toBeCloned.metadataDiagnosticsContext); this.serializationDiagnosticsContext = new SerializationDiagnosticsContext(toBeCloned.serializationDiagnosticsContext); this.retryContext = new RetryContext(toBeCloned.retryContext); } public Duration getDuration() { return Duration.between(requestStartTimeUTC, requestEndTimeUTC); } public Instant getRequestStartTimeUTC() { return requestStartTimeUTC; } public DiagnosticsClientContext getDiagnosticsClientContext() { return diagnosticsClientContext; } public void recordGatewayResponse( RxDocumentServiceRequest rxDocumentServiceRequest, StoreResponse storeResponse, CosmosException exception, GlobalEndpointManager globalEndpointManager) { Instant responseTime = Instant.now(); synchronized (this) { if 
(responseTime.isAfter(this.requestEndTimeUTC)) { this.requestEndTimeUTC = responseTime; } URI locationEndPoint = null; if (rxDocumentServiceRequest != null && rxDocumentServiceRequest.requestContext != null) { locationEndPoint = rxDocumentServiceRequest.requestContext.locationEndpointToRoute; } this.recordRetryContextEndTime(); if (locationEndPoint != null) { this.regionsContacted.add(globalEndpointManager.getRegionName(locationEndPoint, rxDocumentServiceRequest.getOperationType())); this.locationEndpointsContacted.add(locationEndPoint); } this.gatewayStatistics = new GatewayStatistics(); if (rxDocumentServiceRequest != null) { this.gatewayStatistics.operationType = rxDocumentServiceRequest.getOperationType(); this.gatewayStatistics.resourceType = rxDocumentServiceRequest.getResourceType(); } if (storeResponse != null) { this.gatewayStatistics.statusCode = storeResponse.getStatus(); this.gatewayStatistics.subStatusCode = DirectBridgeInternal.getSubStatusCode(storeResponse); this.gatewayStatistics.sessionToken = storeResponse .getHeaderValue(HttpConstants.HttpHeaders.SESSION_TOKEN); this.gatewayStatistics.requestCharge = storeResponse .getHeaderValue(HttpConstants.HttpHeaders.REQUEST_CHARGE); this.gatewayStatistics.requestTimeline = DirectBridgeInternal.getRequestTimeline(storeResponse); this.gatewayStatistics.partitionKeyRangeId = storeResponse.getPartitionKeyRangeId(); this.activityId= storeResponse.getHeaderValue(HttpConstants.HttpHeaders.ACTIVITY_ID); } else if (exception != null) { this.gatewayStatistics.statusCode = exception.getStatusCode(); this.gatewayStatistics.subStatusCode = exception.getSubStatusCode(); this.gatewayStatistics.requestTimeline = this.gatewayRequestTimeline; this.gatewayStatistics.requestCharge= String.valueOf(exception.getRequestCharge()); this.activityId=exception.getActivityId(); } } } public void setGatewayRequestTimeline(RequestTimeline transportRequestTimeline) { this.gatewayRequestTimeline = transportRequestTimeline; } public RequestTimeline getGatewayRequestTimeline() { return this.gatewayRequestTimeline; } public String recordAddressResolutionStart( URI targetEndpoint, boolean forceRefresh, boolean forceCollectionRoutingMapRefresh) { String identifier = Utils .randomUUID() .toString(); AddressResolutionStatistics resolutionStatistics = new AddressResolutionStatistics(); resolutionStatistics.startTimeUTC = Instant.now(); resolutionStatistics.endTimeUTC = null; resolutionStatistics.targetEndpoint = targetEndpoint == null ? "<NULL>" : targetEndpoint.toString(); resolutionStatistics.forceRefresh = forceRefresh; resolutionStatistics.forceCollectionRoutingMapRefresh = forceCollectionRoutingMapRefresh; synchronized (this) { this.addressResolutionStatistics.put(identifier, resolutionStatistics); } return identifier; } public void recordAddressResolutionEnd(String identifier, String errorMessage) { if (StringUtils.isEmpty(identifier)) { return; } Instant responseTime = Instant.now(); synchronized (this) { if (!this.addressResolutionStatistics.containsKey(identifier)) { throw new IllegalArgumentException("Identifier " + identifier + " does not exist. 
Please call start " + "before calling end"); } if (responseTime.isAfter(this.requestEndTimeUTC)) { this.requestEndTimeUTC = responseTime; } AddressResolutionStatistics resolutionStatistics = this.addressResolutionStatistics.get(identifier); resolutionStatistics.endTimeUTC = responseTime; resolutionStatistics.errorMessage = errorMessage; resolutionStatistics.inflightRequest = false; } } public List<URI> getContactedReplicas() { return contactedReplicas; } public void setContactedReplicas(List<URI> contactedReplicas) { this.contactedReplicas = Collections.synchronizedList(contactedReplicas); } public Set<URI> getFailedReplicas() { return failedReplicas; } public void setFailedReplicas(Set<URI> failedReplicas) { this.failedReplicas = Collections.synchronizedSet(failedReplicas); } public Set<String> getContactedRegionNames() { return regionsContacted; } public void setRegionsContacted(Set<String> regionsContacted) { this.regionsContacted = Collections.synchronizedSet(regionsContacted); } public Set<URI> getLocationEndpointsContacted() { return locationEndpointsContacted; } public void setLocationEndpointsContacted(Set<URI> locationEndpointsContacted) { this.locationEndpointsContacted = locationEndpointsContacted; } public MetadataDiagnosticsContext getMetadataDiagnosticsContext(){ return this.metadataDiagnosticsContext; } public SerializationDiagnosticsContext getSerializationDiagnosticsContext() { return this.serializationDiagnosticsContext; } public void recordRetryContextEndTime() { this.retryContext.updateEndTime(); } public RetryContext getRetryContext() { return retryContext; } public List<StoreResponseStatistics> getResponseStatisticsList() { return responseStatisticsList; } public List<StoreResponseStatistics> getSupplementalResponseStatisticsList() { return supplementalResponseStatisticsList; } public Map<String, AddressResolutionStatistics> getAddressResolutionStatistics() { return addressResolutionStatistics; } public GatewayStatistics getGatewayStatistics() { return gatewayStatistics; } public static class StoreResponseStatistics { @JsonSerialize(using = StoreResult.StoreResultSerializer.class) private StoreResult storeResult; @JsonSerialize(using = DiagnosticsInstantSerializer.class) private Instant requestResponseTimeUTC; @JsonSerialize private ResourceType requestResourceType; @JsonSerialize private OperationType requestOperationType; public StoreResult getStoreResult() { return storeResult; } public Instant getRequestResponseTimeUTC() { return requestResponseTimeUTC; } public ResourceType getRequestResourceType() { return requestResourceType; } public OperationType getRequestOperationType() { return requestOperationType; } } public static class SystemInformation { private String usedMemory; private String availableMemory; private String systemCpuLoad; private int availableProcessors; public String getUsedMemory() { return usedMemory; } public String getAvailableMemory() { return availableMemory; } public String getSystemCpuLoad() { return systemCpuLoad; } public int getAvailableProcessors() { return availableProcessors; } } public static class ClientSideRequestStatisticsSerializer extends StdSerializer<ClientSideRequestStatistics> { private static final long serialVersionUID = -2746532297176812860L; ClientSideRequestStatisticsSerializer() { super(ClientSideRequestStatistics.class); } @Override public void serialize( ClientSideRequestStatistics statistics, JsonGenerator generator, SerializerProvider provider) throws IOException { generator.writeStartObject(); long requestLatency 
= statistics .getDuration() .toMillis(); generator.writeStringField("userAgent", Utils.getUserAgent()); generator.writeStringField("activityId", statistics.activityId); generator.writeNumberField("requestLatencyInMs", requestLatency); generator.writeStringField("requestStartTimeUTC", DiagnosticsInstantSerializer.fromInstant(statistics.requestStartTimeUTC)); generator.writeStringField("requestEndTimeUTC", DiagnosticsInstantSerializer.fromInstant(statistics.requestEndTimeUTC)); generator.writeObjectField("responseStatisticsList", statistics.responseStatisticsList); generator.writeObjectField("supplementalResponseStatisticsList", getCappedSupplementalResponseStatisticsList(statistics.supplementalResponseStatisticsList)); generator.writeObjectField("addressResolutionStatistics", statistics.addressResolutionStatistics); generator.writeObjectField("regionsContacted", statistics.regionsContacted); generator.writeObjectField("retryContext", statistics.retryContext); generator.writeObjectField("metadataDiagnosticsContext", statistics.getMetadataDiagnosticsContext()); generator.writeObjectField("serializationDiagnosticsContext", statistics.getSerializationDiagnosticsContext()); generator.writeObjectField("gatewayStatistics", statistics.gatewayStatistics); try { SystemInformation systemInformation = fetchSystemInformation(); generator.writeObjectField("systemInformation", systemInformation); } catch (Exception e) { } generator.writeObjectField("clientCfgs", statistics.diagnosticsClientContext); generator.writeEndObject(); } } public static List<StoreResponseStatistics> getCappedSupplementalResponseStatisticsList(List<StoreResponseStatistics> supplementalResponseStatisticsList) { int supplementalResponseStatisticsListCount = supplementalResponseStatisticsList.size(); int initialIndex = Math.max(supplementalResponseStatisticsListCount - MAX_SUPPLEMENTAL_REQUESTS_FOR_TO_STRING, 0); if (initialIndex != 0) { List<StoreResponseStatistics> subList = supplementalResponseStatisticsList .subList(initialIndex, supplementalResponseStatisticsListCount); return subList; } return supplementalResponseStatisticsList; } public static class AddressResolutionStatistics { @JsonSerialize(using = DiagnosticsInstantSerializer.class) private Instant startTimeUTC; @JsonSerialize(using = DiagnosticsInstantSerializer.class) private Instant endTimeUTC; @JsonSerialize private String targetEndpoint; @JsonSerialize private String errorMessage; @JsonSerialize private boolean forceRefresh; @JsonSerialize private boolean forceCollectionRoutingMapRefresh; @JsonSerialize private boolean inflightRequest = true; public Instant getStartTimeUTC() { return startTimeUTC; } public Instant getEndTimeUTC() { return endTimeUTC; } public String getTargetEndpoint() { return targetEndpoint; } public String getErrorMessage() { return errorMessage; } public boolean isInflightRequest() { return inflightRequest; } public boolean isForceRefresh() { return forceRefresh; } public boolean isForceCollectionRoutingMapRefresh() { return forceCollectionRoutingMapRefresh; } } public static class GatewayStatistics { private String sessionToken; private OperationType operationType; private ResourceType resourceType; private int statusCode; private int subStatusCode; private String requestCharge; private RequestTimeline requestTimeline; private String partitionKeyRangeId; public String getSessionToken() { return sessionToken; } public OperationType getOperationType() { return operationType; } public int getStatusCode() { return statusCode; } public int 
getSubStatusCode() { return subStatusCode; } public String getRequestCharge() { return requestCharge; } public RequestTimeline getRequestTimeline() { return requestTimeline; } public ResourceType getResourceType() { return resourceType; } public String getPartitionKeyRangeId() { return partitionKeyRangeId; } } public static SystemInformation fetchSystemInformation() { SystemInformation systemInformation = new SystemInformation(); Runtime runtime = Runtime.getRuntime(); long totalMemory = runtime.totalMemory() / 1024; long freeMemory = runtime.freeMemory() / 1024; long maxMemory = runtime.maxMemory() / 1024; systemInformation.usedMemory = totalMemory - freeMemory + " KB"; systemInformation.availableMemory = (maxMemory - (totalMemory - freeMemory)) + " KB"; systemInformation.availableProcessors = runtime.availableProcessors(); systemInformation.systemCpuLoad = CpuMemoryMonitor .getCpuLoad() .toString(); return systemInformation; } }
class ClientSideRequestStatistics { private static final int MAX_SUPPLEMENTAL_REQUESTS_FOR_TO_STRING = 10; private final DiagnosticsClientContext.DiagnosticsClientConfig diagnosticsClientConfig; private String activityId; private List<StoreResponseStatistics> responseStatisticsList; private List<StoreResponseStatistics> supplementalResponseStatisticsList; private Map<String, AddressResolutionStatistics> addressResolutionStatistics; private List<URI> contactedReplicas; private Set<URI> failedReplicas; private Instant requestStartTimeUTC; private Instant requestEndTimeUTC; private Set<String> regionsContacted; private Set<URI> locationEndpointsContacted; private RetryContext retryContext; private GatewayStatistics gatewayStatistics; private RequestTimeline gatewayRequestTimeline; private MetadataDiagnosticsContext metadataDiagnosticsContext; private SerializationDiagnosticsContext serializationDiagnosticsContext; public ClientSideRequestStatistics(DiagnosticsClientContext diagnosticsClientContext) { this.diagnosticsClientConfig = diagnosticsClientContext.getConfig(); this.requestStartTimeUTC = Instant.now(); this.requestEndTimeUTC = Instant.now(); this.responseStatisticsList = new ArrayList<>(); this.supplementalResponseStatisticsList = new ArrayList<>(); this.addressResolutionStatistics = new HashMap<>(); this.contactedReplicas = Collections.synchronizedList(new ArrayList<>()); this.failedReplicas = Collections.synchronizedSet(new HashSet<>()); this.regionsContacted = Collections.synchronizedSet(new HashSet<>()); this.locationEndpointsContacted = Collections.synchronizedSet(new HashSet<>()); this.metadataDiagnosticsContext = new MetadataDiagnosticsContext(); this.serializationDiagnosticsContext = new SerializationDiagnosticsContext(); this.retryContext = new RetryContext(); } public ClientSideRequestStatistics(ClientSideRequestStatistics toBeCloned) { this.diagnosticsClientConfig = toBeCloned.diagnosticsClientConfig; this.requestStartTimeUTC = toBeCloned.requestStartTimeUTC; this.requestEndTimeUTC = toBeCloned.requestEndTimeUTC; this.responseStatisticsList = new ArrayList<>(toBeCloned.responseStatisticsList); this.supplementalResponseStatisticsList = new ArrayList<>(toBeCloned.supplementalResponseStatisticsList); this.addressResolutionStatistics = new HashMap<>(toBeCloned.addressResolutionStatistics); this.contactedReplicas = Collections.synchronizedList(new ArrayList<>(toBeCloned.contactedReplicas)); this.failedReplicas = Collections.synchronizedSet(new HashSet<>(toBeCloned.failedReplicas)); this.regionsContacted = Collections.synchronizedSet(new HashSet<>(toBeCloned.regionsContacted)); this.locationEndpointsContacted = Collections.synchronizedSet( new HashSet<>(toBeCloned.locationEndpointsContacted)); this.metadataDiagnosticsContext = new MetadataDiagnosticsContext(toBeCloned.metadataDiagnosticsContext); this.serializationDiagnosticsContext = new SerializationDiagnosticsContext(toBeCloned.serializationDiagnosticsContext); this.retryContext = new RetryContext(toBeCloned.retryContext); } public Duration getDuration() { return Duration.between(requestStartTimeUTC, requestEndTimeUTC); } public Instant getRequestStartTimeUTC() { return requestStartTimeUTC; } public DiagnosticsClientContext.DiagnosticsClientConfig getDiagnosticsClientConfig() { return diagnosticsClientConfig; } public void recordGatewayResponse( RxDocumentServiceRequest rxDocumentServiceRequest, StoreResponse storeResponse, CosmosException exception, GlobalEndpointManager globalEndpointManager) { Instant responseTime = 
Instant.now(); synchronized (this) { if (responseTime.isAfter(this.requestEndTimeUTC)) { this.requestEndTimeUTC = responseTime; } URI locationEndPoint = null; if (rxDocumentServiceRequest != null && rxDocumentServiceRequest.requestContext != null) { locationEndPoint = rxDocumentServiceRequest.requestContext.locationEndpointToRoute; } this.recordRetryContextEndTime(); if (locationEndPoint != null && globalEndpointManager != null) { this.regionsContacted.add(globalEndpointManager.getRegionName(locationEndPoint, rxDocumentServiceRequest.getOperationType())); this.locationEndpointsContacted.add(locationEndPoint); } this.gatewayStatistics = new GatewayStatistics(); if (rxDocumentServiceRequest != null) { this.gatewayStatistics.operationType = rxDocumentServiceRequest.getOperationType(); this.gatewayStatistics.resourceType = rxDocumentServiceRequest.getResourceType(); } if (storeResponse != null) { this.gatewayStatistics.statusCode = storeResponse.getStatus(); this.gatewayStatistics.subStatusCode = DirectBridgeInternal.getSubStatusCode(storeResponse); this.gatewayStatistics.sessionToken = storeResponse .getHeaderValue(HttpConstants.HttpHeaders.SESSION_TOKEN); this.gatewayStatistics.requestCharge = storeResponse .getHeaderValue(HttpConstants.HttpHeaders.REQUEST_CHARGE); this.gatewayStatistics.requestTimeline = DirectBridgeInternal.getRequestTimeline(storeResponse); this.gatewayStatistics.partitionKeyRangeId = storeResponse.getPartitionKeyRangeId(); this.activityId= storeResponse.getHeaderValue(HttpConstants.HttpHeaders.ACTIVITY_ID); } else if (exception != null) { this.gatewayStatistics.statusCode = exception.getStatusCode(); this.gatewayStatistics.subStatusCode = exception.getSubStatusCode(); this.gatewayStatistics.requestTimeline = this.gatewayRequestTimeline; this.gatewayStatistics.requestCharge= String.valueOf(exception.getRequestCharge()); this.activityId=exception.getActivityId(); } } } public void setGatewayRequestTimeline(RequestTimeline transportRequestTimeline) { this.gatewayRequestTimeline = transportRequestTimeline; } public RequestTimeline getGatewayRequestTimeline() { return this.gatewayRequestTimeline; } public String recordAddressResolutionStart( URI targetEndpoint, boolean forceRefresh, boolean forceCollectionRoutingMapRefresh) { String identifier = Utils .randomUUID() .toString(); AddressResolutionStatistics resolutionStatistics = new AddressResolutionStatistics(); resolutionStatistics.startTimeUTC = Instant.now(); resolutionStatistics.endTimeUTC = null; resolutionStatistics.targetEndpoint = targetEndpoint == null ? "<NULL>" : targetEndpoint.toString(); resolutionStatistics.forceRefresh = forceRefresh; resolutionStatistics.forceCollectionRoutingMapRefresh = forceCollectionRoutingMapRefresh; synchronized (this) { this.addressResolutionStatistics.put(identifier, resolutionStatistics); } return identifier; } public void recordAddressResolutionEnd(String identifier, String errorMessage) { if (StringUtils.isEmpty(identifier)) { return; } Instant responseTime = Instant.now(); synchronized (this) { if (!this.addressResolutionStatistics.containsKey(identifier)) { throw new IllegalArgumentException("Identifier " + identifier + " does not exist. 
Please call start " + "before calling end"); } if (responseTime.isAfter(this.requestEndTimeUTC)) { this.requestEndTimeUTC = responseTime; } AddressResolutionStatistics resolutionStatistics = this.addressResolutionStatistics.get(identifier); resolutionStatistics.endTimeUTC = responseTime; resolutionStatistics.errorMessage = errorMessage; resolutionStatistics.inflightRequest = false; } } public List<URI> getContactedReplicas() { return contactedReplicas; } public void setContactedReplicas(List<URI> contactedReplicas) { this.contactedReplicas = Collections.synchronizedList(contactedReplicas); } public Set<URI> getFailedReplicas() { return failedReplicas; } public void setFailedReplicas(Set<URI> failedReplicas) { this.failedReplicas = Collections.synchronizedSet(failedReplicas); } public Set<String> getContactedRegionNames() { return regionsContacted; } public void setRegionsContacted(Set<String> regionsContacted) { this.regionsContacted = Collections.synchronizedSet(regionsContacted); } public Set<URI> getLocationEndpointsContacted() { return locationEndpointsContacted; } public void setLocationEndpointsContacted(Set<URI> locationEndpointsContacted) { this.locationEndpointsContacted = locationEndpointsContacted; } public MetadataDiagnosticsContext getMetadataDiagnosticsContext(){ return this.metadataDiagnosticsContext; } public SerializationDiagnosticsContext getSerializationDiagnosticsContext() { return this.serializationDiagnosticsContext; } public void recordRetryContextEndTime() { this.retryContext.updateEndTime(); } public RetryContext getRetryContext() { return retryContext; } public List<StoreResponseStatistics> getResponseStatisticsList() { return responseStatisticsList; } public List<StoreResponseStatistics> getSupplementalResponseStatisticsList() { return supplementalResponseStatisticsList; } public Map<String, AddressResolutionStatistics> getAddressResolutionStatistics() { return addressResolutionStatistics; } public GatewayStatistics getGatewayStatistics() { return gatewayStatistics; } public static class StoreResponseStatistics { @JsonSerialize(using = StoreResult.StoreResultSerializer.class) private StoreResult storeResult; @JsonSerialize(using = DiagnosticsInstantSerializer.class) private Instant requestResponseTimeUTC; @JsonSerialize private ResourceType requestResourceType; @JsonSerialize private OperationType requestOperationType; public StoreResult getStoreResult() { return storeResult; } public Instant getRequestResponseTimeUTC() { return requestResponseTimeUTC; } public ResourceType getRequestResourceType() { return requestResourceType; } public OperationType getRequestOperationType() { return requestOperationType; } } public static class SystemInformation { private String usedMemory; private String availableMemory; private String systemCpuLoad; private int availableProcessors; public String getUsedMemory() { return usedMemory; } public String getAvailableMemory() { return availableMemory; } public String getSystemCpuLoad() { return systemCpuLoad; } public int getAvailableProcessors() { return availableProcessors; } } public static class ClientSideRequestStatisticsSerializer extends StdSerializer<ClientSideRequestStatistics> { private static final long serialVersionUID = -2746532297176812860L; ClientSideRequestStatisticsSerializer() { super(ClientSideRequestStatistics.class); } @Override public void serialize( ClientSideRequestStatistics statistics, JsonGenerator generator, SerializerProvider provider) throws IOException { generator.writeStartObject(); long requestLatency 
= statistics .getDuration() .toMillis(); generator.writeStringField("userAgent", Utils.getUserAgent()); generator.writeStringField("activityId", statistics.activityId); generator.writeNumberField("requestLatencyInMs", requestLatency); generator.writeStringField("requestStartTimeUTC", DiagnosticsInstantSerializer.fromInstant(statistics.requestStartTimeUTC)); generator.writeStringField("requestEndTimeUTC", DiagnosticsInstantSerializer.fromInstant(statistics.requestEndTimeUTC)); generator.writeObjectField("responseStatisticsList", statistics.responseStatisticsList); generator.writeObjectField("supplementalResponseStatisticsList", getCappedSupplementalResponseStatisticsList(statistics.supplementalResponseStatisticsList)); generator.writeObjectField("addressResolutionStatistics", statistics.addressResolutionStatistics); generator.writeObjectField("regionsContacted", statistics.regionsContacted); generator.writeObjectField("retryContext", statistics.retryContext); generator.writeObjectField("metadataDiagnosticsContext", statistics.getMetadataDiagnosticsContext()); generator.writeObjectField("serializationDiagnosticsContext", statistics.getSerializationDiagnosticsContext()); generator.writeObjectField("gatewayStatistics", statistics.gatewayStatistics); try { SystemInformation systemInformation = fetchSystemInformation(); generator.writeObjectField("systemInformation", systemInformation); } catch (Exception e) { } generator.writeObjectField("clientCfgs", statistics.diagnosticsClientConfig); generator.writeEndObject(); } } public static List<StoreResponseStatistics> getCappedSupplementalResponseStatisticsList(List<StoreResponseStatistics> supplementalResponseStatisticsList) { int supplementalResponseStatisticsListCount = supplementalResponseStatisticsList.size(); int initialIndex = Math.max(supplementalResponseStatisticsListCount - MAX_SUPPLEMENTAL_REQUESTS_FOR_TO_STRING, 0); if (initialIndex != 0) { List<StoreResponseStatistics> subList = supplementalResponseStatisticsList .subList(initialIndex, supplementalResponseStatisticsListCount); return subList; } return supplementalResponseStatisticsList; } public static class AddressResolutionStatistics { @JsonSerialize(using = DiagnosticsInstantSerializer.class) private Instant startTimeUTC; @JsonSerialize(using = DiagnosticsInstantSerializer.class) private Instant endTimeUTC; @JsonSerialize private String targetEndpoint; @JsonSerialize private String errorMessage; @JsonSerialize private boolean forceRefresh; @JsonSerialize private boolean forceCollectionRoutingMapRefresh; @JsonSerialize private boolean inflightRequest = true; public Instant getStartTimeUTC() { return startTimeUTC; } public Instant getEndTimeUTC() { return endTimeUTC; } public String getTargetEndpoint() { return targetEndpoint; } public String getErrorMessage() { return errorMessage; } public boolean isInflightRequest() { return inflightRequest; } public boolean isForceRefresh() { return forceRefresh; } public boolean isForceCollectionRoutingMapRefresh() { return forceCollectionRoutingMapRefresh; } } public static class GatewayStatistics { private String sessionToken; private OperationType operationType; private ResourceType resourceType; private int statusCode; private int subStatusCode; private String requestCharge; private RequestTimeline requestTimeline; private String partitionKeyRangeId; public String getSessionToken() { return sessionToken; } public OperationType getOperationType() { return operationType; } public int getStatusCode() { return statusCode; } public int 
getSubStatusCode() { return subStatusCode; } public String getRequestCharge() { return requestCharge; } public RequestTimeline getRequestTimeline() { return requestTimeline; } public ResourceType getResourceType() { return resourceType; } public String getPartitionKeyRangeId() { return partitionKeyRangeId; } } public static SystemInformation fetchSystemInformation() { SystemInformation systemInformation = new SystemInformation(); Runtime runtime = Runtime.getRuntime(); long totalMemory = runtime.totalMemory() / 1024; long freeMemory = runtime.freeMemory() / 1024; long maxMemory = runtime.maxMemory() / 1024; systemInformation.usedMemory = totalMemory - freeMemory + " KB"; systemInformation.availableMemory = (maxMemory - (totalMemory - freeMemory)) + " KB"; systemInformation.availableProcessors = runtime.availableProcessors(); systemInformation.systemCpuLoad = CpuMemoryMonitor .getCpuLoad() .toString(); return systemInformation; } }
Is `.` actually valid, or was there just a bug in the regex?
private static boolean isValidTenantCharacter(char c) { return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '.') || (c == '-'); }
return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '.') || (c == '-');
private static boolean isValidTenantCharacter(char c) { return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '.') || (c == '-'); }
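For comparison, a hedged sketch of a regex that accepts the same character set as the per-character check above. The pattern is derived from the characters accepted by `isValidTenantCharacter` and is only an assumption about what the regex mentioned in the comment may have looked like; note that tenant ids can be supplied as domain names (for example `contoso.onmicrosoft.com`), so `.` does occur in practice.

```java
import java.util.regex.Pattern;

// Regex equivalent of the character-range check above. Inside a character class,
// '.' is a literal, so the pattern accepts the same set: letters, digits, '.' and '-'.
// This is an illustrative assumption, not the regex the original code used.
final class TenantIdCheck {
    private static final Pattern VALID_TENANT_ID = Pattern.compile("^[a-zA-Z0-9.\\-]+$");

    static boolean isValidTenantId(String id) {
        return id != null && !id.isEmpty() && VALID_TENANT_ID.matcher(id).matches();
    }

    public static void main(String[] args) {
        System.out.println(isValidTenantId("contoso.onmicrosoft.com"));              // true: '.' is accepted
        System.out.println(isValidTenantId("00000000-0000-0000-0000-000000000000")); // true: GUID form
        System.out.println(isValidTenantId("bad_tenant!"));                           // false
    }
}
```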
class ValidationUtil { public static void validate(String className, Map<String, Object> parameters, ClientLogger logger) { List<String> missing = new ArrayList<>(); for (Map.Entry<String, Object> entry : parameters.entrySet()) { if (entry.getValue() == null) { missing.add(entry.getKey()); } } if (missing.size() > 0) { throw logger.logExceptionAsWarning(new IllegalArgumentException("Must provide non-null values for " + String.join(", ", missing) + " properties in " + className)); } } public static void validateAuthHost(String authHost, ClientLogger logger) { try { new URI(authHost); } catch (URISyntaxException e) { throw logger.logExceptionAsError( new IllegalArgumentException("Must provide a valid URI for authority host.", e)); } if (!authHost.startsWith("https")) { throw logger.logExceptionAsError( new IllegalArgumentException("Authority host must use https scheme.")); } } public static void validateTenantIdCharacterRange(String id, ClientLogger logger) { if (id != null) { for (int i = 0; i < id.length(); i++) { if (!isValidTenantCharacter(id.charAt(i))) { throw logger.logExceptionAsError( new IllegalArgumentException( "Invalid tenant id provided. You can locate your tenant id by following the instructions" + " listed here: https: } } } } public static void validateInteractiveBrowserRedirectUrlSetup(Integer port, String redirectUrl, ClientLogger logger) { if (port != null && redirectUrl != null) { throw logger.logExceptionAsError( new IllegalArgumentException("Port and Redirect URL cannot be configured at the same time. " + "Port is deprecated now. Use the redirectUrl setter to specify" + " the redirect URL on the builder.")); } } }
class ValidationUtil { public static void validate(String className, Map<String, Object> parameters, ClientLogger logger) { List<String> missing = new ArrayList<>(); for (Map.Entry<String, Object> entry : parameters.entrySet()) { if (entry.getValue() == null) { missing.add(entry.getKey()); } } if (missing.size() > 0) { throw logger.logExceptionAsWarning(new IllegalArgumentException("Must provide non-null values for " + String.join(", ", missing) + " properties in " + className)); } } public static void validateAuthHost(String authHost, ClientLogger logger) { try { new URI(authHost); } catch (URISyntaxException e) { throw logger.logExceptionAsError( new IllegalArgumentException("Must provide a valid URI for authority host.", e)); } if (!authHost.startsWith("https")) { throw logger.logExceptionAsError( new IllegalArgumentException("Authority host must use https scheme.")); } } public static void validateTenantIdCharacterRange(String id, ClientLogger logger) { if (id != null) { for (int i = 0; i < id.length(); i++) { if (!isValidTenantCharacter(id.charAt(i))) { throw logger.logExceptionAsError( new IllegalArgumentException( "Invalid tenant id provided. You can locate your tenant id by following the instructions" + " listed here: https: } } } } public static void validateInteractiveBrowserRedirectUrlSetup(Integer port, String redirectUrl, ClientLogger logger) { if (port != null && redirectUrl != null) { throw logger.logExceptionAsError( new IllegalArgumentException("Port and Redirect URL cannot be configured at the same time. " + "Port is deprecated now. Use the redirectUrl setter to specify" + " the redirect URL on the builder.")); } } }
Is the refresh needed? It is an extra GET.
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) { clearCache(); if (isGroupFaulted) { return Mono.empty(); } return refreshAsync().then(); }
return refreshAsync().then();
public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) { return Mono .just(true) .map( ignored -> { clearCache(); return ignored; }) .then(); }
class SpringServiceImpl extends GroupableResourceImpl<SpringService, ServiceResourceInner, SpringServiceImpl, AppPlatformManager> implements SpringService, SpringService.Definition, SpringService.Update { private final SpringServiceCertificatesImpl certificates = new SpringServiceCertificatesImpl(this); private final SpringAppsImpl apps = new SpringAppsImpl(this); private final SpringConfigurationServicesImpl configurationServices = new SpringConfigurationServicesImpl(this); private FunctionalTaskItem configServerTask = null; private FunctionalTaskItem monitoringSettingTask = null; private ServiceResourceInner patchToUpdate = new ServiceResourceInner(); private boolean updated; private boolean updateConfigurationServiceTask = true; private final Map<String, ConfigurationServiceGitRepository> gitRepositoryMap = new ConcurrentHashMap<>(); SpringServiceImpl(String name, ServiceResourceInner innerObject, AppPlatformManager manager) { super(name, innerObject, manager); } @Override public SpringServiceImpl update() { return super.update(); } @Override public Sku sku() { return innerModel().sku(); } @Override public SpringApps apps() { return apps; } @Override public SpringServiceCertificates certificates() { return certificates; } @Override public MonitoringSettingProperties getMonitoringSetting() { return getMonitoringSettingAsync().block(); } @Override public Mono<MonitoringSettingProperties> getMonitoringSettingAsync() { return manager().serviceClient().getMonitoringSettings().getAsync(resourceGroupName(), name()) .map(MonitoringSettingResourceInner::properties); } @Override public ConfigServerProperties getServerProperties() { return getServerPropertiesAsync().block(); } @Override public Mono<ConfigServerProperties> getServerPropertiesAsync() { return manager().serviceClient().getConfigServers().getAsync(resourceGroupName(), name()) .map(ConfigServerResourceInner::properties); } @Override public TestKeys listTestKeys() { return listTestKeysAsync().block(); } @Override public Mono<TestKeys> listTestKeysAsync() { return manager().serviceClient().getServices().listTestKeysAsync(resourceGroupName(), name()); } @Override public TestKeys regenerateTestKeys(TestKeyType keyType) { return regenerateTestKeysAsync(keyType).block(); } @Override public Mono<TestKeys> regenerateTestKeysAsync(TestKeyType keyType) { return manager().serviceClient().getServices().regenerateTestKeyAsync(resourceGroupName(), name(), new RegenerateTestKeyRequestPayload().withKeyType(keyType)); } @Override public void disableTestEndpoint() { disableTestEndpointAsync().block(); } @Override public Mono<Void> disableTestEndpointAsync() { return manager().serviceClient().getServices().disableTestEndpointAsync(resourceGroupName(), name()); } @Override public TestKeys enableTestEndpoint() { return enableTestEndpointAsync().block(); } @Override public Mono<TestKeys> enableTestEndpointAsync() { return manager().serviceClient().getServices().enableTestEndpointAsync(resourceGroupName(), name()); } @Override public SpringConfigurationService getDefaultConfigurationService() { return manager().serviceClient().getConfigurationServices().getAsync(resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME) .switchIfEmpty(Mono.empty()) .map(inner -> new SpringConfigurationServiceImpl(inner.name(), this, inner)) .block(); } @Override public SpringConfigurationServices configurationServices() { return this.configurationServices; } @Override public SpringServiceImpl withSku(String skuName) { return withSku(new Sku().withName(skuName)); } 
@Override public SpringServiceImpl withSku(SkuName skuName) { return withSku(skuName.toString()); } @Override public SpringServiceImpl withSku(String skuName, int capacity) { return withSku(new Sku().withName(skuName).withCapacity(capacity)); } @Override public SpringServiceImpl withSku(Sku sku) { innerModel().withSku(sku); if (isInUpdateMode()) { patchToUpdate.withSku(sku); updated = true; } return this; } @Override public SpringServiceImpl withEnterpriseTierSku() { withSku(SkuName.E0); return this; } @Override public SpringServiceImpl withTracing(String appInsightInstrumentationKey) { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync(resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties() .withAppInsightsInstrumentationKey(appInsightInstrumentationKey) .withTraceEnabled(true))) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutTracing() { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync( resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties().withTraceEnabled(false) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUri(String uri) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty().withUri(uri) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUriAndCredential(String uri, String username, String password) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty() .withUri(uri) .withUsername(username) .withPassword(password) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitConfig(ConfigServerGitProperty gitConfig) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty(gitConfig)) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutGitConfig() { if (isEnterpriseTier()) { return withGitConfig((ConfigurationServiceGitProperty) null); } else { return withGitConfig((ConfigServerGitProperty) null); } } @Override public void beforeGroupCreateOrUpdate() { if (configServerTask != null) { this.addPostRunDependent(configServerTask); } if (monitoringSettingTask != null) { this.addPostRunDependent(monitoringSettingTask); } if (isEnterpriseTier()) { if (updateConfigurationServiceTask) { prepareCreateOrUpdateConfigurationService(); } updateConfigurationServiceTask = false; } configServerTask = null; monitoringSettingTask = null; } @Override public Mono<SpringService> createResourceAsync() { Mono<ServiceResourceInner> createOrUpdate; if (isInCreateMode()) { createOrUpdate = manager().serviceClient().getServices() .createOrUpdateAsync(resourceGroupName(), name(), innerModel()); if (isEnterpriseTier()) { createOrUpdate = 
createOrUpdate .flatMap(inner -> manager().serviceClient().getBuildServiceAgentPools().updatePutAsync( resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME, new BuildServiceAgentPoolResourceInner() .withProperties( new BuildServiceAgentPoolProperties() .withPoolSize( new BuildServiceAgentPoolSizeProperties() .withName("S1"))) ).then(Mono.just(inner))); } } else if (updated) { createOrUpdate = manager().serviceClient().getServices().updateAsync( resourceGroupName(), name(), patchToUpdate); patchToUpdate = new ServiceResourceInner(); updated = false; } else { return Mono.just(this); } return createOrUpdate .map(inner -> { this.setInner(inner); return this; }); } @Override @Override protected Mono<ServiceResourceInner> getInnerAsync() { return manager().serviceClient().getServices().getByResourceGroupAsync(resourceGroupName(), name()) .map(inner -> { clearCache(); return inner; }); } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties().withVaultUri(keyVaultUri).withKeyVaultCertName(certNameInKeyVault) ); return this; } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault, String certVersion) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties() .withVaultUri(keyVaultUri) .withKeyVaultCertName(certNameInKeyVault) .withCertVersion(certVersion) ); return this; } @Override public SpringServiceImpl withoutCertificate(String name) { certificates.prepareDelete(name); return this; } @Override public SpringServiceImpl withGitConfig(String uri, String branch, List<String> filePatterns) { return withGitConfigRepository(Constants.DEFAULT_TANZU_COMPONENT_NAME, uri, branch, filePatterns); } @Override public SpringServiceImpl withGitConfigRepository(String name, String uri, String branch, List<String> filePatterns) { if (CoreUtils.isNullOrEmpty(name)) { return this; } this.gitRepositoryMap.computeIfAbsent(name, key -> new ConfigurationServiceGitRepository() .withName(name) .withUri(uri) .withPatterns(filePatterns) .withLabel(branch) ); updateConfigurationServiceTask = true; return this; } @Override public SpringServiceImpl withGitConfig(ConfigurationServiceGitProperty gitConfig) { gitRepositoryMap.clear(); if (gitConfig != null && CoreUtils.isNullOrEmpty(gitConfig.repositories())) { for (ConfigurationServiceGitRepository repository : gitConfig.repositories()) { this.gitRepositoryMap.put(repository.name(), repository); } } updateConfigurationServiceTask = true; return this; } private void prepareCreateOrUpdateConfigurationService() { List<ConfigurationServiceGitRepository> repositories = new ArrayList<>(this.gitRepositoryMap.values()); this.configurationServices.prepareCreateOrUpdate(new ConfigurationServiceGitProperty().withRepositories(repositories)); } private boolean isInUpdateMode() { return !isInCreateMode(); } boolean isEnterpriseTier() { return innerModel().sku() != null && SkuName.E0.toString().equals(innerModel().sku().name()); } private void clearCache() { this.gitRepositoryMap.clear(); this.configurationServices.clear(); } }
class SpringServiceImpl extends GroupableResourceImpl<SpringService, ServiceResourceInner, SpringServiceImpl, AppPlatformManager> implements SpringService, SpringService.Definition, SpringService.Update { private final SpringServiceCertificatesImpl certificates = new SpringServiceCertificatesImpl(this); private final SpringAppsImpl apps = new SpringAppsImpl(this); private final SpringConfigurationServicesImpl configurationServices = new SpringConfigurationServicesImpl(this); private FunctionalTaskItem configServerTask = null; private FunctionalTaskItem monitoringSettingTask = null; private ServiceResourceInner patchToUpdate = new ServiceResourceInner(); private boolean updated; private final ConfigurationServiceConfig configurationServiceConfig = new ConfigurationServiceConfig(); SpringServiceImpl(String name, ServiceResourceInner innerObject, AppPlatformManager manager) { super(name, innerObject, manager); } @Override public SpringServiceImpl update() { return super.update(); } @Override public Sku sku() { return innerModel().sku(); } @Override public SpringApps apps() { return apps; } @Override public SpringServiceCertificates certificates() { return certificates; } @Override public MonitoringSettingProperties getMonitoringSetting() { return getMonitoringSettingAsync().block(); } @Override public Mono<MonitoringSettingProperties> getMonitoringSettingAsync() { return manager().serviceClient().getMonitoringSettings().getAsync(resourceGroupName(), name()) .map(MonitoringSettingResourceInner::properties); } @Override public ConfigServerProperties getServerProperties() { return getServerPropertiesAsync().block(); } @Override public Mono<ConfigServerProperties> getServerPropertiesAsync() { return manager().serviceClient().getConfigServers().getAsync(resourceGroupName(), name()) .map(ConfigServerResourceInner::properties); } @Override public TestKeys listTestKeys() { return listTestKeysAsync().block(); } @Override public Mono<TestKeys> listTestKeysAsync() { return manager().serviceClient().getServices().listTestKeysAsync(resourceGroupName(), name()); } @Override public TestKeys regenerateTestKeys(TestKeyType keyType) { return regenerateTestKeysAsync(keyType).block(); } @Override public Mono<TestKeys> regenerateTestKeysAsync(TestKeyType keyType) { return manager().serviceClient().getServices().regenerateTestKeyAsync(resourceGroupName(), name(), new RegenerateTestKeyRequestPayload().withKeyType(keyType)); } @Override public void disableTestEndpoint() { disableTestEndpointAsync().block(); } @Override public Mono<Void> disableTestEndpointAsync() { return manager().serviceClient().getServices().disableTestEndpointAsync(resourceGroupName(), name()); } @Override public TestKeys enableTestEndpoint() { return enableTestEndpointAsync().block(); } @Override public Mono<TestKeys> enableTestEndpointAsync() { return manager().serviceClient().getServices().enableTestEndpointAsync(resourceGroupName(), name()); } @Override public SpringConfigurationService getDefaultConfigurationService() { return manager().serviceClient().getConfigurationServices().getAsync(resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME) .switchIfEmpty(Mono.empty()) .map(inner -> new SpringConfigurationServiceImpl(inner.name(), this, inner)) .block(); } @Override public SpringServiceImpl withSku(String skuName) { return withSku(new Sku().withName(skuName)); } @Override public SpringServiceImpl withSku(SkuName skuName) { return withSku(skuName.toString()); } @Override public SpringServiceImpl withSku(String skuName, int 
capacity) { return withSku(new Sku().withName(skuName).withCapacity(capacity)); } @Override public SpringServiceImpl withSku(Sku sku) { innerModel().withSku(sku); if (isInUpdateMode()) { patchToUpdate.withSku(sku); updated = true; } return this; } @Override public SpringServiceImpl withEnterpriseTierSku() { withSku(SkuName.E0); return this; } @Override public SpringServiceImpl withTracing(String appInsightInstrumentationKey) { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync(resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties() .withAppInsightsInstrumentationKey(appInsightInstrumentationKey) .withTraceEnabled(true))) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutTracing() { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync( resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties().withTraceEnabled(false) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUri(String uri) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty().withUri(uri) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUriAndCredential(String uri, String username, String password) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty() .withUri(uri) .withUsername(username) .withPassword(password) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitConfig(ConfigServerGitProperty gitConfig) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty(gitConfig)) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutGitConfig() { return withGitConfig(null); } @Override public void beforeGroupCreateOrUpdate() { if (configServerTask != null) { this.addPostRunDependent(configServerTask); } if (monitoringSettingTask != null) { this.addPostRunDependent(monitoringSettingTask); } if (isEnterpriseTier()) { if (isInCreateMode() || configurationServiceConfig.needUpdate()) { prepareCreateOrUpdateConfigurationService(); configurationServiceConfig.clearUpdate(); } } configServerTask = null; monitoringSettingTask = null; } @Override public Mono<SpringService> createResourceAsync() { Mono<ServiceResourceInner> createOrUpdate; if (isInCreateMode()) { createOrUpdate = manager().serviceClient().getServices() .createOrUpdateAsync(resourceGroupName(), name(), innerModel()); if (isEnterpriseTier()) { createOrUpdate = createOrUpdate .flatMap(inner -> manager().serviceClient().getBuildServiceAgentPools().updatePutAsync( resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME, new BuildServiceAgentPoolResourceInner() 
.withProperties( new BuildServiceAgentPoolProperties() .withPoolSize( new BuildServiceAgentPoolSizeProperties() .withName("S1"))) ).then(Mono.just(inner))); } } else if (updated) { createOrUpdate = manager().serviceClient().getServices().updateAsync( resourceGroupName(), name(), patchToUpdate); patchToUpdate = new ServiceResourceInner(); updated = false; } else { return Mono.just(this); } return createOrUpdate .map(inner -> { this.setInner(inner); return this; }); } @Override @Override protected Mono<ServiceResourceInner> getInnerAsync() { return manager().serviceClient().getServices().getByResourceGroupAsync(resourceGroupName(), name()) .map(inner -> { clearCache(); return inner; }); } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties().withVaultUri(keyVaultUri).withKeyVaultCertName(certNameInKeyVault) ); return this; } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault, String certVersion) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties() .withVaultUri(keyVaultUri) .withKeyVaultCertName(certNameInKeyVault) .withCertVersion(certVersion) ); return this; } @Override public SpringServiceImpl withoutCertificate(String name) { certificates.prepareDelete(name); return this; } @Override public SpringServiceImpl withDefaultGitRepository(String uri, String branch, List<String> filePatterns) { return withGitRepository(Constants.DEFAULT_TANZU_COMPONENT_NAME, uri, branch, filePatterns); } @Override public SpringServiceImpl withGitRepository(String name, String uri, String branch, List<String> filePatterns) { if (CoreUtils.isNullOrEmpty(name)) { return this; } this.configurationServiceConfig.addRepository( new ConfigurationServiceGitRepository() .withName(name) .withUri(uri) .withPatterns(filePatterns) .withLabel(branch)); return this; } @Override public SpringServiceImpl withGitRepositoryConfig(ConfigurationServiceGitProperty gitConfig) { this.configurationServiceConfig.clearRepositories(); if (gitConfig != null && !CoreUtils.isNullOrEmpty(gitConfig.repositories())) { for (ConfigurationServiceGitRepository repository : gitConfig.repositories()) { this.configurationServiceConfig.addRepository(repository); } } return this; } @Override public SpringServiceImpl withoutGitRepository(String name) { this.configurationServiceConfig.removeRepository(name); return this; } @Override public SpringServiceImpl withoutGitRepositories() { this.configurationServiceConfig.clearRepositories(); return this; } private void prepareCreateOrUpdateConfigurationService() { List<ConfigurationServiceGitRepository> repositories = this.configurationServiceConfig.mergeRepositories(); this.configurationServices.prepareCreateOrUpdate(new ConfigurationServiceGitProperty().withRepositories(repositories)); } private boolean isInUpdateMode() { return !isInCreateMode(); } boolean isEnterpriseTier() { return innerModel().sku() != null && SkuName.E0.toString().equals(innerModel().sku().name()); } private void clearCache() { this.configurationServices.clear(); this.configurationServiceConfig.reset(); } private class ConfigurationServiceConfig { private final Map<String, ConfigurationServiceGitRepository> gitRepositoryMap = new ConcurrentHashMap<>(); private final Set<String> repositoriesToDelete = new HashSet<>(); private boolean update; private boolean clearRepositories; boolean needUpdate() { return update; } 
public void clearUpdate() { this.update = false; } void reset() { this.gitRepositoryMap.clear(); this.update = false; this.repositoriesToDelete.clear(); this.clearRepositories = false; } public void addRepository(ConfigurationServiceGitRepository repository) { this.gitRepositoryMap.putIfAbsent(repository.name(), repository); this.update = true; } public void clearRepositories() { this.gitRepositoryMap.clear(); this.clearRepositories = true; this.update = true; } public void removeRepository(String name) { this.repositoriesToDelete.add(name); this.update = true; } public List<ConfigurationServiceGitRepository> mergeRepositories() { if (this.clearRepositories) { return new ArrayList<>(this.gitRepositoryMap.values()); } else { Map<String, ConfigurationServiceGitRepository> existingGitRepositories = new HashMap<>(); if (isInUpdateMode()) { SpringConfigurationService configurationService = getDefaultConfigurationService(); if (configurationService != null) { List<ConfigurationServiceGitRepository> repositoryList = configurationService.innerModel().properties().settings() == null ? Collections.emptyList() : configurationService.innerModel().properties().settings().gitProperty().repositories(); if (repositoryList != null) { repositoryList.forEach(repository -> existingGitRepositories.put(repository.name(), repository)); } } } existingGitRepositories.putAll(gitRepositoryMap); for (String repositoryToDelete : repositoriesToDelete) { existingGitRepositories.remove(repositoryToDelete); } return new ArrayList<>(existingGitRepositories.values()); } } } }
Does not support Enterprise Tier deployment due to the amount of code; will support it in the next individual PR.
public SpringAppDeploymentImpl withJarFile(File jar) { if (service().isEnterpriseTier()) { throw new UnsupportedOperationException("Enterprise tier artifact deployment not supported yet."); } else { ensureSource(UserSourceType.JAR); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorage(jar, option) .then(context.voidMono()); }) ); } return this; }
throw new UnsupportedOperationException("Enterprise tier artifact deployment not supported yet.");
public SpringAppDeploymentImpl withJarFile(File jar) { if (service().isEnterpriseTier()) { throw new UnsupportedOperationException("Enterprise tier artifact deployment not supported yet."); } else { ensureSource(UserSourceType.JAR); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorage(jar, option) .then(context.voidMono()); }) ); } return this; }
class SpringAppDeploymentImpl extends ExternalChildResourceImpl<SpringAppDeployment, DeploymentResourceInner, SpringAppImpl, SpringApp> implements SpringAppDeployment, SpringAppDeployment.Definition<SpringAppImpl, SpringAppDeploymentImpl>, SpringAppDeployment.Update { SpringAppDeploymentImpl(String name, SpringAppImpl parent, DeploymentResourceInner innerObject) { super(name, parent, innerObject); } @Override public String appName() { if (innerModel().properties() == null) { return null; } return innerModel().name(); } @Override public DeploymentSettings settings() { if (innerModel().properties() == null) { return null; } return innerModel().properties().deploymentSettings(); } @Override public DeploymentResourceStatus status() { if (innerModel().properties() == null) { return null; } return innerModel().properties().status(); } @Override public boolean isActive() { if (innerModel().properties() == null) { return false; } return innerModel().properties().active(); } @Override public List<DeploymentInstance> instances() { if (innerModel().properties() == null) { return null; } return innerModel().properties().instances(); } @Override public void start() { startAsync().block(); } @Override public Mono<Void> startAsync() { return manager().serviceClient().getDeployments().startAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void stop() { stopAsync().block(); } @Override public Mono<Void> stopAsync() { return manager().serviceClient().getDeployments().stopAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void restart() { restartAsync().block(); } @Override public Mono<Void> restartAsync() { return manager().serviceClient().getDeployments().restartAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String getLogFileUrl() { return getLogFileUrlAsync().block(); } @Override public Mono<String> getLogFileUrlAsync() { return manager().serviceClient().getDeployments().getLogFileUrlAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ) .map(LogFileUrlResponseInner::url); } private void ensureDeploySettings() { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().deploymentSettings() == null) { innerModel().properties().withDeploymentSettings(new DeploymentSettings()); } } private void ensureSource() { ensureSource(null); } private void ensureSource(UserSourceType type) { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().source() == null) { if (type == UserSourceType.JAR) { innerModel().properties().withSource(new JarUploadedUserSourceInfo()); } else if (type == UserSourceType.SOURCE) { innerModel().properties().withSource(new SourceUploadedUserSourceInfo()); } else if (type == UserSourceType.NET_CORE_ZIP) { innerModel().properties().withSource(new NetCoreZipUploadedUserSourceInfo()); } else if (type == UserSourceType.BUILD_RESULT) { innerModel().properties().withSource(new BuildResultUserSourceInfo()); } else { innerModel().properties().withSource(new UserSourceInfo()); } } } private ShareFileAsyncClient createShareFileAsyncClient(ResourceUploadDefinition option) { return new ShareFileClientBuilder() .endpoint(option.uploadUrl()) .httpClient(manager().httpPipeline().getHttpClient()) 
.buildFileAsyncClient(); } private Mono<Void> uploadToStorage(File source, ResourceUploadDefinition option) { UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof UploadedUserSourceInfo) { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) userSourceInfo; try { uploadedUserSourceInfo.withRelativePath(option.relativePath()); ShareFileAsyncClient shareFileAsyncClient = createShareFileAsyncClient(option); return shareFileAsyncClient.create(source.length()) .flatMap(fileInfo -> shareFileAsyncClient.uploadFromFile(source.getAbsolutePath())) .then(Mono.empty()); } catch (Exception e) { return Mono.error(e); } } else { return Mono.empty(); } } @Override @Override public SpringAppDeploymentImpl withExistingSource(UserSourceType type, String relativePath) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); UserSourceInfo sourceInfo = innerModel().properties().source(); if (sourceInfo instanceof BuildResultUserSourceInfo) { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) sourceInfo; userSourceInfo.withBuildResultId(relativePath); } } else { ensureSource(type); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof UploadedUserSourceInfo) { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRelativePath(relativePath); } } return this; } private boolean isEnterpriseTier() { return service().isEnterpriseTier(); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz) { ensureSource(UserSourceType.SOURCE); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> uploadToStorage(sourceCodeTarGz, option) .then(context.voidMono())) ); return this; } @Override public SpringAppDeploymentImpl withTargetModule(String moduleName) { ensureSource(UserSourceType.SOURCE); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo sourceUploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; sourceUploadedUserSourceInfo.withArtifactSelector(moduleName); } return this; } @Override public SpringAppDeploymentImpl withSingleModule() { ensureSource(UserSourceType.SOURCE); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo sourceUploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; sourceUploadedUserSourceInfo.withArtifactSelector(null); } return this; } @Override public SpringAppDeploymentImpl withInstance(int count) { if (innerModel().sku() == null) { innerModel().withSku(parent().parent().sku()); } if (innerModel().sku() == null) { innerModel().withSku(new Sku().withName("B0")); } innerModel().sku().withCapacity(count); return this; } @Override public SpringAppDeploymentImpl withCpu(int cpuCount) { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().resourceRequests() == null) { innerModel().properties().deploymentSettings().withResourceRequests(new ResourceRequests()); } innerModel().properties().deploymentSettings().resourceRequests().withCpu(String.valueOf(cpuCount)); return this; } @Override public SpringAppDeploymentImpl withMemory(int sizeInGB) { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().resourceRequests() == null) { 
innerModel().properties().deploymentSettings().withResourceRequests(new ResourceRequests()); } innerModel().properties().deploymentSettings().resourceRequests().withMemory(String.format("%dGi", sizeInGB)); return this; } @Override public SpringAppDeploymentImpl withRuntime(RuntimeVersion version) { UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof NetCoreZipUploadedUserSourceInfo) { NetCoreZipUploadedUserSourceInfo uploadedUserSourceInfo = (NetCoreZipUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo uploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } return this; } @Override public SpringAppDeploymentImpl withJvmOptions(String jvmOptions) { ensureSource(UserSourceType.JAR); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withJvmOptions(jvmOptions); } return this; } private void ensureEnvironments() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().environmentVariables() == null) { innerModel().properties().deploymentSettings().withEnvironmentVariables(new HashMap<>()); } } @Override public SpringAppDeploymentImpl withEnvironment(String key, String value) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().put(key, value); return this; } @Override public SpringAppDeploymentImpl withoutEnvironment(String key) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().remove(key); return this; } @Override public SpringAppDeploymentImpl withVersionName(String versionName) { ensureSource(); innerModel().properties().source().withVersion(versionName); return this; } @Override public SpringAppDeploymentImpl withActivation() { this.addPostRunDependent( context -> parent().update().withActiveDeployment(name()).applyAsync() .map(Function.identity()) ); return this; } @Override public Mono<SpringAppDeployment> createResourceAsync() { return manager().serviceClient().getDeployments().createOrUpdateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<SpringAppDeployment> updateResourceAsync() { return manager().serviceClient().getDeployments().updateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<Void> deleteResourceAsync() { return manager().serviceClient().getDeployments().deleteAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override protected Mono<DeploymentResourceInner> getInnerAsync() { return manager().serviceClient().getDeployments().getAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String id() { return 
innerModel().id(); } @Override public SpringAppDeploymentImpl update() { prepareUpdate(); return this; } private AppPlatformManager manager() { return parent().manager(); } @Override public SpringAppImpl attach() { return parent().addActiveDeployment(this); } private SpringServiceImpl service() { return parent().parent(); } }
class SpringAppDeploymentImpl extends ExternalChildResourceImpl<SpringAppDeployment, DeploymentResourceInner, SpringAppImpl, SpringApp> implements SpringAppDeployment, SpringAppDeployment.Definition<SpringAppImpl, SpringAppDeploymentImpl>, SpringAppDeployment.Update { SpringAppDeploymentImpl(String name, SpringAppImpl parent, DeploymentResourceInner innerObject) { super(name, parent, innerObject); } @Override public String appName() { if (innerModel().properties() == null) { return null; } return innerModel().name(); } @Override public DeploymentSettings settings() { if (innerModel().properties() == null) { return null; } return innerModel().properties().deploymentSettings(); } @Override public DeploymentResourceStatus status() { if (innerModel().properties() == null) { return null; } return innerModel().properties().status(); } @Override public boolean isActive() { if (innerModel().properties() == null) { return false; } return innerModel().properties().active(); } @Override public List<DeploymentInstance> instances() { if (innerModel().properties() == null) { return null; } return innerModel().properties().instances(); } @Override public void start() { startAsync().block(); } @Override public Mono<Void> startAsync() { return manager().serviceClient().getDeployments().startAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void stop() { stopAsync().block(); } @Override public Mono<Void> stopAsync() { return manager().serviceClient().getDeployments().stopAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void restart() { restartAsync().block(); } @Override public Mono<Void> restartAsync() { return manager().serviceClient().getDeployments().restartAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String getLogFileUrl() { return getLogFileUrlAsync().block(); } @Override public Mono<String> getLogFileUrlAsync() { return manager().serviceClient().getDeployments().getLogFileUrlAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ) .map(LogFileUrlResponseInner::url); } private void ensureDeploySettings() { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().deploymentSettings() == null) { innerModel().properties().withDeploymentSettings(new DeploymentSettings()); } } private void ensureSource() { ensureSource(null); } private void ensureSource(UserSourceType type) { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().source() == null) { if (type == UserSourceType.JAR) { innerModel().properties().withSource(new JarUploadedUserSourceInfo()); } else if (type == UserSourceType.SOURCE) { innerModel().properties().withSource(new SourceUploadedUserSourceInfo()); } else if (type == UserSourceType.NET_CORE_ZIP) { innerModel().properties().withSource(new NetCoreZipUploadedUserSourceInfo()); } else if (type == UserSourceType.BUILD_RESULT) { innerModel().properties().withSource(new BuildResultUserSourceInfo()); } else { innerModel().properties().withSource(new UserSourceInfo()); } } } private ShareFileAsyncClient createShareFileAsyncClient(ResourceUploadDefinition option) { return new ShareFileClientBuilder() .endpoint(option.uploadUrl()) .httpClient(manager().httpPipeline().getHttpClient()) 
.buildFileAsyncClient(); } private Mono<Void> uploadToStorage(File source, ResourceUploadDefinition option) { UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof UploadedUserSourceInfo) { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) userSourceInfo; try { uploadedUserSourceInfo.withRelativePath(option.relativePath()); ShareFileAsyncClient shareFileAsyncClient = createShareFileAsyncClient(option); return shareFileAsyncClient.create(source.length()) .flatMap(fileInfo -> shareFileAsyncClient.uploadFromFile(source.getAbsolutePath())) .then(Mono.empty()); } catch (Exception e) { return Mono.error(e); } } else { return Mono.empty(); } } @Override @Override public SpringAppDeploymentImpl withExistingSource(UserSourceType type, String relativePath) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); UserSourceInfo sourceInfo = innerModel().properties().source(); if (sourceInfo instanceof BuildResultUserSourceInfo) { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) sourceInfo; userSourceInfo.withBuildResultId(relativePath); } } else { ensureSource(type); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof UploadedUserSourceInfo) { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRelativePath(relativePath); } } return this; } private boolean isEnterpriseTier() { return service().isEnterpriseTier(); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz) { ensureSource(UserSourceType.SOURCE); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> uploadToStorage(sourceCodeTarGz, option) .then(context.voidMono())) ); return this; } @Override public SpringAppDeploymentImpl withTargetModule(String moduleName) { ensureSource(UserSourceType.SOURCE); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo sourceUploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; sourceUploadedUserSourceInfo.withArtifactSelector(moduleName); } return this; } @Override public SpringAppDeploymentImpl withSingleModule() { ensureSource(UserSourceType.SOURCE); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo sourceUploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; sourceUploadedUserSourceInfo.withArtifactSelector(null); } return this; } @Override public SpringAppDeploymentImpl withInstance(int count) { if (innerModel().sku() == null) { innerModel().withSku(parent().parent().sku()); } if (innerModel().sku() == null) { innerModel().withSku(new Sku().withName("B0")); } innerModel().sku().withCapacity(count); return this; } @Override public SpringAppDeploymentImpl withCpu(int cpuCount) { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().resourceRequests() == null) { innerModel().properties().deploymentSettings().withResourceRequests(new ResourceRequests()); } innerModel().properties().deploymentSettings().resourceRequests().withCpu(String.valueOf(cpuCount)); return this; } @Override public SpringAppDeploymentImpl withMemory(int sizeInGB) { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().resourceRequests() == null) { 
innerModel().properties().deploymentSettings().withResourceRequests(new ResourceRequests()); } innerModel().properties().deploymentSettings().resourceRequests().withMemory(String.format("%dGi", sizeInGB)); return this; } @Override public SpringAppDeploymentImpl withRuntime(RuntimeVersion version) { UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof NetCoreZipUploadedUserSourceInfo) { NetCoreZipUploadedUserSourceInfo uploadedUserSourceInfo = (NetCoreZipUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo uploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } return this; } @Override public SpringAppDeploymentImpl withJvmOptions(String jvmOptions) { ensureSource(UserSourceType.JAR); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withJvmOptions(jvmOptions); } return this; } private void ensureEnvironments() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().environmentVariables() == null) { innerModel().properties().deploymentSettings().withEnvironmentVariables(new HashMap<>()); } } @Override public SpringAppDeploymentImpl withEnvironment(String key, String value) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().put(key, value); return this; } @Override public SpringAppDeploymentImpl withoutEnvironment(String key) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().remove(key); return this; } @Override public SpringAppDeploymentImpl withVersionName(String versionName) { ensureSource(); innerModel().properties().source().withVersion(versionName); return this; } @Override public SpringAppDeploymentImpl withActivation() { this.addPostRunDependent( context -> parent().update().withActiveDeployment(name()).applyAsync() .map(Function.identity()) ); return this; } @Override public Mono<SpringAppDeployment> createResourceAsync() { return manager().serviceClient().getDeployments().createOrUpdateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<SpringAppDeployment> updateResourceAsync() { return manager().serviceClient().getDeployments().updateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<Void> deleteResourceAsync() { return manager().serviceClient().getDeployments().deleteAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override protected Mono<DeploymentResourceInner> getInnerAsync() { return manager().serviceClient().getDeployments().getAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String id() { return 
innerModel().id(); } @Override public SpringAppDeploymentImpl update() { prepareUpdate(); return this; } private AppPlatformManager manager() { return parent().manager(); } @Override public SpringAppImpl attach() { return parent().addActiveDeployment(this); } private SpringServiceImpl service() { return parent().parent(); } }
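A minimal caller-side sketch of the behavior flagged in the comment above, assuming the fluent API shown in these cells; the springService, deployment, and jarFile variables are illustrative placeholders, not part of the original row:

// Enterprise tier (sku name "E0") currently rejects JAR uploads: withJarFile throws
// UnsupportedOperationException, per the method body above. Other tiers upload as before.
boolean enterpriseTier = springService.sku() != null
    && SkuName.E0.toString().equals(springService.sku().name()); // mirrors SpringServiceImpl.isEnterpriseTier()
if (!enterpriseTier) {
    deployment.withJarFile(jarFile); // uploads the JAR via the file share, as in uploadToStorage(...)
}
// else: Enterprise tier artifact deployment is deferred to the follow-up PR mentioned above.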
So this `S1` etc. seems to be configurable in a future PR? And for enterprise, does this mean it now has to send 2 PUTs (or more if gitConfig is set)?
public Mono<SpringService> createResourceAsync() { Mono<ServiceResourceInner> createOrUpdate; if (isInCreateMode()) { createOrUpdate = manager().serviceClient().getServices() .createOrUpdateAsync(resourceGroupName(), name(), innerModel()); if (isEnterpriseTier()) { createOrUpdate = createOrUpdate .flatMap(inner -> manager().serviceClient().getBuildServiceAgentPools().updatePutAsync( resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME, new BuildServiceAgentPoolResourceInner() .withProperties( new BuildServiceAgentPoolProperties() .withPoolSize( new BuildServiceAgentPoolSizeProperties() .withName("S1"))) ).then(Mono.just(inner))); } } else if (updated) { createOrUpdate = manager().serviceClient().getServices().updateAsync( resourceGroupName(), name(), patchToUpdate); patchToUpdate = new ServiceResourceInner(); updated = false; } else { return Mono.just(this); } return createOrUpdate .map(inner -> { this.setInner(inner); return this; }); }
.withName("S1")))
public Mono<SpringService> createResourceAsync() { Mono<ServiceResourceInner> createOrUpdate; if (isInCreateMode()) { createOrUpdate = manager().serviceClient().getServices() .createOrUpdateAsync(resourceGroupName(), name(), innerModel()); if (isEnterpriseTier()) { createOrUpdate = createOrUpdate .flatMap(inner -> manager().serviceClient().getBuildServiceAgentPools().updatePutAsync( resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME, new BuildServiceAgentPoolResourceInner() .withProperties( new BuildServiceAgentPoolProperties() .withPoolSize( new BuildServiceAgentPoolSizeProperties() .withName("S1"))) ).then(Mono.just(inner))); } } else if (updated) { createOrUpdate = manager().serviceClient().getServices().updateAsync( resourceGroupName(), name(), patchToUpdate); patchToUpdate = new ServiceResourceInner(); updated = false; } else { return Mono.just(this); } return createOrUpdate .map(inner -> { this.setInner(inner); return this; }); }
class SpringServiceImpl extends GroupableResourceImpl<SpringService, ServiceResourceInner, SpringServiceImpl, AppPlatformManager> implements SpringService, SpringService.Definition, SpringService.Update { private final SpringServiceCertificatesImpl certificates = new SpringServiceCertificatesImpl(this); private final SpringAppsImpl apps = new SpringAppsImpl(this); private final SpringConfigurationServicesImpl configurationServices = new SpringConfigurationServicesImpl(this); private FunctionalTaskItem configServerTask = null; private FunctionalTaskItem monitoringSettingTask = null; private ServiceResourceInner patchToUpdate = new ServiceResourceInner(); private boolean updated; private boolean updateConfigurationServiceTask = true; private final Map<String, ConfigurationServiceGitRepository> gitRepositoryMap = new ConcurrentHashMap<>(); SpringServiceImpl(String name, ServiceResourceInner innerObject, AppPlatformManager manager) { super(name, innerObject, manager); } @Override public SpringServiceImpl update() { return super.update(); } @Override public Sku sku() { return innerModel().sku(); } @Override public SpringApps apps() { return apps; } @Override public SpringServiceCertificates certificates() { return certificates; } @Override public MonitoringSettingProperties getMonitoringSetting() { return getMonitoringSettingAsync().block(); } @Override public Mono<MonitoringSettingProperties> getMonitoringSettingAsync() { return manager().serviceClient().getMonitoringSettings().getAsync(resourceGroupName(), name()) .map(MonitoringSettingResourceInner::properties); } @Override public ConfigServerProperties getServerProperties() { return getServerPropertiesAsync().block(); } @Override public Mono<ConfigServerProperties> getServerPropertiesAsync() { return manager().serviceClient().getConfigServers().getAsync(resourceGroupName(), name()) .map(ConfigServerResourceInner::properties); } @Override public TestKeys listTestKeys() { return listTestKeysAsync().block(); } @Override public Mono<TestKeys> listTestKeysAsync() { return manager().serviceClient().getServices().listTestKeysAsync(resourceGroupName(), name()); } @Override public TestKeys regenerateTestKeys(TestKeyType keyType) { return regenerateTestKeysAsync(keyType).block(); } @Override public Mono<TestKeys> regenerateTestKeysAsync(TestKeyType keyType) { return manager().serviceClient().getServices().regenerateTestKeyAsync(resourceGroupName(), name(), new RegenerateTestKeyRequestPayload().withKeyType(keyType)); } @Override public void disableTestEndpoint() { disableTestEndpointAsync().block(); } @Override public Mono<Void> disableTestEndpointAsync() { return manager().serviceClient().getServices().disableTestEndpointAsync(resourceGroupName(), name()); } @Override public TestKeys enableTestEndpoint() { return enableTestEndpointAsync().block(); } @Override public Mono<TestKeys> enableTestEndpointAsync() { return manager().serviceClient().getServices().enableTestEndpointAsync(resourceGroupName(), name()); } @Override public SpringConfigurationService getDefaultConfigurationService() { return manager().serviceClient().getConfigurationServices().getAsync(resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME) .switchIfEmpty(Mono.empty()) .map(inner -> new SpringConfigurationServiceImpl(inner.name(), this, inner)) .block(); } @Override public SpringConfigurationServices configurationServices() { return this.configurationServices; } @Override public SpringServiceImpl withSku(String skuName) { return withSku(new Sku().withName(skuName)); } 
@Override public SpringServiceImpl withSku(SkuName skuName) { return withSku(skuName.toString()); } @Override public SpringServiceImpl withSku(String skuName, int capacity) { return withSku(new Sku().withName(skuName).withCapacity(capacity)); } @Override public SpringServiceImpl withSku(Sku sku) { innerModel().withSku(sku); if (isInUpdateMode()) { patchToUpdate.withSku(sku); updated = true; } return this; } @Override public SpringServiceImpl withEnterpriseTierSku() { withSku(SkuName.E0); return this; } @Override public SpringServiceImpl withTracing(String appInsightInstrumentationKey) { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync(resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties() .withAppInsightsInstrumentationKey(appInsightInstrumentationKey) .withTraceEnabled(true))) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutTracing() { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync( resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties().withTraceEnabled(false) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUri(String uri) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty().withUri(uri) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUriAndCredential(String uri, String username, String password) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty() .withUri(uri) .withUsername(username) .withPassword(password) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitConfig(ConfigServerGitProperty gitConfig) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty(gitConfig)) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutGitConfig() { if (isEnterpriseTier()) { return withGitConfig((ConfigurationServiceGitProperty) null); } else { return withGitConfig((ConfigServerGitProperty) null); } } @Override public void beforeGroupCreateOrUpdate() { if (configServerTask != null) { this.addPostRunDependent(configServerTask); } if (monitoringSettingTask != null) { this.addPostRunDependent(monitoringSettingTask); } if (isEnterpriseTier()) { if (updateConfigurationServiceTask) { prepareCreateOrUpdateConfigurationService(); } updateConfigurationServiceTask = false; } configServerTask = null; monitoringSettingTask = null; } @Override @Override public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) { clearCache(); if (isGroupFaulted) { return Mono.empty(); } return refreshAsync().then(); } @Override protected Mono<ServiceResourceInner> getInnerAsync() { return 
manager().serviceClient().getServices().getByResourceGroupAsync(resourceGroupName(), name()) .map(inner -> { clearCache(); return inner; }); } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties().withVaultUri(keyVaultUri).withKeyVaultCertName(certNameInKeyVault) ); return this; } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault, String certVersion) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties() .withVaultUri(keyVaultUri) .withKeyVaultCertName(certNameInKeyVault) .withCertVersion(certVersion) ); return this; } @Override public SpringServiceImpl withoutCertificate(String name) { certificates.prepareDelete(name); return this; } @Override public SpringServiceImpl withGitConfig(String uri, String branch, List<String> filePatterns) { return withGitConfigRepository(Constants.DEFAULT_TANZU_COMPONENT_NAME, uri, branch, filePatterns); } @Override public SpringServiceImpl withGitConfigRepository(String name, String uri, String branch, List<String> filePatterns) { if (CoreUtils.isNullOrEmpty(name)) { return this; } this.gitRepositoryMap.computeIfAbsent(name, key -> new ConfigurationServiceGitRepository() .withName(name) .withUri(uri) .withPatterns(filePatterns) .withLabel(branch) ); updateConfigurationServiceTask = true; return this; } @Override public SpringServiceImpl withGitConfig(ConfigurationServiceGitProperty gitConfig) { gitRepositoryMap.clear(); if (gitConfig != null && CoreUtils.isNullOrEmpty(gitConfig.repositories())) { for (ConfigurationServiceGitRepository repository : gitConfig.repositories()) { this.gitRepositoryMap.put(repository.name(), repository); } } updateConfigurationServiceTask = true; return this; } private void prepareCreateOrUpdateConfigurationService() { List<ConfigurationServiceGitRepository> repositories = new ArrayList<>(this.gitRepositoryMap.values()); this.configurationServices.prepareCreateOrUpdate(new ConfigurationServiceGitProperty().withRepositories(repositories)); } private boolean isInUpdateMode() { return !isInCreateMode(); } boolean isEnterpriseTier() { return innerModel().sku() != null && SkuName.E0.toString().equals(innerModel().sku().name()); } private void clearCache() { this.gitRepositoryMap.clear(); this.configurationServices.clear(); } }
class SpringServiceImpl extends GroupableResourceImpl<SpringService, ServiceResourceInner, SpringServiceImpl, AppPlatformManager> implements SpringService, SpringService.Definition, SpringService.Update { private final SpringServiceCertificatesImpl certificates = new SpringServiceCertificatesImpl(this); private final SpringAppsImpl apps = new SpringAppsImpl(this); private final SpringConfigurationServicesImpl configurationServices = new SpringConfigurationServicesImpl(this); private FunctionalTaskItem configServerTask = null; private FunctionalTaskItem monitoringSettingTask = null; private ServiceResourceInner patchToUpdate = new ServiceResourceInner(); private boolean updated; private final ConfigurationServiceConfig configurationServiceConfig = new ConfigurationServiceConfig(); SpringServiceImpl(String name, ServiceResourceInner innerObject, AppPlatformManager manager) { super(name, innerObject, manager); } @Override public SpringServiceImpl update() { return super.update(); } @Override public Sku sku() { return innerModel().sku(); } @Override public SpringApps apps() { return apps; } @Override public SpringServiceCertificates certificates() { return certificates; } @Override public MonitoringSettingProperties getMonitoringSetting() { return getMonitoringSettingAsync().block(); } @Override public Mono<MonitoringSettingProperties> getMonitoringSettingAsync() { return manager().serviceClient().getMonitoringSettings().getAsync(resourceGroupName(), name()) .map(MonitoringSettingResourceInner::properties); } @Override public ConfigServerProperties getServerProperties() { return getServerPropertiesAsync().block(); } @Override public Mono<ConfigServerProperties> getServerPropertiesAsync() { return manager().serviceClient().getConfigServers().getAsync(resourceGroupName(), name()) .map(ConfigServerResourceInner::properties); } @Override public TestKeys listTestKeys() { return listTestKeysAsync().block(); } @Override public Mono<TestKeys> listTestKeysAsync() { return manager().serviceClient().getServices().listTestKeysAsync(resourceGroupName(), name()); } @Override public TestKeys regenerateTestKeys(TestKeyType keyType) { return regenerateTestKeysAsync(keyType).block(); } @Override public Mono<TestKeys> regenerateTestKeysAsync(TestKeyType keyType) { return manager().serviceClient().getServices().regenerateTestKeyAsync(resourceGroupName(), name(), new RegenerateTestKeyRequestPayload().withKeyType(keyType)); } @Override public void disableTestEndpoint() { disableTestEndpointAsync().block(); } @Override public Mono<Void> disableTestEndpointAsync() { return manager().serviceClient().getServices().disableTestEndpointAsync(resourceGroupName(), name()); } @Override public TestKeys enableTestEndpoint() { return enableTestEndpointAsync().block(); } @Override public Mono<TestKeys> enableTestEndpointAsync() { return manager().serviceClient().getServices().enableTestEndpointAsync(resourceGroupName(), name()); } @Override public SpringConfigurationService getDefaultConfigurationService() { return manager().serviceClient().getConfigurationServices().getAsync(resourceGroupName(), name(), Constants.DEFAULT_TANZU_COMPONENT_NAME) .switchIfEmpty(Mono.empty()) .map(inner -> new SpringConfigurationServiceImpl(inner.name(), this, inner)) .block(); } @Override public SpringServiceImpl withSku(String skuName) { return withSku(new Sku().withName(skuName)); } @Override public SpringServiceImpl withSku(SkuName skuName) { return withSku(skuName.toString()); } @Override public SpringServiceImpl withSku(String skuName, int 
capacity) { return withSku(new Sku().withName(skuName).withCapacity(capacity)); } @Override public SpringServiceImpl withSku(Sku sku) { innerModel().withSku(sku); if (isInUpdateMode()) { patchToUpdate.withSku(sku); updated = true; } return this; } @Override public SpringServiceImpl withEnterpriseTierSku() { withSku(SkuName.E0); return this; } @Override public SpringServiceImpl withTracing(String appInsightInstrumentationKey) { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync(resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties() .withAppInsightsInstrumentationKey(appInsightInstrumentationKey) .withTraceEnabled(true))) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutTracing() { monitoringSettingTask = context -> manager().serviceClient().getMonitoringSettings() .updatePatchAsync( resourceGroupName(), name(), new MonitoringSettingResourceInner().withProperties( new MonitoringSettingProperties().withTraceEnabled(false) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUri(String uri) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty().withUri(uri) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitUriAndCredential(String uri, String username, String password) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty( new ConfigServerGitProperty() .withUri(uri) .withUsername(username) .withPassword(password) )) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withGitConfig(ConfigServerGitProperty gitConfig) { configServerTask = context -> manager().serviceClient().getConfigServers() .updatePatchAsync(resourceGroupName(), name(), new ConfigServerResourceInner().withProperties( new ConfigServerProperties() .withConfigServer(new ConfigServerSettings().withGitProperty(gitConfig)) )) .then(context.voidMono()); return this; } @Override public SpringServiceImpl withoutGitConfig() { return withGitConfig(null); } @Override public void beforeGroupCreateOrUpdate() { if (configServerTask != null) { this.addPostRunDependent(configServerTask); } if (monitoringSettingTask != null) { this.addPostRunDependent(monitoringSettingTask); } if (isEnterpriseTier()) { if (isInCreateMode() || configurationServiceConfig.needUpdate()) { prepareCreateOrUpdateConfigurationService(); configurationServiceConfig.clearUpdate(); } } configServerTask = null; monitoringSettingTask = null; } @Override @Override public Mono<Void> afterPostRunAsync(boolean isGroupFaulted) { return Mono .just(true) .map( ignored -> { clearCache(); return ignored; }) .then(); } @Override protected Mono<ServiceResourceInner> getInnerAsync() { return manager().serviceClient().getServices().getByResourceGroupAsync(resourceGroupName(), name()) .map(inner -> { clearCache(); return inner; }); } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault) { certificates.prepareCreateOrUpdate( name, new 
KeyVaultCertificateProperties().withVaultUri(keyVaultUri).withKeyVaultCertName(certNameInKeyVault) ); return this; } @Override public SpringServiceImpl withCertificate(String name, String keyVaultUri, String certNameInKeyVault, String certVersion) { certificates.prepareCreateOrUpdate( name, new KeyVaultCertificateProperties() .withVaultUri(keyVaultUri) .withKeyVaultCertName(certNameInKeyVault) .withCertVersion(certVersion) ); return this; } @Override public SpringServiceImpl withoutCertificate(String name) { certificates.prepareDelete(name); return this; } @Override public SpringServiceImpl withDefaultGitRepository(String uri, String branch, List<String> filePatterns) { return withGitRepository(Constants.DEFAULT_TANZU_COMPONENT_NAME, uri, branch, filePatterns); } @Override public SpringServiceImpl withGitRepository(String name, String uri, String branch, List<String> filePatterns) { if (CoreUtils.isNullOrEmpty(name)) { return this; } this.configurationServiceConfig.addRepository( new ConfigurationServiceGitRepository() .withName(name) .withUri(uri) .withPatterns(filePatterns) .withLabel(branch)); return this; } @Override public SpringServiceImpl withGitRepositoryConfig(ConfigurationServiceGitProperty gitConfig) { this.configurationServiceConfig.clearRepositories(); if (gitConfig != null && !CoreUtils.isNullOrEmpty(gitConfig.repositories())) { for (ConfigurationServiceGitRepository repository : gitConfig.repositories()) { this.configurationServiceConfig.addRepository(repository); } } return this; } @Override public SpringServiceImpl withoutGitRepository(String name) { this.configurationServiceConfig.removeRepository(name); return this; } @Override public SpringServiceImpl withoutGitRepositories() { this.configurationServiceConfig.clearRepositories(); return this; } private void prepareCreateOrUpdateConfigurationService() { List<ConfigurationServiceGitRepository> repositories = this.configurationServiceConfig.mergeRepositories(); this.configurationServices.prepareCreateOrUpdate(new ConfigurationServiceGitProperty().withRepositories(repositories)); } private boolean isInUpdateMode() { return !isInCreateMode(); } boolean isEnterpriseTier() { return innerModel().sku() != null && SkuName.E0.toString().equals(innerModel().sku().name()); } private void clearCache() { this.configurationServices.clear(); this.configurationServiceConfig.reset(); } private class ConfigurationServiceConfig { private final Map<String, ConfigurationServiceGitRepository> gitRepositoryMap = new ConcurrentHashMap<>(); private final Set<String> repositoriesToDelete = new HashSet<>(); private boolean update; private boolean clearRepositories; boolean needUpdate() { return update; } public void clearUpdate() { this.update = false; } void reset() { this.gitRepositoryMap.clear(); this.update = false; this.repositoriesToDelete.clear(); this.clearRepositories = false; } public void addRepository(ConfigurationServiceGitRepository repository) { this.gitRepositoryMap.putIfAbsent(repository.name(), repository); this.update = true; } public void clearRepositories() { this.gitRepositoryMap.clear(); this.clearRepositories = true; this.update = true; } public void removeRepository(String name) { this.repositoriesToDelete.add(name); this.update = true; } public List<ConfigurationServiceGitRepository> mergeRepositories() { if (this.clearRepositories) { return new ArrayList<>(this.gitRepositoryMap.values()); } else { Map<String, ConfigurationServiceGitRepository> existingGitRepositories = new HashMap<>(); if (isInUpdateMode()) { 
SpringConfigurationService configurationService = getDefaultConfigurationService(); if (configurationService != null) { List<ConfigurationServiceGitRepository> repositoryList = configurationService.innerModel().properties().settings() == null ? Collections.emptyList() : configurationService.innerModel().properties().settings().gitProperty().repositories(); if (repositoryList != null) { repositoryList.forEach(repository -> existingGitRepositories.put(repository.name(), repository)); } } } existingGitRepositories.putAll(gitRepositoryMap); for (String repositoryToDelete : repositoriesToDelete) { existingGitRepositories.remove(repositoryToDelete); } return new ArrayList<>(existingGitRepositories.values()); } } } }
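The question above concerns the hard-coded `S1` agent-pool size (and the extra PUT it costs on Enterprise tier). A minimal sketch of how that size could be lifted out of createResourceAsync into a configurable field; the constant, the field, and the withBuildAgentPoolSize wither are illustrative assumptions for a possible follow-up, not part of this PR:

// Hypothetical fragment for SpringServiceImpl: keep "S1" as the default but let callers override it.
private static final String DEFAULT_BUILD_AGENT_POOL_SIZE = "S1";
private String buildAgentPoolSize = DEFAULT_BUILD_AGENT_POOL_SIZE;

public SpringServiceImpl withBuildAgentPoolSize(String poolSizeName) { // not in the current API
    this.buildAgentPoolSize = poolSizeName;
    return this;
}

private BuildServiceAgentPoolResourceInner defaultAgentPoolResource() {
    // Same payload createResourceAsync sends today, with the pool size parameterized instead of inlined.
    return new BuildServiceAgentPoolResourceInner()
        .withProperties(new BuildServiceAgentPoolProperties()
            .withPoolSize(new BuildServiceAgentPoolSizeProperties()
                .withName(buildAgentPoolSize)));
}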
remove the trailing `.`
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { if (!isKeyVaultClientOnClasspath()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "com.azure:azure-security-keyvault-secrets doesn't exist in classpath.")); return; } final AzureKeyVaultSecretProperties secretProperties = loadProperties(environment); if (!secretProperties.isPropertySourceEnabled()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-source-enabled=false")); return; } if (secretProperties.getPropertySources().isEmpty()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources is empty.")); return; } final List<AzureKeyVaultPropertySourceProperties> propertiesList = secretProperties.getPropertySources(); List<KeyVaultPropertySource> keyVaultPropertySources = buildKeyVaultPropertySourceList(propertiesList); final MutablePropertySources propertySources = environment.getPropertySources(); for (int i = keyVaultPropertySources.size() - 1; i >= 0; i--) { KeyVaultPropertySource propertySource = keyVaultPropertySources.get(i); logger.debug("Inserting Key Vault PropertySource. name = " + propertySource.getName()); if (propertySources.contains(SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME)) { propertySources.addAfter(SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME, propertySource); } else { propertySources.addFirst(propertySource); } } }
logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "com.azure:azure-security-keyvault-secrets doesn't exist in classpath."));
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { if (!isKeyVaultClientOnClasspath()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "com.azure:azure-security-keyvault-secrets doesn't exist in classpath")); return; } final AzureKeyVaultSecretProperties secretProperties = loadProperties(environment); if (!secretProperties.isPropertySourceEnabled()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-source-enabled=false")); return; } if (secretProperties.getPropertySources().isEmpty()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources is empty")); return; } final List<AzureKeyVaultPropertySourceProperties> propertiesList = secretProperties.getPropertySources(); List<KeyVaultPropertySource> keyVaultPropertySources = buildKeyVaultPropertySourceList(propertiesList); final MutablePropertySources propertySources = environment.getPropertySources(); for (int i = keyVaultPropertySources.size() - 1; i >= 0; i--) { KeyVaultPropertySource propertySource = keyVaultPropertySources.get(i); logger.debug("Inserting Key Vault PropertySource. name = " + propertySource.getName()); if (propertySources.contains(SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME)) { propertySources.addAfter(SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME, propertySource); } else { propertySources.addFirst(propertySource); } } }
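For context on the comment above: SKIP_CONFIGURE_REASON_FORMAT already ends with a period, so a reason string that also ends with `.` gets logged with a doubled `..`; a small illustration (the before/after locals are just for demonstration):

// SKIP_CONFIGURE_REASON_FORMAT = "Skip configuring Key Vault PropertySource because %s."
String before = String.format(SKIP_CONFIGURE_REASON_FORMAT,
    "spring.cloud.azure.keyvault.secret.property-sources is empty.");
// -> "Skip configuring Key Vault PropertySource because spring.cloud.azure.keyvault.secret.property-sources is empty.."
String after = String.format(SKIP_CONFIGURE_REASON_FORMAT,
    "spring.cloud.azure.keyvault.secret.property-sources is empty");
// -> "Skip configuring Key Vault PropertySource because spring.cloud.azure.keyvault.secret.property-sources is empty."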
class KeyVaultEnvironmentPostProcessor implements EnvironmentPostProcessor, Ordered { public static final int ORDER = ConfigDataEnvironmentPostProcessor.ORDER + 1; private static final String SKIP_CONFIGURE_REASON_FORMAT = "Skip configuring Key Vault PropertySource because %s."; private final Log logger; /** * Creates a new instance of {@link KeyVaultEnvironmentPostProcessor}. * @param logger The logger used in this class. */ public KeyVaultEnvironmentPostProcessor(Log logger) { this.logger = logger; } /** * Construct a {@link KeyVaultEnvironmentPostProcessor} instance with a new {@link DeferredLog}. */ public KeyVaultEnvironmentPostProcessor() { this.logger = new DeferredLog(); } /** * Construct {@link KeyVaultPropertySource}s according to {@link AzureKeyVaultSecretProperties}, * then insert these {@link KeyVaultPropertySource}s into {@link ConfigurableEnvironment}. * * @param environment the environment. * @param application the application. */ @Override private List<KeyVaultPropertySource> buildKeyVaultPropertySourceList( List<AzureKeyVaultPropertySourceProperties> propertiesList) { List<KeyVaultPropertySource> propertySources = new ArrayList<>(); for (int i = 0; i < propertiesList.size(); i++) { AzureKeyVaultPropertySourceProperties properties = propertiesList.get(i); if (!properties.isEnabled()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources[" + i + "].enabled = false.")); continue; } if (!StringUtils.hasText(properties.getEndpoint())) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources[" + i + "].endpoint is empty.")); continue; } propertySources.add(buildKeyVaultPropertySource(properties)); } return propertySources; } private KeyVaultPropertySource buildKeyVaultPropertySource( AzureKeyVaultPropertySourceProperties properties) { try { final KeyVaultOperation keyVaultOperation = new KeyVaultOperation( buildSecretClient(properties), properties.getRefreshInterval(), properties.getSecretKeys(), properties.isCaseSensitive()); return new KeyVaultPropertySource(properties.getName(), keyVaultOperation); } catch (final Exception exception) { throw new IllegalStateException("Failed to configure KeyVault property source", exception); } } private SecretClient buildSecretClient(AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultSecretProperties secretProperties = toAzureKeyVaultSecretProperties(propertySourceProperties); return buildSecretClient(secretProperties); } private AzureKeyVaultSecretProperties toAzureKeyVaultSecretProperties( AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultSecretProperties secretProperties = new AzureKeyVaultSecretProperties(); AzurePropertiesUtils.copyAzureCommonProperties(propertySourceProperties, secretProperties); secretProperties.setEndpoint(propertySourceProperties.getEndpoint()); secretProperties.setServiceVersion(propertySourceProperties.getServiceVersion()); return secretProperties; } /** * Build a KeyVault Secret client * @param secretProperties secret properties * @return secret client */ SecretClient buildSecretClient(AzureKeyVaultSecretProperties secretProperties) { return new SecretClientBuilderFactory(secretProperties).build().buildClient(); } AzureKeyVaultSecretProperties loadProperties(ConfigurableEnvironment environment) { Binder binder = Binder.get(environment); AzureGlobalProperties globalProperties = binder .bind(AzureGlobalProperties.PREFIX, 
Bindable.of(AzureGlobalProperties.class)) .orElseGet(AzureGlobalProperties::new); AzureKeyVaultSecretProperties secretProperties = binder .bind(AzureKeyVaultSecretProperties.PREFIX, Bindable.of(AzureKeyVaultSecretProperties.class)) .orElseGet(AzureKeyVaultSecretProperties::new); List<AzureKeyVaultPropertySourceProperties> list = secretProperties.getPropertySources(); for (int i = 0; i < list.size(); i++) { list.set(i, buildMergedProperties(globalProperties, list.get(i))); } for (int i = 0; i < list.size(); i++) { AzureKeyVaultPropertySourceProperties propertySourceProperties = list.get(i); if (!StringUtils.hasText(propertySourceProperties.getName())) { propertySourceProperties.setName(buildPropertySourceName(i)); } } return secretProperties; } private AzureKeyVaultPropertySourceProperties buildMergedProperties( AzureGlobalProperties globalProperties, AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultPropertySourceProperties mergedProperties = new AzureKeyVaultPropertySourceProperties(); AzurePropertiesUtils.mergeAzureCommonProperties(globalProperties, propertySourceProperties, mergedProperties); mergedProperties.setEnabled(propertySourceProperties.isEnabled()); mergedProperties.setName(propertySourceProperties.getName()); mergedProperties.setEndpoint(propertySourceProperties.getEndpoint()); mergedProperties.setServiceVersion(propertySourceProperties.getServiceVersion()); mergedProperties.setCaseSensitive(propertySourceProperties.isCaseSensitive()); mergedProperties.setSecretKeys(propertySourceProperties.getSecretKeys()); mergedProperties.setRefreshInterval(propertySourceProperties.getRefreshInterval()); return mergedProperties; } String buildPropertySourceName(int index) { return "azure-key-vault-secret-property-source-" + index; } private boolean isKeyVaultClientOnClasspath() { return ClassUtils.isPresent("com.azure.security.keyvault.secrets.SecretClient", KeyVaultEnvironmentPostProcessor.class.getClassLoader()); } /** * Get the order value of this object. * @return The order value. */ @Override public int getOrder() { return ORDER; } }
class KeyVaultEnvironmentPostProcessor implements EnvironmentPostProcessor, Ordered { public static final int ORDER = ConfigDataEnvironmentPostProcessor.ORDER + 1; private static final String SKIP_CONFIGURE_REASON_FORMAT = "Skip configuring Key Vault PropertySource because %s."; private final Log logger; /** * Creates a new instance of {@link KeyVaultEnvironmentPostProcessor}. * @param logger The logger used in this class. */ public KeyVaultEnvironmentPostProcessor(Log logger) { this.logger = logger; } /** * Construct a {@link KeyVaultEnvironmentPostProcessor} instance with a new {@link DeferredLog}. */ public KeyVaultEnvironmentPostProcessor() { this.logger = new DeferredLog(); } /** * Construct {@link KeyVaultPropertySource}s according to {@link AzureKeyVaultSecretProperties}, * then insert these {@link KeyVaultPropertySource}s into {@link ConfigurableEnvironment}. * * @param environment the environment. * @param application the application. */ @Override private List<KeyVaultPropertySource> buildKeyVaultPropertySourceList( List<AzureKeyVaultPropertySourceProperties> propertiesList) { List<KeyVaultPropertySource> propertySources = new ArrayList<>(); for (int i = 0; i < propertiesList.size(); i++) { AzureKeyVaultPropertySourceProperties properties = propertiesList.get(i); if (!properties.isEnabled()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources[" + i + "].enabled = false")); continue; } if (!StringUtils.hasText(properties.getEndpoint())) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources[" + i + "].endpoint is empty")); continue; } propertySources.add(buildKeyVaultPropertySource(properties)); } return propertySources; } private KeyVaultPropertySource buildKeyVaultPropertySource( AzureKeyVaultPropertySourceProperties properties) { try { final KeyVaultOperation keyVaultOperation = new KeyVaultOperation( buildSecretClient(properties), properties.getRefreshInterval(), properties.getSecretKeys(), properties.isCaseSensitive()); return new KeyVaultPropertySource(properties.getName(), keyVaultOperation); } catch (final Exception exception) { throw new IllegalStateException("Failed to configure KeyVault property source", exception); } } private SecretClient buildSecretClient(AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultSecretProperties secretProperties = toAzureKeyVaultSecretProperties(propertySourceProperties); return buildSecretClient(secretProperties); } private AzureKeyVaultSecretProperties toAzureKeyVaultSecretProperties( AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultSecretProperties secretProperties = new AzureKeyVaultSecretProperties(); AzurePropertiesUtils.copyAzureCommonProperties(propertySourceProperties, secretProperties); secretProperties.setEndpoint(propertySourceProperties.getEndpoint()); secretProperties.setServiceVersion(propertySourceProperties.getServiceVersion()); return secretProperties; } /** * Build a KeyVault Secret client * @param secretProperties secret properties * @return secret client */ SecretClient buildSecretClient(AzureKeyVaultSecretProperties secretProperties) { return new SecretClientBuilderFactory(secretProperties).build().buildClient(); } AzureKeyVaultSecretProperties loadProperties(ConfigurableEnvironment environment) { Binder binder = Binder.get(environment); AzureGlobalProperties globalProperties = binder .bind(AzureGlobalProperties.PREFIX, 
Bindable.of(AzureGlobalProperties.class)) .orElseGet(AzureGlobalProperties::new); AzureKeyVaultSecretProperties secretProperties = binder .bind(AzureKeyVaultSecretProperties.PREFIX, Bindable.of(AzureKeyVaultSecretProperties.class)) .orElseGet(AzureKeyVaultSecretProperties::new); List<AzureKeyVaultPropertySourceProperties> list = secretProperties.getPropertySources(); for (int i = 0; i < list.size(); i++) { list.set(i, buildMergedProperties(globalProperties, list.get(i))); } for (int i = 0; i < list.size(); i++) { AzureKeyVaultPropertySourceProperties propertySourceProperties = list.get(i); if (!StringUtils.hasText(propertySourceProperties.getName())) { propertySourceProperties.setName(buildPropertySourceName(i)); } } return secretProperties; } private AzureKeyVaultPropertySourceProperties buildMergedProperties( AzureGlobalProperties globalProperties, AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultPropertySourceProperties mergedProperties = new AzureKeyVaultPropertySourceProperties(); AzurePropertiesUtils.mergeAzureCommonProperties(globalProperties, propertySourceProperties, mergedProperties); mergedProperties.setEnabled(propertySourceProperties.isEnabled()); mergedProperties.setName(propertySourceProperties.getName()); mergedProperties.setEndpoint(propertySourceProperties.getEndpoint()); mergedProperties.setServiceVersion(propertySourceProperties.getServiceVersion()); mergedProperties.setCaseSensitive(propertySourceProperties.isCaseSensitive()); mergedProperties.setSecretKeys(propertySourceProperties.getSecretKeys()); mergedProperties.setRefreshInterval(propertySourceProperties.getRefreshInterval()); return mergedProperties; } String buildPropertySourceName(int index) { return "azure-key-vault-secret-property-source-" + index; } private boolean isKeyVaultClientOnClasspath() { return ClassUtils.isPresent("com.azure.security.keyvault.secrets.SecretClient", KeyVaultEnvironmentPostProcessor.class.getClassLoader()); } /** * Get the order value of this object. * @return The order value. */ @Override public int getOrder() { return ORDER; } }
Same here: the trailing period can be dropped, since the `SKIP_CONFIGURE_REASON_FORMAT` format string already ends the message with one.
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { if (!isKeyVaultClientOnClasspath()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "com.azure:azure-security-keyvault-secrets doesn't exist in classpath.")); return; } final AzureKeyVaultSecretProperties secretProperties = loadProperties(environment); if (!secretProperties.isPropertySourceEnabled()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-source-enabled=false")); return; } if (secretProperties.getPropertySources().isEmpty()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources is empty.")); return; } final List<AzureKeyVaultPropertySourceProperties> propertiesList = secretProperties.getPropertySources(); List<KeyVaultPropertySource> keyVaultPropertySources = buildKeyVaultPropertySourceList(propertiesList); final MutablePropertySources propertySources = environment.getPropertySources(); for (int i = keyVaultPropertySources.size() - 1; i >= 0; i--) { KeyVaultPropertySource propertySource = keyVaultPropertySources.get(i); logger.debug("Inserting Key Vault PropertySource. name = " + propertySource.getName()); if (propertySources.contains(SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME)) { propertySources.addAfter(SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME, propertySource); } else { propertySources.addFirst(propertySource); } } }
logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources is empty."));
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) { if (!isKeyVaultClientOnClasspath()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "com.azure:azure-security-keyvault-secrets doesn't exist in classpath")); return; } final AzureKeyVaultSecretProperties secretProperties = loadProperties(environment); if (!secretProperties.isPropertySourceEnabled()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-source-enabled=false")); return; } if (secretProperties.getPropertySources().isEmpty()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources is empty")); return; } final List<AzureKeyVaultPropertySourceProperties> propertiesList = secretProperties.getPropertySources(); List<KeyVaultPropertySource> keyVaultPropertySources = buildKeyVaultPropertySourceList(propertiesList); final MutablePropertySources propertySources = environment.getPropertySources(); for (int i = keyVaultPropertySources.size() - 1; i >= 0; i--) { KeyVaultPropertySource propertySource = keyVaultPropertySources.get(i); logger.debug("Inserting Key Vault PropertySource. name = " + propertySource.getName()); if (propertySources.contains(SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME)) { propertySources.addAfter(SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME, propertySource); } else { propertySources.addFirst(propertySource); } } }
class KeyVaultEnvironmentPostProcessor implements EnvironmentPostProcessor, Ordered { public static final int ORDER = ConfigDataEnvironmentPostProcessor.ORDER + 1; private static final String SKIP_CONFIGURE_REASON_FORMAT = "Skip configuring Key Vault PropertySource because %s."; private final Log logger; /** * Creates a new instance of {@link KeyVaultEnvironmentPostProcessor}. * @param logger The logger used in this class. */ public KeyVaultEnvironmentPostProcessor(Log logger) { this.logger = logger; } /** * Construct a {@link KeyVaultEnvironmentPostProcessor} instance with a new {@link DeferredLog}. */ public KeyVaultEnvironmentPostProcessor() { this.logger = new DeferredLog(); } /** * Construct {@link KeyVaultPropertySource}s according to {@link AzureKeyVaultSecretProperties}, * then insert these {@link KeyVaultPropertySource}s into {@link ConfigurableEnvironment}. * * @param environment the environment. * @param application the application. */ @Override private List<KeyVaultPropertySource> buildKeyVaultPropertySourceList( List<AzureKeyVaultPropertySourceProperties> propertiesList) { List<KeyVaultPropertySource> propertySources = new ArrayList<>(); for (int i = 0; i < propertiesList.size(); i++) { AzureKeyVaultPropertySourceProperties properties = propertiesList.get(i); if (!properties.isEnabled()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources[" + i + "].enabled = false.")); continue; } if (!StringUtils.hasText(properties.getEndpoint())) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources[" + i + "].endpoint is empty.")); continue; } propertySources.add(buildKeyVaultPropertySource(properties)); } return propertySources; } private KeyVaultPropertySource buildKeyVaultPropertySource( AzureKeyVaultPropertySourceProperties properties) { try { final KeyVaultOperation keyVaultOperation = new KeyVaultOperation( buildSecretClient(properties), properties.getRefreshInterval(), properties.getSecretKeys(), properties.isCaseSensitive()); return new KeyVaultPropertySource(properties.getName(), keyVaultOperation); } catch (final Exception exception) { throw new IllegalStateException("Failed to configure KeyVault property source", exception); } } private SecretClient buildSecretClient(AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultSecretProperties secretProperties = toAzureKeyVaultSecretProperties(propertySourceProperties); return buildSecretClient(secretProperties); } private AzureKeyVaultSecretProperties toAzureKeyVaultSecretProperties( AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultSecretProperties secretProperties = new AzureKeyVaultSecretProperties(); AzurePropertiesUtils.copyAzureCommonProperties(propertySourceProperties, secretProperties); secretProperties.setEndpoint(propertySourceProperties.getEndpoint()); secretProperties.setServiceVersion(propertySourceProperties.getServiceVersion()); return secretProperties; } /** * Build a KeyVault Secret client * @param secretProperties secret properties * @return secret client */ SecretClient buildSecretClient(AzureKeyVaultSecretProperties secretProperties) { return new SecretClientBuilderFactory(secretProperties).build().buildClient(); } AzureKeyVaultSecretProperties loadProperties(ConfigurableEnvironment environment) { Binder binder = Binder.get(environment); AzureGlobalProperties globalProperties = binder .bind(AzureGlobalProperties.PREFIX, 
Bindable.of(AzureGlobalProperties.class)) .orElseGet(AzureGlobalProperties::new); AzureKeyVaultSecretProperties secretProperties = binder .bind(AzureKeyVaultSecretProperties.PREFIX, Bindable.of(AzureKeyVaultSecretProperties.class)) .orElseGet(AzureKeyVaultSecretProperties::new); List<AzureKeyVaultPropertySourceProperties> list = secretProperties.getPropertySources(); for (int i = 0; i < list.size(); i++) { list.set(i, buildMergedProperties(globalProperties, list.get(i))); } for (int i = 0; i < list.size(); i++) { AzureKeyVaultPropertySourceProperties propertySourceProperties = list.get(i); if (!StringUtils.hasText(propertySourceProperties.getName())) { propertySourceProperties.setName(buildPropertySourceName(i)); } } return secretProperties; } private AzureKeyVaultPropertySourceProperties buildMergedProperties( AzureGlobalProperties globalProperties, AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultPropertySourceProperties mergedProperties = new AzureKeyVaultPropertySourceProperties(); AzurePropertiesUtils.mergeAzureCommonProperties(globalProperties, propertySourceProperties, mergedProperties); mergedProperties.setEnabled(propertySourceProperties.isEnabled()); mergedProperties.setName(propertySourceProperties.getName()); mergedProperties.setEndpoint(propertySourceProperties.getEndpoint()); mergedProperties.setServiceVersion(propertySourceProperties.getServiceVersion()); mergedProperties.setCaseSensitive(propertySourceProperties.isCaseSensitive()); mergedProperties.setSecretKeys(propertySourceProperties.getSecretKeys()); mergedProperties.setRefreshInterval(propertySourceProperties.getRefreshInterval()); return mergedProperties; } String buildPropertySourceName(int index) { return "azure-key-vault-secret-property-source-" + index; } private boolean isKeyVaultClientOnClasspath() { return ClassUtils.isPresent("com.azure.security.keyvault.secrets.SecretClient", KeyVaultEnvironmentPostProcessor.class.getClassLoader()); } /** * Get the order value of this object. * @return The order value. */ @Override public int getOrder() { return ORDER; } }
class KeyVaultEnvironmentPostProcessor implements EnvironmentPostProcessor, Ordered { public static final int ORDER = ConfigDataEnvironmentPostProcessor.ORDER + 1; private static final String SKIP_CONFIGURE_REASON_FORMAT = "Skip configuring Key Vault PropertySource because %s."; private final Log logger; /** * Creates a new instance of {@link KeyVaultEnvironmentPostProcessor}. * @param logger The logger used in this class. */ public KeyVaultEnvironmentPostProcessor(Log logger) { this.logger = logger; } /** * Construct a {@link KeyVaultEnvironmentPostProcessor} instance with a new {@link DeferredLog}. */ public KeyVaultEnvironmentPostProcessor() { this.logger = new DeferredLog(); } /** * Construct {@link KeyVaultPropertySource}s according to {@link AzureKeyVaultSecretProperties}, * then insert these {@link KeyVaultPropertySource}s into {@link ConfigurableEnvironment}. * * @param environment the environment. * @param application the application. */ @Override private List<KeyVaultPropertySource> buildKeyVaultPropertySourceList( List<AzureKeyVaultPropertySourceProperties> propertiesList) { List<KeyVaultPropertySource> propertySources = new ArrayList<>(); for (int i = 0; i < propertiesList.size(); i++) { AzureKeyVaultPropertySourceProperties properties = propertiesList.get(i); if (!properties.isEnabled()) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources[" + i + "].enabled = false")); continue; } if (!StringUtils.hasText(properties.getEndpoint())) { logger.debug(String.format(SKIP_CONFIGURE_REASON_FORMAT, "spring.cloud.azure.keyvault.secret.property-sources[" + i + "].endpoint is empty")); continue; } propertySources.add(buildKeyVaultPropertySource(properties)); } return propertySources; } private KeyVaultPropertySource buildKeyVaultPropertySource( AzureKeyVaultPropertySourceProperties properties) { try { final KeyVaultOperation keyVaultOperation = new KeyVaultOperation( buildSecretClient(properties), properties.getRefreshInterval(), properties.getSecretKeys(), properties.isCaseSensitive()); return new KeyVaultPropertySource(properties.getName(), keyVaultOperation); } catch (final Exception exception) { throw new IllegalStateException("Failed to configure KeyVault property source", exception); } } private SecretClient buildSecretClient(AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultSecretProperties secretProperties = toAzureKeyVaultSecretProperties(propertySourceProperties); return buildSecretClient(secretProperties); } private AzureKeyVaultSecretProperties toAzureKeyVaultSecretProperties( AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultSecretProperties secretProperties = new AzureKeyVaultSecretProperties(); AzurePropertiesUtils.copyAzureCommonProperties(propertySourceProperties, secretProperties); secretProperties.setEndpoint(propertySourceProperties.getEndpoint()); secretProperties.setServiceVersion(propertySourceProperties.getServiceVersion()); return secretProperties; } /** * Build a KeyVault Secret client * @param secretProperties secret properties * @return secret client */ SecretClient buildSecretClient(AzureKeyVaultSecretProperties secretProperties) { return new SecretClientBuilderFactory(secretProperties).build().buildClient(); } AzureKeyVaultSecretProperties loadProperties(ConfigurableEnvironment environment) { Binder binder = Binder.get(environment); AzureGlobalProperties globalProperties = binder .bind(AzureGlobalProperties.PREFIX, 
Bindable.of(AzureGlobalProperties.class)) .orElseGet(AzureGlobalProperties::new); AzureKeyVaultSecretProperties secretProperties = binder .bind(AzureKeyVaultSecretProperties.PREFIX, Bindable.of(AzureKeyVaultSecretProperties.class)) .orElseGet(AzureKeyVaultSecretProperties::new); List<AzureKeyVaultPropertySourceProperties> list = secretProperties.getPropertySources(); for (int i = 0; i < list.size(); i++) { list.set(i, buildMergedProperties(globalProperties, list.get(i))); } for (int i = 0; i < list.size(); i++) { AzureKeyVaultPropertySourceProperties propertySourceProperties = list.get(i); if (!StringUtils.hasText(propertySourceProperties.getName())) { propertySourceProperties.setName(buildPropertySourceName(i)); } } return secretProperties; } private AzureKeyVaultPropertySourceProperties buildMergedProperties( AzureGlobalProperties globalProperties, AzureKeyVaultPropertySourceProperties propertySourceProperties) { AzureKeyVaultPropertySourceProperties mergedProperties = new AzureKeyVaultPropertySourceProperties(); AzurePropertiesUtils.mergeAzureCommonProperties(globalProperties, propertySourceProperties, mergedProperties); mergedProperties.setEnabled(propertySourceProperties.isEnabled()); mergedProperties.setName(propertySourceProperties.getName()); mergedProperties.setEndpoint(propertySourceProperties.getEndpoint()); mergedProperties.setServiceVersion(propertySourceProperties.getServiceVersion()); mergedProperties.setCaseSensitive(propertySourceProperties.isCaseSensitive()); mergedProperties.setSecretKeys(propertySourceProperties.getSecretKeys()); mergedProperties.setRefreshInterval(propertySourceProperties.getRefreshInterval()); return mergedProperties; } String buildPropertySourceName(int index) { return "azure-key-vault-secret-property-source-" + index; } private boolean isKeyVaultClientOnClasspath() { return ClassUtils.isPresent("com.azure.security.keyvault.secrets.SecretClient", KeyVaultEnvironmentPostProcessor.class.getClassLoader()); } /** * Get the order value of this object. * @return The order value. */ @Override public int getOrder() { return ORDER; } }
TODO: all unknown properties should skipChildren. Either the current token is a simple value (boolean, null, number, or string), in which case this is a no-op and the progression in the next loop iteration will move past it, or the value points to a sub-object or array, which needs recursive skipping. There is a big open question about how this will be handled when the class has additional properties.
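A minimal sketch of that skipping pattern, assuming the `JsonReader`/`JsonToken` API used throughout this review (`currentToken()`, `skipChildren()`); the helper name `skipUnknownProperty` is hypothetical and imports for the in-review types are omitted:

```java
// Hypothetical helper illustrating the TODO: containers are skipped
// recursively, scalars are left for the enclosing read loop to pass over.
private static void skipUnknownProperty(JsonReader reader) {
    JsonToken token = reader.currentToken();
    if (token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY) {
        // Consumes the whole nested object/array, however deeply nested.
        reader.skipChildren();
    }
    // Otherwise the token is a scalar (boolean, null, number, string); nothing
    // to do here because the enclosing while (nextToken() != END_OBJECT) loop
    // advances past it on the next iteration.
}
```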
public static SampleResource fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, (reader, token) -> { String namePropertiesName = null; String registrationTtl = null; while (reader.nextToken() != JsonToken.END_OBJECT) { String fieldName = reader.getFieldName(); token = reader.nextToken(); if ("properties".equals(fieldName) && token == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("name".equals(fieldName)) { namePropertiesName = reader.getStringValue(); } else if ("registrationTtl".equals(fieldName)) { registrationTtl = reader.getStringValue(); } else { reader.skipChildren(); } } } else { reader.skipChildren(); } } return new SampleResource().withNamePropertiesName(namePropertiesName).withRegistrationTtl(registrationTtl); }); }
reader.skipChildren();
public static SampleResource fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, reader -> { String namePropertiesName = null; String registrationTtl = null; while (reader.nextToken() != JsonToken.END_OBJECT) { String fieldName = reader.getFieldName(); reader.nextToken(); if ("properties".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("name".equals(fieldName)) { namePropertiesName = reader.getStringValue(); } else if ("registrationTtl".equals(fieldName)) { registrationTtl = reader.getStringValue(); } else { reader.skipChildren(); } } } else { reader.skipChildren(); } } return new SampleResource().withNamePropertiesName(namePropertiesName).withRegistrationTtl(registrationTtl); }); }
class SampleResource implements JsonCapable<SampleResource> { @JsonProperty(value = "properties.name") private String namePropertiesName; @JsonProperty(value = "properties.registrationTtl") private String registrationTtl; public SampleResource withNamePropertiesName(String namePropertiesName) { this.namePropertiesName = namePropertiesName; return this; } public SampleResource withRegistrationTtl(String registrationTtl) { this.registrationTtl = registrationTtl; return this; } public String getNamePropertiesName() { return namePropertiesName; } public String getRegistrationTtl() { return registrationTtl; } @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject(); if (namePropertiesName == null && registrationTtl == null) { return jsonWriter.writeEndObject().flush(); } jsonWriter.writeFieldName("properties").writeStartObject(); JsonUtils.writeNonNullStringField(jsonWriter, "name", namePropertiesName); JsonUtils.writeNonNullStringField(jsonWriter, "registrationTtl", registrationTtl); return jsonWriter.writeEndObject().writeEndObject().flush(); } }
class SampleResource implements JsonCapable<SampleResource> { private String namePropertiesName; private String registrationTtl; public SampleResource withNamePropertiesName(String namePropertiesName) { this.namePropertiesName = namePropertiesName; return this; } public SampleResource withRegistrationTtl(String registrationTtl) { this.registrationTtl = registrationTtl; return this; } public String getNamePropertiesName() { return namePropertiesName; } public String getRegistrationTtl() { return registrationTtl; } @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject(); if (namePropertiesName == null && registrationTtl == null) { return jsonWriter.writeEndObject().flush(); } jsonWriter.writeFieldName("properties").writeStartObject(); JsonUtils.writeNonNullStringField(jsonWriter, "name", namePropertiesName); JsonUtils.writeNonNullStringField(jsonWriter, "registrationTtl", registrationTtl); return jsonWriter.writeEndObject().writeEndObject().flush(); } }
For the future: would it make sense to add a convenience API on DefaultJsonWriter so it can hide the stream manipulation? ```java DefaultJsonWriter writer = new DefaultJsonWriter(); toJson(writer); return writer.toString(); // can throw on the wrong state ```
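A sketch of one way that convenience could look, using a static helper rather than a constructor overload; the class name, method name `writeToString`, and placement are assumptions, not the existing `DefaultJsonWriter` API:

```java
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

// Hypothetical convenience that hides the stream plumbing from callers.
final class JsonWriterConvenience {
    static String writeToString(JsonCapable<?> model) {
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        JsonWriter writer = DefaultJsonWriter.toStream(stream);
        // The toJson(...) implementations shown in this review end with flush(),
        // so the buffered content is fully written before decoding.
        model.toJson(writer);
        return new String(stream.toByteArray(), StandardCharsets.UTF_8);
    }
}
```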
public String toString() { AccessibleByteArrayOutputStream outputStream = new AccessibleByteArrayOutputStream(); JsonWriter writer = DefaultJsonWriter.toStream(outputStream); toJson(writer); return outputStream.toString(StandardCharsets.UTF_8); }
toJson(writer);
public String toString() { AccessibleByteArrayOutputStream outputStream = new AccessibleByteArrayOutputStream(); JsonWriter writer = DefaultJsonWriter.fromStream(outputStream); toJson(writer); return outputStream.toString(StandardCharsets.UTF_8); }
class JsonPatchOperation implements JsonCapable<JsonPatchOperation> { private final JsonPatchOperationKind op; private final String from; private final String path; private final Option<String> value; /** * Creates a JSON Patch operation. * <p> * When {@code optionalValue} is null the value won't be included in the JSON request, use {@link Optional * to indicate a JSON null. * * @param op The kind of operation. * @param from Optional from target path. * @param path Operation target path. * @param value Optional value. */ public JsonPatchOperation(JsonPatchOperationKind op, String from, String path, Option<String> value) { this.op = op; this.from = from; this.path = path; this.value = value; } /** * Gets the operation kind. * * @return The kind of operation. */ public JsonPatchOperationKind getOp() { return op; } /** * Gets the operation from target path. * * @return The operation from target path. */ public String getFrom() { return from; } /** * Gets the operation target path. * * @return The operation target path. */ public String getPath() { return path; } /** * Gets the operation value. * <p> * If the operation doesn't take a value {@link Option * * @return The operation value. */ public Option<String> getValue() { return value; } @Override public int hashCode() { return Objects.hash(op.toString(), from, path, (value == null) ? null : value.getValue()); } @Override public boolean equals(Object obj) { if (!(obj instanceof JsonPatchOperation)) { return false; } if (this == obj) { return true; } JsonPatchOperation other = (JsonPatchOperation) obj; return Objects.equals(op, other.op) && Objects.equals(from, other.from) && Objects.equals(path, other.path) && Objects.equals(value, other.value); } @Override @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject().writeStringField("op", op.toString()); if (from != null) { jsonWriter.writeStringField("from", from); } jsonWriter.writeStringField("path", path); if (value.isInitialized()) { String val = value.getValue(); if (val == null) { jsonWriter.writeNullField("value"); } else { jsonWriter.writeRawField("value", val); } } return jsonWriter.writeEndObject().flush(); } /** * Creates an instance of {@link JsonPatchOperation} by reading the {@link JsonReader}. * <p> * null will be returned if the {@link JsonReader} points to {@link JsonToken * <p> * {@link IllegalStateException} will be thrown if the {@link JsonReader} doesn't point to either {@link * JsonToken * * @param jsonReader The {@link JsonReader} that will be read. 
* @return An instance of {@link JsonPatchOperation} if the {@link JsonReader} is pointing to {@link * JsonPatchOperation} JSON content, or null if it's pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to either {@link JsonToken * {@link JsonToken */ public static JsonPatchOperation fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, (reader, token) -> { JsonPatchOperationKind op = null; String from = null; String path = null; Option<String> value = Option.uninitialized(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); token = jsonReader.nextToken(); switch (fieldName) { case "op": op = JsonPatchOperationKind.fromString(jsonReader.getStringValue()); break; case "from": from = jsonReader.getStringValue(); break; case "path": path = jsonReader.getStringValue(); break; case "value": if (token == JsonToken.START_ARRAY || token == JsonToken.START_OBJECT) { value = Option.of(jsonReader.readChildren()); } else if (token == JsonToken.NULL) { value = Option.empty(); } else { value = Option.of(jsonReader.getTextValue()); } break; default: break; } } return new JsonPatchOperation(op, from, path, value); }); } }
class JsonPatchOperation implements JsonCapable<JsonPatchOperation> { private final JsonPatchOperationKind op; private final String from; private final String path; private final Option<String> value; /** * Creates a JSON Patch operation. * <p> * When {@code optionalValue} is null the value won't be included in the JSON request, use {@link Optional * to indicate a JSON null. * * @param op The kind of operation. * @param from Optional from target path. * @param path Operation target path. * @param value Optional value. */ public JsonPatchOperation(JsonPatchOperationKind op, String from, String path, Option<String> value) { this.op = op; this.from = from; this.path = path; this.value = value; } /** * Gets the operation kind. * * @return The kind of operation. */ public JsonPatchOperationKind getOp() { return op; } /** * Gets the operation from target path. * * @return The operation from target path. */ public String getFrom() { return from; } /** * Gets the operation target path. * * @return The operation target path. */ public String getPath() { return path; } /** * Gets the operation value. * <p> * If the operation doesn't take a value {@link Option * * @return The operation value. */ public Option<String> getValue() { return value; } @Override public int hashCode() { return Objects.hash(op.toString(), from, path, (value == null) ? null : value.getValue()); } @Override public boolean equals(Object obj) { if (!(obj instanceof JsonPatchOperation)) { return false; } if (this == obj) { return true; } JsonPatchOperation other = (JsonPatchOperation) obj; return Objects.equals(op, other.op) && Objects.equals(from, other.from) && Objects.equals(path, other.path) && Objects.equals(value, other.value); } @Override @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject().writeStringField("op", op.toString()); if (from != null) { jsonWriter.writeStringField("from", from); } jsonWriter.writeStringField("path", path); if (value.isInitialized()) { String val = value.getValue(); if (val == null) { jsonWriter.writeNullField("value"); } else { jsonWriter.writeRawField("value", val); } } return jsonWriter.writeEndObject().flush(); } /** * Creates an instance of {@link JsonPatchOperation} by reading the {@link JsonReader}. * <p> * null will be returned if the {@link JsonReader} points to {@link JsonToken * <p> * {@link IllegalStateException} will be thrown if the {@link JsonReader} doesn't point to either {@link * JsonToken * * @param jsonReader The {@link JsonReader} that will be read. 
* @return An instance of {@link JsonPatchOperation} if the {@link JsonReader} is pointing to {@link * JsonPatchOperation} JSON content, or null if it's pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to either {@link JsonToken * {@link JsonToken */ public static JsonPatchOperation fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, reader -> { JsonPatchOperationKind op = null; String from = null; String path = null; Option<String> value = Option.uninitialized(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); jsonReader.nextToken(); if ("op".equals(fieldName)) { op = JsonPatchOperationKind.fromString(jsonReader.getStringValue()); } else if ("from".equals(fieldName)) { from = jsonReader.getStringValue(); } else if ("path".equals(fieldName)) { path = jsonReader.getStringValue(); } else if ("value".equals(fieldName)) { if (reader.isStartArrayOrObject()) { value = Option.of(jsonReader.readChildren()); } else if (reader.currentToken() == JsonToken.NULL) { value = Option.empty(); } else { value = Option.of(jsonReader.getTextValue()); } } else { reader.skipChildren(); } } return new JsonPatchOperation(op, from, path, value); }); } }
Is the `toStream` name right? It feels like `fromStream` would be more intuitive.
public String toString() { AccessibleByteArrayOutputStream outputStream = new AccessibleByteArrayOutputStream(); JsonWriter writer = DefaultJsonWriter.toStream(outputStream); toJson(writer); return outputStream.toString(StandardCharsets.UTF_8); }
JsonWriter writer = DefaultJsonWriter.toStream(outputStream);
public String toString() { AccessibleByteArrayOutputStream outputStream = new AccessibleByteArrayOutputStream(); JsonWriter writer = DefaultJsonWriter.fromStream(outputStream); toJson(writer); return outputStream.toString(StandardCharsets.UTF_8); }
class JsonPatchOperation implements JsonCapable<JsonPatchOperation> { private final JsonPatchOperationKind op; private final String from; private final String path; private final Option<String> value; /** * Creates a JSON Patch operation. * <p> * When {@code optionalValue} is null the value won't be included in the JSON request, use {@link Optional * to indicate a JSON null. * * @param op The kind of operation. * @param from Optional from target path. * @param path Operation target path. * @param value Optional value. */ public JsonPatchOperation(JsonPatchOperationKind op, String from, String path, Option<String> value) { this.op = op; this.from = from; this.path = path; this.value = value; } /** * Gets the operation kind. * * @return The kind of operation. */ public JsonPatchOperationKind getOp() { return op; } /** * Gets the operation from target path. * * @return The operation from target path. */ public String getFrom() { return from; } /** * Gets the operation target path. * * @return The operation target path. */ public String getPath() { return path; } /** * Gets the operation value. * <p> * If the operation doesn't take a value {@link Option * * @return The operation value. */ public Option<String> getValue() { return value; } @Override public int hashCode() { return Objects.hash(op.toString(), from, path, (value == null) ? null : value.getValue()); } @Override public boolean equals(Object obj) { if (!(obj instanceof JsonPatchOperation)) { return false; } if (this == obj) { return true; } JsonPatchOperation other = (JsonPatchOperation) obj; return Objects.equals(op, other.op) && Objects.equals(from, other.from) && Objects.equals(path, other.path) && Objects.equals(value, other.value); } @Override @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject().writeStringField("op", op.toString()); if (from != null) { jsonWriter.writeStringField("from", from); } jsonWriter.writeStringField("path", path); if (value.isInitialized()) { String val = value.getValue(); if (val == null) { jsonWriter.writeNullField("value"); } else { jsonWriter.writeRawField("value", val); } } return jsonWriter.writeEndObject().flush(); } /** * Creates an instance of {@link JsonPatchOperation} by reading the {@link JsonReader}. * <p> * null will be returned if the {@link JsonReader} points to {@link JsonToken * <p> * {@link IllegalStateException} will be thrown if the {@link JsonReader} doesn't point to either {@link * JsonToken * * @param jsonReader The {@link JsonReader} that will be read. 
* @return An instance of {@link JsonPatchOperation} if the {@link JsonReader} is pointing to {@link * JsonPatchOperation} JSON content, or null if it's pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to either {@link JsonToken * {@link JsonToken */ public static JsonPatchOperation fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, (reader, token) -> { JsonPatchOperationKind op = null; String from = null; String path = null; Option<String> value = Option.uninitialized(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); token = jsonReader.nextToken(); switch (fieldName) { case "op": op = JsonPatchOperationKind.fromString(jsonReader.getStringValue()); break; case "from": from = jsonReader.getStringValue(); break; case "path": path = jsonReader.getStringValue(); break; case "value": if (token == JsonToken.START_ARRAY || token == JsonToken.START_OBJECT) { value = Option.of(jsonReader.readChildren()); } else if (token == JsonToken.NULL) { value = Option.empty(); } else { value = Option.of(jsonReader.getTextValue()); } break; default: break; } } return new JsonPatchOperation(op, from, path, value); }); } }
class JsonPatchOperation implements JsonCapable<JsonPatchOperation> { private final JsonPatchOperationKind op; private final String from; private final String path; private final Option<String> value; /** * Creates a JSON Patch operation. * <p> * When {@code optionalValue} is null the value won't be included in the JSON request, use {@link Optional * to indicate a JSON null. * * @param op The kind of operation. * @param from Optional from target path. * @param path Operation target path. * @param value Optional value. */ public JsonPatchOperation(JsonPatchOperationKind op, String from, String path, Option<String> value) { this.op = op; this.from = from; this.path = path; this.value = value; } /** * Gets the operation kind. * * @return The kind of operation. */ public JsonPatchOperationKind getOp() { return op; } /** * Gets the operation from target path. * * @return The operation from target path. */ public String getFrom() { return from; } /** * Gets the operation target path. * * @return The operation target path. */ public String getPath() { return path; } /** * Gets the operation value. * <p> * If the operation doesn't take a value {@link Option * * @return The operation value. */ public Option<String> getValue() { return value; } @Override public int hashCode() { return Objects.hash(op.toString(), from, path, (value == null) ? null : value.getValue()); } @Override public boolean equals(Object obj) { if (!(obj instanceof JsonPatchOperation)) { return false; } if (this == obj) { return true; } JsonPatchOperation other = (JsonPatchOperation) obj; return Objects.equals(op, other.op) && Objects.equals(from, other.from) && Objects.equals(path, other.path) && Objects.equals(value, other.value); } @Override @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject().writeStringField("op", op.toString()); if (from != null) { jsonWriter.writeStringField("from", from); } jsonWriter.writeStringField("path", path); if (value.isInitialized()) { String val = value.getValue(); if (val == null) { jsonWriter.writeNullField("value"); } else { jsonWriter.writeRawField("value", val); } } return jsonWriter.writeEndObject().flush(); } /** * Creates an instance of {@link JsonPatchOperation} by reading the {@link JsonReader}. * <p> * null will be returned if the {@link JsonReader} points to {@link JsonToken * <p> * {@link IllegalStateException} will be thrown if the {@link JsonReader} doesn't point to either {@link * JsonToken * * @param jsonReader The {@link JsonReader} that will be read. 
* @return An instance of {@link JsonPatchOperation} if the {@link JsonReader} is pointing to {@link * JsonPatchOperation} JSON content, or null if it's pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to either {@link JsonToken * {@link JsonToken */ public static JsonPatchOperation fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, reader -> { JsonPatchOperationKind op = null; String from = null; String path = null; Option<String> value = Option.uninitialized(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); jsonReader.nextToken(); if ("op".equals(fieldName)) { op = JsonPatchOperationKind.fromString(jsonReader.getStringValue()); } else if ("from".equals(fieldName)) { from = jsonReader.getStringValue(); } else if ("path".equals(fieldName)) { path = jsonReader.getStringValue(); } else if ("value".equals(fieldName)) { if (reader.isStartArrayOrObject()) { value = Option.of(jsonReader.readChildren()); } else if (reader.currentToken() == JsonToken.NULL) { value = Option.empty(); } else { value = Option.of(jsonReader.getTextValue()); } } else { reader.skipChildren(); } } return new JsonPatchOperation(op, from, path, value); }); } }
Why do we have an explicit charset specified here?
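Presumably the explicit charset is there because the writer emits UTF-8 bytes, while the no-argument `toString()` on a byte-array stream decodes with the platform default charset; that reasoning is an assumption, not stated in this review. A small self-contained illustration using the JDK's `ByteArrayOutputStream` (not the Azure `AccessibleByteArrayOutputStream`):

```java
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;

public class CharsetDemo {
    public static void main(String[] args) {
        // JSON containing a non-ASCII value, encoded as UTF-8 bytes the same
        // way the JSON writer would emit them.
        byte[] utf8Json = "{\"name\":\"Gr\u00fc\u00dfe\"}".getBytes(StandardCharsets.UTF_8);
        ByteArrayOutputStream stream = new ByteArrayOutputStream();
        stream.write(utf8Json, 0, utf8Json.length);

        // Platform-default decoding: produces mojibake on machines whose
        // default charset isn't UTF-8 (for example windows-1252).
        System.out.println(stream.toString());

        // Explicit decoding matches the encoding that was actually written.
        System.out.println(new String(stream.toByteArray(), StandardCharsets.UTF_8));
    }
}
```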
public String toString() { AccessibleByteArrayOutputStream outputStream = new AccessibleByteArrayOutputStream(); JsonWriter writer = DefaultJsonWriter.toStream(outputStream); toJson(writer); return outputStream.toString(StandardCharsets.UTF_8); }
return outputStream.toString(StandardCharsets.UTF_8);
public String toString() { AccessibleByteArrayOutputStream outputStream = new AccessibleByteArrayOutputStream(); JsonWriter writer = DefaultJsonWriter.fromStream(outputStream); toJson(writer); return outputStream.toString(StandardCharsets.UTF_8); }
class JsonPatchOperation implements JsonCapable<JsonPatchOperation> { private final JsonPatchOperationKind op; private final String from; private final String path; private final Option<String> value; /** * Creates a JSON Patch operation. * <p> * When {@code optionalValue} is null the value won't be included in the JSON request, use {@link Optional * to indicate a JSON null. * * @param op The kind of operation. * @param from Optional from target path. * @param path Operation target path. * @param value Optional value. */ public JsonPatchOperation(JsonPatchOperationKind op, String from, String path, Option<String> value) { this.op = op; this.from = from; this.path = path; this.value = value; } /** * Gets the operation kind. * * @return The kind of operation. */ public JsonPatchOperationKind getOp() { return op; } /** * Gets the operation from target path. * * @return The operation from target path. */ public String getFrom() { return from; } /** * Gets the operation target path. * * @return The operation target path. */ public String getPath() { return path; } /** * Gets the operation value. * <p> * If the operation doesn't take a value {@link Option * * @return The operation value. */ public Option<String> getValue() { return value; } @Override public int hashCode() { return Objects.hash(op.toString(), from, path, (value == null) ? null : value.getValue()); } @Override public boolean equals(Object obj) { if (!(obj instanceof JsonPatchOperation)) { return false; } if (this == obj) { return true; } JsonPatchOperation other = (JsonPatchOperation) obj; return Objects.equals(op, other.op) && Objects.equals(from, other.from) && Objects.equals(path, other.path) && Objects.equals(value, other.value); } @Override @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject().writeStringField("op", op.toString()); if (from != null) { jsonWriter.writeStringField("from", from); } jsonWriter.writeStringField("path", path); if (value.isInitialized()) { String val = value.getValue(); if (val == null) { jsonWriter.writeNullField("value"); } else { jsonWriter.writeRawField("value", val); } } return jsonWriter.writeEndObject().flush(); } /** * Creates an instance of {@link JsonPatchOperation} by reading the {@link JsonReader}. * <p> * null will be returned if the {@link JsonReader} points to {@link JsonToken * <p> * {@link IllegalStateException} will be thrown if the {@link JsonReader} doesn't point to either {@link * JsonToken * * @param jsonReader The {@link JsonReader} that will be read. 
* @return An instance of {@link JsonPatchOperation} if the {@link JsonReader} is pointing to {@link * JsonPatchOperation} JSON content, or null if it's pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to either {@link JsonToken * {@link JsonToken */ public static JsonPatchOperation fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, (reader, token) -> { JsonPatchOperationKind op = null; String from = null; String path = null; Option<String> value = Option.uninitialized(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); token = jsonReader.nextToken(); switch (fieldName) { case "op": op = JsonPatchOperationKind.fromString(jsonReader.getStringValue()); break; case "from": from = jsonReader.getStringValue(); break; case "path": path = jsonReader.getStringValue(); break; case "value": if (token == JsonToken.START_ARRAY || token == JsonToken.START_OBJECT) { value = Option.of(jsonReader.readChildren()); } else if (token == JsonToken.NULL) { value = Option.empty(); } else { value = Option.of(jsonReader.getTextValue()); } break; default: break; } } return new JsonPatchOperation(op, from, path, value); }); } }
class JsonPatchOperation implements JsonCapable<JsonPatchOperation> { private final JsonPatchOperationKind op; private final String from; private final String path; private final Option<String> value; /** * Creates a JSON Patch operation. * <p> * When {@code optionalValue} is null the value won't be included in the JSON request, use {@link Optional * to indicate a JSON null. * * @param op The kind of operation. * @param from Optional from target path. * @param path Operation target path. * @param value Optional value. */ public JsonPatchOperation(JsonPatchOperationKind op, String from, String path, Option<String> value) { this.op = op; this.from = from; this.path = path; this.value = value; } /** * Gets the operation kind. * * @return The kind of operation. */ public JsonPatchOperationKind getOp() { return op; } /** * Gets the operation from target path. * * @return The operation from target path. */ public String getFrom() { return from; } /** * Gets the operation target path. * * @return The operation target path. */ public String getPath() { return path; } /** * Gets the operation value. * <p> * If the operation doesn't take a value {@link Option * * @return The operation value. */ public Option<String> getValue() { return value; } @Override public int hashCode() { return Objects.hash(op.toString(), from, path, (value == null) ? null : value.getValue()); } @Override public boolean equals(Object obj) { if (!(obj instanceof JsonPatchOperation)) { return false; } if (this == obj) { return true; } JsonPatchOperation other = (JsonPatchOperation) obj; return Objects.equals(op, other.op) && Objects.equals(from, other.from) && Objects.equals(path, other.path) && Objects.equals(value, other.value); } @Override @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject().writeStringField("op", op.toString()); if (from != null) { jsonWriter.writeStringField("from", from); } jsonWriter.writeStringField("path", path); if (value.isInitialized()) { String val = value.getValue(); if (val == null) { jsonWriter.writeNullField("value"); } else { jsonWriter.writeRawField("value", val); } } return jsonWriter.writeEndObject().flush(); } /** * Creates an instance of {@link JsonPatchOperation} by reading the {@link JsonReader}. * <p> * null will be returned if the {@link JsonReader} points to {@link JsonToken * <p> * {@link IllegalStateException} will be thrown if the {@link JsonReader} doesn't point to either {@link * JsonToken * * @param jsonReader The {@link JsonReader} that will be read. 
* @return An instance of {@link JsonPatchOperation} if the {@link JsonReader} is pointing to {@link * JsonPatchOperation} JSON content, or null if it's pointing to {@link JsonToken * @throws IllegalStateException If the {@link JsonReader} wasn't pointing to either {@link JsonToken * {@link JsonToken */ public static JsonPatchOperation fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, reader -> { JsonPatchOperationKind op = null; String from = null; String path = null; Option<String> value = Option.uninitialized(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); jsonReader.nextToken(); if ("op".equals(fieldName)) { op = JsonPatchOperationKind.fromString(jsonReader.getStringValue()); } else if ("from".equals(fieldName)) { from = jsonReader.getStringValue(); } else if ("path".equals(fieldName)) { path = jsonReader.getStringValue(); } else if ("value".equals(fieldName)) { if (reader.isStartArrayOrObject()) { value = Option.of(jsonReader.readChildren()); } else if (reader.currentToken() == JsonToken.NULL) { value = Option.empty(); } else { value = Option.of(jsonReader.getTextValue()); } } else { reader.skipChildren(); } } return new JsonPatchOperation(op, from, path, value); }); } }
> && token == JsonToken.START_OBJECT What if that's not the case? It's the same concern as before about having a common place for validation + failure + logging, and about finding a pattern that would help the models stay consistent.
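A minimal sketch of a shared check that could centralize that validation, assuming the `JsonReader`/`JsonToken` API shown in this review; the helper name, exception type, and message are illustrative only:

```java
// Hypothetical shared "validate + fail" helper so every flattened-property
// read reports unexpected tokens the same way (logging could be added here).
private static void expectToken(JsonReader reader, JsonToken expected, String fieldName) {
    JsonToken actual = reader.currentToken();
    if (actual != expected) {
        throw new IllegalStateException("Unexpected token for field '" + fieldName
            + "': expected " + expected + " but was " + actual + ".");
    }
}
```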
public static FlattenDangling fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, (reader, token) -> { String flattenedProperty = null; while (reader.nextToken() != JsonToken.END_OBJECT) { String fieldName = reader.getFieldName(); token = reader.nextToken(); if ("a".equals(fieldName) && token == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getStringValue(); token = reader.nextToken(); if ("flattened".equals(fieldName) && token == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getStringValue(); reader.nextToken(); if ("property".equals(fieldName)) { flattenedProperty = reader.getStringValue(); } } } } } } return new FlattenDangling().setFlattenedProperty(flattenedProperty); }); }
if ("a".equals(fieldName) && token == JsonToken.START_OBJECT) {
public static FlattenDangling fromJson(JsonReader jsonReader) { return JsonUtils.readObject(jsonReader, reader -> { FlattenDangling dangling = new FlattenDangling(); JsonUtils.readFields(reader, fieldName -> { if ("a".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { JsonUtils.readFields(reader, fieldName2 -> { if ("flattened".equals(fieldName2) && reader.currentToken() == JsonToken.START_OBJECT) { JsonUtils.readFields(reader, fieldName3 -> { if ("property".equals(fieldName3)) { dangling.setFlattenedProperty(reader.getStringValue()); } }); } }); } }); return dangling; }); }
class FlattenDangling implements JsonCapable<FlattenDangling> { @JsonProperty("a.flattened.property") @JsonFlatten private String flattenedProperty; public String getFlattenedProperty() { return flattenedProperty; } public FlattenDangling setFlattenedProperty(String flattenedProperty) { this.flattenedProperty = flattenedProperty; return this; } @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject(); if (flattenedProperty != null) { jsonWriter.writeFieldName("a") .writeStartObject() .writeFieldName("flattened") .writeStartObject() .writeStringField("property", flattenedProperty) .writeEndObject() .writeEndObject(); } return jsonWriter.writeEndObject().flush(); } }
class FlattenDangling implements JsonCapable<FlattenDangling> { private String flattenedProperty; public String getFlattenedProperty() { return flattenedProperty; } public FlattenDangling setFlattenedProperty(String flattenedProperty) { this.flattenedProperty = flattenedProperty; return this; } @Override public JsonWriter toJson(JsonWriter jsonWriter) { jsonWriter.writeStartObject(); if (flattenedProperty != null) { jsonWriter.writeFieldName("a") .writeStartObject() .writeFieldName("flattened") .writeStartObject() .writeStringField("property", flattenedProperty) .writeEndObject() .writeEndObject(); } return jsonWriter.writeEndObject().flush(); } }
this is the most difficult model I have ever seen :laughing:
public JsonWriter toJson(JsonWriter jsonWriter) { return null; }
return null;
public JsonWriter toJson(JsonWriter jsonWriter) { return toJsonInternal(jsonWriter, "foo"); }
class Foo implements JsonCapable<Foo> { @JsonProperty(value = "properties.bar") private String bar; @JsonProperty(value = "properties.props.baz") private List<String> baz; @JsonProperty(value = "properties.props.q.qux") private Map<String, String> qux; @JsonProperty(value = "properties.more\\.props") private String moreProps; @JsonProperty(value = "props.empty") private Integer empty; @JsonProperty(value = "") private Map<String, Object> additionalProperties; public String bar() { return bar; } public void bar(String bar) { this.bar = bar; } public List<String> baz() { return baz; } public void baz(List<String> baz) { this.baz = baz; } public Map<String, String> qux() { return qux; } public void qux(Map<String, String> qux) { this.qux = qux; } public String moreProps() { return moreProps; } public void moreProps(String moreProps) { this.moreProps = moreProps; } public Integer empty() { return empty; } public void empty(Integer empty) { this.empty = empty; } public Map<String, Object> additionalProperties() { return additionalProperties; } public void additionalProperties(Map<String, Object> additionalProperties) { this.additionalProperties = additionalProperties; } @Override }
class Foo implements JsonCapable<Foo> { private String bar; private List<String> baz; private Map<String, String> qux; private String moreProps; private Integer empty; private Map<String, Object> additionalProperties; public String bar() { return bar; } public void bar(String bar) { this.bar = bar; } public List<String> baz() { return baz; } public void baz(List<String> baz) { this.baz = baz; } public Map<String, String> qux() { return qux; } public void qux(Map<String, String> qux) { this.qux = qux; } public String moreProps() { return moreProps; } public void moreProps(String moreProps) { this.moreProps = moreProps; } public Integer empty() { return empty; } public void empty(Integer empty) { this.empty = empty; } public Map<String, Object> additionalProperties() { return additionalProperties; } public void additionalProperties(Map<String, Object> additionalProperties) { this.additionalProperties = additionalProperties; } @Override JsonWriter toJsonInternal(JsonWriter jsonWriter, String type) { jsonWriter.writeStartObject() .writeStringField("$type", type); if (bar != null || baz != null || qux != null || moreProps != null) { jsonWriter.writeFieldName("properties") .writeStartObject(); JsonUtils.writeNonNullStringField(jsonWriter, "bar", bar); if (baz != null || qux != null) { jsonWriter.writeFieldName("props") .writeStartObject(); if (baz != null) { JsonUtils.writeArray(jsonWriter, "baz", baz, JsonWriter::writeString); } if (qux != null) { jsonWriter.writeFieldName("q") .writeStartObject() .writeFieldName("qux") .writeStartObject(); qux.forEach(jsonWriter::writeStringField); jsonWriter.writeEndObject() .writeEndObject(); } jsonWriter.writeEndObject(); } JsonUtils.writeNonNullStringField(jsonWriter, "more.props", moreProps); jsonWriter.writeEndObject(); } if (empty != null) { jsonWriter.writeFieldName("props") .writeStartObject() .writeIntField("empty", empty) .writeEndObject(); } if (additionalProperties != null) { additionalProperties.forEach((key, value) -> JsonUtils.writeUntypedField(jsonWriter.writeFieldName(key), value)); } return jsonWriter.writeEndObject().flush(); } public static Foo fromJson(JsonReader jsonReader) { return fromJsonInternal(jsonReader, null); } static Foo fromJsonInternal(JsonReader jsonReader, String expectedType) { return JsonUtils.readObject(jsonReader, reader -> { String type = null; String bar = null; List<String> baz = null; Map<String, String> qux = null; String moreProps = null; Integer empty = null; Map<String, Object> additionalProperties = null; while (reader.nextToken() != JsonToken.END_OBJECT) { String fieldName = reader.getFieldName(); reader.nextToken(); if ("$type".equals(fieldName)) { type = reader.getStringValue(); } else if ("properties".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("bar".equals(fieldName)) { bar = reader.getStringValue(); } else if ("more.props".equals(fieldName)) { moreProps = reader.getStringValue(); } else if ("props".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("baz".equals(fieldName)) { baz = JsonUtils.readArray(reader, r -> JsonUtils.getNullableProperty(r, JsonReader::getStringValue)); } else if ("q".equals(fieldName)) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if 
("qux".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { if (qux == null) { qux = new LinkedHashMap<>(); } while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); qux.put(fieldName, JsonUtils.getNullableProperty(reader, JsonReader::getStringValue)); } } else { reader.skipChildren(); } } } else { reader.skipChildren(); } } } else { reader.skipChildren(); } } } else if ("props".equals(fieldName) && reader.currentToken() == JsonToken.START_OBJECT) { while (reader.nextToken() != JsonToken.END_OBJECT) { fieldName = reader.getFieldName(); reader.nextToken(); if ("empty".equals(fieldName)) { empty = reader.currentToken() == JsonToken.NULL ? null : reader.getIntValue(); } else { reader.skipChildren(); } } } else { if (additionalProperties == null) { additionalProperties = new LinkedHashMap<>(); } additionalProperties.put(fieldName, JsonUtils.readUntypedField(reader)); } } if (expectedType != null && type != null && !Objects.equals(expectedType, type)) { throw new IllegalStateException("Discriminator field '$type' didn't match expected value: " + "'" + expectedType + "'. It was: '" + type + "'."); } if ((expectedType == null && type == null) || "foo".equals(type)) { Foo foo = new Foo(); foo.bar(bar); foo.baz(baz); foo.qux(qux); foo.moreProps(moreProps); foo.empty(empty); foo.additionalProperties(additionalProperties); return foo; } else if ("foochild".equals(expectedType) || "foochild".equals(type)) { FooChild fooChild = new FooChild(); fooChild.bar(bar); fooChild.baz(baz); fooChild.qux(qux); fooChild.moreProps(moreProps); fooChild.empty(empty); fooChild.additionalProperties(additionalProperties); return fooChild; } else { throw new IllegalStateException("Invalid discriminator value '" + reader.getStringValue() + "', expected: 'foo' or 'foochild'."); } }); } }
Future concern: we probably want to limit the recursion depth here and in the other recursive helpers. (See the depth-guard sketch after this entry.)
public static Object readUntypedField(JsonReader jsonReader) { JsonToken token = jsonReader.currentToken(); if (token == JsonToken.END_ARRAY || token == JsonToken.END_OBJECT || token == JsonToken.FIELD_NAME) { throw new IllegalStateException("Unexpected token to begin an untyped field: " + token); } if (token == JsonToken.NULL) { return null; } else if (token == JsonToken.BOOLEAN) { return jsonReader.getBooleanValue(); } else if (token == JsonToken.NUMBER) { String numberText = jsonReader.getTextValue(); if (numberText.contains(".")) { return Double.parseDouble(numberText); } else { return Long.parseLong(numberText); } } else if (token == JsonToken.STRING) { return jsonReader.getStringValue(); } else if (token == JsonToken.START_ARRAY) { List<Object> array = new ArrayList<>(); while (jsonReader.nextToken() != JsonToken.END_ARRAY) { array.add(readUntypedField(jsonReader)); } return array; } else if (token == JsonToken.START_OBJECT) { Map<String, Object> object = new LinkedHashMap<>(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); jsonReader.nextToken(); Object value = readUntypedField(jsonReader); object.put(fieldName, value); } return object; } throw new IllegalStateException("Unknown token type while reading an untyped field: " + token); }
Object value = readUntypedField(jsonReader);
public static Object readUntypedField(JsonReader jsonReader) { return readUntypedField(jsonReader, 0); }
class JsonUtils { /** * Serializes an array. * <p> * Handles three scenarios for the array: * * <ul> * <li>null {@code array} writes JSON null</li> * <li>empty {@code array} writes {@code []}</li> * <li>non-empty {@code array} writes a populated JSON array</li> * </ul> * * @param jsonWriter {@link JsonWriter} where JSON will be written. * @param fieldName Field name for the array. * @param array The array. * @param elementWriterFunc Function that writes the array element. * @param <T> Type of array element. * @return The updated {@link JsonWriter} object. */ public static <T> JsonWriter writeArray(JsonWriter jsonWriter, String fieldName, T[] array, BiConsumer<JsonWriter, T> elementWriterFunc) { jsonWriter.writeFieldName(fieldName); if (array == null) { return jsonWriter.writeNull().flush(); } jsonWriter.writeStartArray(); for (T element : array) { elementWriterFunc.accept(jsonWriter, element); } return jsonWriter.writeEndArray().flush(); } /** * Serializes an array. * <p> * Handles three scenarios for the array: * * <ul> * <li>null {@code array} writes JSON null</li> * <li>empty {@code array} writes {@code []}</li> * <li>non-empty {@code array} writes a populated JSON array</li> * </ul> * * @param jsonWriter {@link JsonWriter} where JSON will be written. * @param fieldName Field name for the array. * @param array The array. * @param elementWriterFunc Function that writes the array element. * @param <T> Type of array element. * @return The updated {@link JsonWriter} object. */ public static <T> JsonWriter writeArray(JsonWriter jsonWriter, String fieldName, Iterable<T> array, BiConsumer<JsonWriter, T> elementWriterFunc) { jsonWriter.writeFieldName(fieldName); if (array == null) { return jsonWriter.writeNull().flush(); } jsonWriter.writeStartArray(); for (T element : array) { elementWriterFunc.accept(jsonWriter, element); } return jsonWriter.writeEndArray().flush(); } /** * Handles basic logic for deserializing an object before passing it into the deserialization function. * <p> * This will initialize the {@link JsonReader} for object reading and then check if the current token is * {@link JsonToken * an {@link IllegalStateException}. The {@link JsonToken} passed into the {@code deserializationFunc} will be * {@link JsonToken * <p> * Use {@link * * @param jsonReader The {@link JsonReader} being read. * @param deserializationFunc The function that handles deserialization logic, passing the reader and current * token. * @param <T> The type of object that is being deserialized. * @return The deserialized object, or null if the {@link JsonToken * @throws IllegalStateException If the initial token for reading isn't {@link JsonToken */ public static <T> T readObject(JsonReader jsonReader, BiFunction<JsonReader, JsonToken, T> deserializationFunc) { JsonToken token = jsonReader.currentToken(); if (token == null) { token = jsonReader.nextToken(); } if (token == JsonToken.NULL) { return null; } else if (token != JsonToken.START_OBJECT) { throw new IllegalStateException("Unexpected token to begin deserialization: " + token); } return deserializationFunc.apply(jsonReader, token); } /** * Handles basic logic for deserializing an array before passing it into the deserialization function. * <p> * This will initialize the {@link JsonReader} for array reading and then check if the current token is * {@link JsonToken * {@link IllegalStateException}. * <p> * Use {@link * * @param jsonReader The {@link JsonReader} being read. * @param deserializationFunc The function that handles deserialization logic. 
* @param <T> The type of array element that is being deserialized. * @return The deserialized array, or null if the {@link JsonToken * @throws IllegalStateException If the initial token for reading isn't {@link JsonToken */ public static <T> List<T> readArray(JsonReader jsonReader, BiFunction<JsonReader, JsonToken, T> deserializationFunc) { JsonToken token = jsonReader.currentToken(); if (token == null) { token = jsonReader.nextToken(); } if (token == JsonToken.NULL) { return null; } else if (token != JsonToken.START_ARRAY) { throw new IllegalStateException("Unexpected token to begin deserialization: " + token); } List<T> array = new ArrayList<>(); while ((token = jsonReader.nextToken()) != JsonToken.END_ARRAY) { array.add(deserializationFunc.apply(jsonReader, token)); } return array; } /** * Writes the JSON string field if, and only if, {@code value} isn't null. * * @param writer The {@link JsonWriter} being written. * @param fieldName The field name. * @param value The value. * @return The updated {@link JsonWriter} if {@code value} wasn't null, otherwise the {@link JsonWriter} with no * modifications. */ public static JsonWriter writeNonNullStringField(JsonWriter writer, String fieldName, String value) { return (value == null) ? writer : writer.writeStringField(fieldName, value); } /** * Writes the JSON int field if, and only if, {@code value} isn't null. * * @param writer The {@link JsonWriter} being written. * @param fieldName The field name. * @param value The value. * @return The updated {@link JsonWriter} if {@code value} wasn't null, otherwise the {@link JsonWriter} with no * modifications. */ public static JsonWriter writeNonNullIntegerField(JsonWriter writer, String fieldName, Integer value) { return (value == null) ? writer : writer.writeIntField(fieldName, value); } /** * Reads the {@link JsonReader} as an untyped object. * <p> * The returned object is one of the following: * * <ul> * <li></li> * <li></li> * <li></li> * <li></li> * <li></li> * <li></li> * </ul> * * If the {@link JsonReader * {@link JsonToken * with the ending of an array or object or with the name of a field. * * @param jsonReader The {@link JsonReader} that will be read into an untyped object. * @return The untyped object based on the description. * @throws IllegalStateException If the {@link JsonReader * {@link JsonToken */ /** * Writes the {@code value} as an untyped field to the {@link JsonWriter}. * * @param jsonWriter The {@link JsonWriter} that will be written. * @param value The value to write. * @return The updated {@code jsonWriter} with the {@code value} written to it. 
*/ public static JsonWriter writeUntypedField(JsonWriter jsonWriter, Object value) { if (value == null) { return jsonWriter.writeNull().flush(); } else if (value instanceof Short) { return jsonWriter.writeInt((short) value).flush(); } else if (value instanceof Integer) { return jsonWriter.writeInt((int) value).flush(); } else if (value instanceof Long) { return jsonWriter.writeLong((long) value).flush(); } else if (value instanceof Float) { return jsonWriter.writeFloat((float) value).flush(); } else if (value instanceof Double) { return jsonWriter.writeDouble((double) value).flush(); } else if (value instanceof Boolean) { return jsonWriter.writeBoolean((boolean) value).flush(); } else if (value instanceof byte[]) { return jsonWriter.writeBinary((byte[]) value).flush(); } else if (value instanceof CharSequence) { return jsonWriter.writeString(String.valueOf(value)).flush(); } else if (value instanceof JsonCapable<?>) { return ((JsonCapable<?>) value).toJson(jsonWriter).flush(); } else if (value.getClass() == Object.class) { return jsonWriter.writeStartObject().writeEndObject().flush(); } else { return jsonWriter.writeString(String.valueOf(value)).flush(); } } /** * Gets the nullable JSON property as null if the {@link JsonReader JsonReader's} {@link JsonReader * is {@link JsonToken * * @param jsonReader The {@link JsonReader} being read. * @param nonNullGetter The non-null getter. * @param <T> The type of the property. * @return Either null if the current token is {@link JsonToken * {@code nonNullGetter}. */ public static <T> T getNullableProperty(JsonReader jsonReader, Function<JsonReader, T> nonNullGetter) { return jsonReader.currentToken() == JsonToken.NULL ? null : nonNullGetter.apply(jsonReader); } private JsonUtils() { } }
class JsonUtils { /** * Serializes an array. * <p> * Handles three scenarios for the array: * * <ul> * <li>null {@code array} writes JSON null</li> * <li>empty {@code array} writes {@code []}</li> * <li>non-empty {@code array} writes a populated JSON array</li> * </ul> * * @param jsonWriter {@link JsonWriter} where JSON will be written. * @param fieldName Field name for the array. * @param array The array. * @param elementWriterFunc Function that writes the array element. * @param <T> Type of array element. * @return The updated {@link JsonWriter} object. */ public static <T> JsonWriter writeArray(JsonWriter jsonWriter, String fieldName, T[] array, BiConsumer<JsonWriter, T> elementWriterFunc) { jsonWriter.writeFieldName(fieldName); if (array == null) { return jsonWriter.writeNull().flush(); } jsonWriter.writeStartArray(); for (T element : array) { elementWriterFunc.accept(jsonWriter, element); } return jsonWriter.writeEndArray().flush(); } /** * Serializes an array. * <p> * Handles three scenarios for the array: * * <ul> * <li>null {@code array} writes JSON null</li> * <li>empty {@code array} writes {@code []}</li> * <li>non-empty {@code array} writes a populated JSON array</li> * </ul> * * @param jsonWriter {@link JsonWriter} where JSON will be written. * @param fieldName Field name for the array. * @param array The array. * @param elementWriterFunc Function that writes the array element. * @param <T> Type of array element. * @return The updated {@link JsonWriter} object. */ public static <T> JsonWriter writeArray(JsonWriter jsonWriter, String fieldName, Iterable<T> array, BiConsumer<JsonWriter, T> elementWriterFunc) { jsonWriter.writeFieldName(fieldName); if (array == null) { return jsonWriter.writeNull().flush(); } jsonWriter.writeStartArray(); for (T element : array) { elementWriterFunc.accept(jsonWriter, element); } return jsonWriter.writeEndArray().flush(); } /** * Handles basic logic for deserializing an object before passing it into the deserialization function. * <p> * This will initialize the {@link JsonReader} for object reading and then check if the current token is * {@link JsonToken * an {@link IllegalStateException}. The {@link JsonToken} passed into the {@code deserializationFunc} will be * {@link JsonToken * <p> * Use {@link * * @param jsonReader The {@link JsonReader} being read. * @param deserializationFunc The function that handles deserialization logic, passing the reader and current * token. * @param <T> The type of object that is being deserialized. * @return The deserialized object, or null if the {@link JsonToken * @throws IllegalStateException If the initial token for reading isn't {@link JsonToken */ public static <T> T readObject(JsonReader jsonReader, Function<JsonReader, T> deserializationFunc) { if (jsonReader.currentToken() == null) { jsonReader.nextToken(); } if (jsonReader.currentToken() == JsonToken.NULL) { return null; } else if (jsonReader.currentToken() != JsonToken.START_OBJECT) { throw new IllegalStateException("Unexpected token to begin deserialization: " + jsonReader.currentToken()); } return deserializationFunc.apply(jsonReader); } /** * Handles basic logic for deserializing an array before passing it into the deserialization function. * <p> * This will initialize the {@link JsonReader} for array reading and then check if the current token is * {@link JsonToken * {@link IllegalStateException}. * <p> * Use {@link * * @param jsonReader The {@link JsonReader} being read. * @param deserializationFunc The function that handles deserialization logic. 
* @param <T> The type of array element that is being deserialized. * @return The deserialized array, or null if the {@link JsonToken * @throws IllegalStateException If the initial token for reading isn't {@link JsonToken */ public static <T> List<T> readArray(JsonReader jsonReader, Function<JsonReader, T> deserializationFunc) { if (jsonReader.currentToken() == null) { jsonReader.nextToken(); } if (jsonReader.currentToken() == JsonToken.NULL) { return null; } else if (jsonReader.currentToken() != JsonToken.START_ARRAY) { throw new IllegalStateException("Unexpected token to begin deserialization: " + jsonReader.currentToken()); } List<T> array = new ArrayList<>(); while (jsonReader.nextToken() != JsonToken.END_ARRAY) { array.add(deserializationFunc.apply(jsonReader)); } return array; } /** * Writes the JSON string field if, and only if, {@code value} isn't null. * * @param writer The {@link JsonWriter} being written. * @param fieldName The field name. * @param value The value. * @return The updated {@link JsonWriter} if {@code value} wasn't null, otherwise the {@link JsonWriter} with no * modifications. */ public static JsonWriter writeNonNullStringField(JsonWriter writer, String fieldName, String value) { return (value == null) ? writer : writer.writeStringField(fieldName, value); } /** * Writes the JSON int field if, and only if, {@code value} isn't null. * * @param writer The {@link JsonWriter} being written. * @param fieldName The field name. * @param value The value. * @return The updated {@link JsonWriter} if {@code value} wasn't null, otherwise the {@link JsonWriter} with no * modifications. */ public static JsonWriter writeNonNullIntegerField(JsonWriter writer, String fieldName, Integer value) { return (value == null) ? writer : writer.writeIntField(fieldName, value); } /** * Reads the {@link JsonReader} as an untyped object. * <p> * The returned object is one of the following: * * <ul> * <li></li> * <li></li> * <li></li> * <li></li> * <li></li> * <li></li> * </ul> * * If the {@link JsonReader * {@link JsonToken * with the ending of an array or object or with the name of a field. * * @param jsonReader The {@link JsonReader} that will be read into an untyped object. * @return The untyped object based on the description. 
* @throws IllegalStateException If the {@link JsonReader * {@link JsonToken */ private static Object readUntypedField(JsonReader jsonReader, int depth) { if (depth >= 1000) { throw new IllegalStateException("Untyped object exceeded allowed object nested depth of 1000."); } JsonToken token = jsonReader.currentToken(); if (token == JsonToken.END_ARRAY || token == JsonToken.END_OBJECT || token == JsonToken.FIELD_NAME) { throw new IllegalStateException("Unexpected token to begin an untyped field: " + token); } if (token == JsonToken.NULL) { return null; } else if (token == JsonToken.BOOLEAN) { return jsonReader.getBooleanValue(); } else if (token == JsonToken.NUMBER) { String numberText = jsonReader.getTextValue(); if (numberText.contains(".")) { return Double.parseDouble(numberText); } else { return Long.parseLong(numberText); } } else if (token == JsonToken.STRING) { return jsonReader.getStringValue(); } else if (token == JsonToken.START_ARRAY) { List<Object> array = new ArrayList<>(); while (jsonReader.nextToken() != JsonToken.END_ARRAY) { array.add(readUntypedField(jsonReader, depth + 1)); } return array; } else if (token == JsonToken.START_OBJECT) { Map<String, Object> object = new LinkedHashMap<>(); while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); jsonReader.nextToken(); Object value = readUntypedField(jsonReader, depth + 1); object.put(fieldName, value); } return object; } throw new IllegalStateException("Unknown token type while reading an untyped field: " + token); } /** * Writes the {@code value} as an untyped field to the {@link JsonWriter}. * * @param jsonWriter The {@link JsonWriter} that will be written. * @param value The value to write. * @return The updated {@code jsonWriter} with the {@code value} written to it. */ public static JsonWriter writeUntypedField(JsonWriter jsonWriter, Object value) { if (value == null) { return jsonWriter.writeNull().flush(); } else if (value instanceof Short) { return jsonWriter.writeInt((short) value).flush(); } else if (value instanceof Integer) { return jsonWriter.writeInt((int) value).flush(); } else if (value instanceof Long) { return jsonWriter.writeLong((long) value).flush(); } else if (value instanceof Float) { return jsonWriter.writeFloat((float) value).flush(); } else if (value instanceof Double) { return jsonWriter.writeDouble((double) value).flush(); } else if (value instanceof Boolean) { return jsonWriter.writeBoolean((boolean) value).flush(); } else if (value instanceof byte[]) { return jsonWriter.writeBinary((byte[]) value).flush(); } else if (value instanceof CharSequence) { return jsonWriter.writeString(String.valueOf(value)).flush(); } else if (value instanceof JsonCapable<?>) { return ((JsonCapable<?>) value).toJson(jsonWriter).flush(); } else if (value.getClass() == Object.class) { return jsonWriter.writeStartObject().writeEndObject().flush(); } else { return jsonWriter.writeString(String.valueOf(value)).flush(); } } /** * Gets the nullable JSON property as null if the {@link JsonReader JsonReader's} {@link JsonReader * is {@link JsonToken * * @param jsonReader The {@link JsonReader} being read. * @param nonNullGetter The non-null getter. * @param <T> The type of the property. * @return Either null if the current token is {@link JsonToken * {@code nonNullGetter}. */ public static <T> T getNullableProperty(JsonReader jsonReader, Function<JsonReader, T> nonNullGetter) { return jsonReader.currentToken() == JsonToken.NULL ? 
null : nonNullGetter.apply(jsonReader); } /** * Reads the fields of a JSON object until the end of the object is reached. * <p> * The passed {@link JsonReader} will point to the field value each time {@code fieldNameConsumer} is called. * <p> * An {@link IllegalStateException} will be thrown if the {@link JsonReader * {@link JsonToken * * @param jsonReader The {@link JsonReader} being read. * @param fieldNameConsumer The field name consumer function. * @throws IllegalStateException If {@link JsonReader * {@link JsonToken */ public static void readFields(JsonReader jsonReader, Consumer<String> fieldNameConsumer) { readFields(jsonReader, false, fieldName -> { fieldNameConsumer.accept(fieldName); return false; }); } /** * Reads the fields of a JSON object until the end of the object is reached. * <p> * The passed {@link JsonReader} will point to the field value each time {@code fieldNameConsumer} is called. * <p> * An {@link IllegalStateException} will be thrown if the {@link JsonReader * {@link JsonToken * <p> * If {@code readAdditionalProperties} is true and {@code fieldNameConsumer} returns false the JSON field value * will be read as if it were an additional property. After the object completes reading the untyped additional * properties mapping will be returned, this may be null if there were no additional properties in the JSON object. * * @param jsonReader The {@link JsonReader} being read. * @param readAdditionalProperties Whether additional properties should be read. * @param fieldNameConsumer The field name consumer function. * @return The additional property map if {@code readAdditionalProperties} is true and there were additional * properties in the JSON object, otherwise null. * @throws IllegalStateException If {@link JsonReader * {@link JsonToken */ public static Map<String, Object> readFields(JsonReader jsonReader, boolean readAdditionalProperties, Function<String, Boolean> fieldNameConsumer) { if (jsonReader.currentToken() != JsonToken.START_OBJECT && jsonReader.currentToken() != JsonToken.NULL) { throw new IllegalStateException("Expected the current token of the JsonReader to either be " + "START_OBJECT or NULL. It was: " + jsonReader.currentToken()); } if (jsonReader.currentToken() == JsonToken.NULL) { return null; } Map<String, Object> additionalProperties = null; while (jsonReader.nextToken() != JsonToken.END_OBJECT) { String fieldName = jsonReader.getFieldName(); jsonReader.nextToken(); boolean consumed = fieldNameConsumer.apply(fieldName); if (!consumed && readAdditionalProperties) { if (additionalProperties == null) { additionalProperties = new LinkedHashMap<>(); } additionalProperties.put(fieldName, readUntypedField(jsonReader)); } else if (!consumed) { jsonReader.skipChildren(); } } return additionalProperties; } private JsonUtils() { } }
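The review comment above, and the updated readUntypedField in this entry, guard recursion with a depth counter that throws once nesting exceeds 1000 levels. Below is a minimal, self-contained sketch of that depth-guard pattern. It deliberately does not use the azure-json JsonReader API; it walks an already-parsed Map/List structure instead, so the class name DepthGuardedReader, the method deepCopyUntyped, and the cap of 1000 are illustrative assumptions rather than anything from the library.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Sketch of the depth-guard pattern: the public entry point hides the counter,
 * the private overload carries it, and every recursive call increments it.
 */
public final class DepthGuardedReader {
    private static final int MAX_DEPTH = 1000; // illustrative cap, mirrors the helper in this entry

    private DepthGuardedReader() {
    }

    /** Public API keeps its original shape; recursion always starts at depth 0. */
    public static Object deepCopyUntyped(Object value) {
        return deepCopyUntyped(value, 0);
    }

    private static Object deepCopyUntyped(Object value, int depth) {
        if (depth >= MAX_DEPTH) {
            throw new IllegalStateException(
                "Untyped value exceeded the allowed nesting depth of " + MAX_DEPTH + ".");
        }

        if (value instanceof Map<?, ?>) {
            // Recurse into object fields, bumping the depth for each level.
            Map<String, Object> copy = new LinkedHashMap<>();
            for (Map.Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
                copy.put(String.valueOf(entry.getKey()), deepCopyUntyped(entry.getValue(), depth + 1));
            }
            return copy;
        } else if (value instanceof List<?>) {
            // Recurse into array elements the same way.
            List<Object> copy = new ArrayList<>();
            for (Object element : (List<?>) value) {
                copy.add(deepCopyUntyped(element, depth + 1));
            }
            return copy;
        }

        // Scalars (String, Number, Boolean, null) are returned as-is.
        return value;
    }
}

The shape matches the fix shown above: the public overload keeps the original signature and starts the counter at zero, so existing callers never see the extra parameter.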
Wrap the result in Collections.unmodifiableList? (See the sketch after this entry.)
public List<String> configFilePatterns() { Map<String, Map<String, Object>> addonConfigs = this.innerModel().properties().deploymentSettings().addonConfigs(); if (addonConfigs == null) { return Collections.emptyList(); } Map<String, Object> configurationConfigs = addonConfigs.get(Constants.APPLICATION_CONFIGURATION_SERVICE_KEY); if (configurationConfigs == null) { return Collections.emptyList(); } String patterns = (String) configurationConfigs.get(Constants.CONFIG_FILE_PATTERNS_KEY); if (CoreUtils.isNullOrEmpty(patterns)) { return Collections.emptyList(); } return Arrays.asList(patterns.split(",")); }
return Arrays.asList(patterns.split(","));
public List<String> configFilePatterns() { Map<String, Map<String, Object>> addonConfigs = this.innerModel().properties().deploymentSettings().addonConfigs(); if (addonConfigs == null) { return Collections.emptyList(); } Map<String, Object> configurationConfigs = addonConfigs.get(Constants.APPLICATION_CONFIGURATION_SERVICE_KEY); if (configurationConfigs == null) { return Collections.emptyList(); } if (configurationConfigs.get(Constants.CONFIG_FILE_PATTERNS_KEY) instanceof String) { String patterns = (String) configurationConfigs.get(Constants.CONFIG_FILE_PATTERNS_KEY); return Collections.unmodifiableList(Arrays.asList(patterns.split(","))); } else { return Collections.emptyList(); } }
class SpringAppDeploymentImpl extends ExternalChildResourceImpl<SpringAppDeployment, DeploymentResourceInner, SpringAppImpl, SpringApp> implements SpringAppDeployment, SpringAppDeployment.Definition<SpringAppImpl, SpringAppDeploymentImpl>, SpringAppDeployment.Update { private BuildServiceTask buildServiceTask; SpringAppDeploymentImpl(String name, SpringAppImpl parent, DeploymentResourceInner innerObject) { super(name, parent, innerObject); } @Override public String appName() { if (innerModel().properties() == null) { return null; } return innerModel().name(); } @Override public DeploymentSettings settings() { if (innerModel().properties() == null) { return null; } return innerModel().properties().deploymentSettings(); } @Override public DeploymentResourceStatus status() { if (innerModel().properties() == null) { return null; } return innerModel().properties().status(); } @Override public boolean isActive() { if (innerModel().properties() == null) { return false; } return innerModel().properties().active(); } @Override public List<DeploymentInstance> instances() { if (innerModel().properties() == null) { return null; } return innerModel().properties().instances(); } @Override public void start() { startAsync().block(); } @Override public Mono<Void> startAsync() { return manager().serviceClient().getDeployments().startAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void stop() { stopAsync().block(); } @Override public Mono<Void> stopAsync() { return manager().serviceClient().getDeployments().stopAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void restart() { restartAsync().block(); } @Override public Mono<Void> restartAsync() { return manager().serviceClient().getDeployments().restartAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String getLogFileUrl() { return getLogFileUrlAsync().block(); } @Override public Mono<String> getLogFileUrlAsync() { return manager().serviceClient().getDeployments().getLogFileUrlAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ) .map(LogFileUrlResponseInner::url); } @Override private void ensureDeploySettings() { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().deploymentSettings() == null) { innerModel().properties().withDeploymentSettings(new DeploymentSettings()); } if (innerModel().properties().deploymentSettings().resourceRequests() == null) { innerModel().properties().deploymentSettings().withResourceRequests(new ResourceRequests()); } } private void ensureSource() { ensureSource(null); } private void ensureSource(UserSourceType type) { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().source() == null) { if (type == UserSourceType.JAR) { innerModel().properties().withSource(new JarUploadedUserSourceInfo()); } else if (type == UserSourceType.SOURCE) { innerModel().properties().withSource(new SourceUploadedUserSourceInfo()); } else if (type == UserSourceType.NET_CORE_ZIP) { innerModel().properties().withSource(new NetCoreZipUploadedUserSourceInfo()); } else if (type == UserSourceType.BUILD_RESULT) { innerModel().properties().withSource(new BuildResultUserSourceInfo()); } else { innerModel().properties().withSource(new 
UserSourceInfo()); } } } @Override public SpringAppDeploymentImpl withJarFile(File jar) { if (service().isEnterpriseTier()) { return withJarFile(jar, null); } else { ensureSource(UserSourceType.JAR); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorage(jar, option) .then(context.voidMono()); }) ); return this; } } private ShareFileAsyncClient createShareFileAsyncClient(ResourceUploadDefinition option) { return new ShareFileClientBuilder() .endpoint(option.uploadUrl()) .httpClient(manager().httpPipeline().getHttpClient()) .buildFileAsyncClient(); } private Mono<Void> uploadToStorage(File source, ResourceUploadDefinition option) { try { ShareFileAsyncClient shareFileAsyncClient = createShareFileAsyncClient(option); return shareFileAsyncClient.create(source.length()) .flatMap(fileInfo -> shareFileAsyncClient.uploadFromFile(source.getAbsolutePath())) .then(Mono.empty()); } catch (Exception e) { return Mono.error(e); } } @Override public SpringAppDeploymentImpl withExistingSource(UserSourceType type, String relativePath) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); UserSourceInfo sourceInfo = innerModel().properties().source(); if (sourceInfo instanceof BuildResultUserSourceInfo) { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) sourceInfo; userSourceInfo.withBuildResultId(relativePath); } } else { ensureSource(type); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof UploadedUserSourceInfo) { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRelativePath(relativePath); } } return this; } @Override public SpringAppDeploymentImpl withJarFile(File jar, List<String> configFilePatterns) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask = new BuildServiceTask(jar, configFilePatterns); return this; } private boolean isEnterpriseTier() { return service().isEnterpriseTier(); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz) { return withSourceCodeTarGzFile(sourceCodeTarGz, null); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz, List<String> configFilePatterns) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask = new BuildServiceTask(sourceCodeTarGz, configFilePatterns, true); } else { ensureSource(UserSourceType.SOURCE); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorage(sourceCodeTarGz, option) .then(context.voidMono()); }) ); } return this; } @Override public SpringAppDeploymentImpl withTargetModule(String moduleName) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask.module = moduleName; } else { ensureSource(UserSourceType.SOURCE); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo sourceUploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; 
sourceUploadedUserSourceInfo.withArtifactSelector(moduleName); } } return this; } @Override public SpringAppDeploymentImpl withSingleModule() { return withTargetModule(null); } @Override public SpringAppDeploymentImpl withInstance(int count) { if (innerModel().sku() == null) { innerModel().withSku(service().sku()); } if (innerModel().sku() == null) { innerModel().withSku(new Sku().withName("B0")); } innerModel().sku().withCapacity(count); return this; } @Override public SpringAppDeploymentImpl withCpu(int cpuCount) { return withCpu(String.valueOf(cpuCount)); } @Override public SpringAppDeploymentImpl withCpu(String cpuCount) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withCpu(cpuCount); return this; } @Override public SpringAppDeploymentImpl withMemory(int sizeInGB) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withMemory(String.format("%dGi", sizeInGB)); return this; } @Override public SpringAppDeploymentImpl withMemory(String size) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withMemory(size); return this; } @Override public SpringAppDeploymentImpl withRuntime(RuntimeVersion version) { UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof NetCoreZipUploadedUserSourceInfo) { NetCoreZipUploadedUserSourceInfo uploadedUserSourceInfo = (NetCoreZipUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo uploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } return this; } @Override public SpringAppDeploymentImpl withJvmOptions(String jvmOptions) { if (isEnterpriseTier()) { withEnvironment("JAVA_OPTS", jvmOptions); } else { ensureSource(UserSourceType.JAR); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withJvmOptions(jvmOptions); } } return this; } private void ensureEnvironments() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().environmentVariables() == null) { innerModel().properties().deploymentSettings().withEnvironmentVariables(new HashMap<>()); } } private void ensureAddonConfigs() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().addonConfigs() == null) { innerModel().properties().deploymentSettings().withAddonConfigs(new HashMap<>()); } } @Override public SpringAppDeploymentImpl withEnvironment(String key, String value) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().put(key, value); return this; } @Override public SpringAppDeploymentImpl withoutEnvironment(String key) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().remove(key); return this; } @Override public SpringAppDeploymentImpl withVersionName(String versionName) { ensureSource(); innerModel().properties().source().withVersion(versionName); return this; } @Override 
public SpringAppDeploymentImpl withActivation() { this.addPostRunDependent( context -> parent().update().withActiveDeployment(name()).applyAsync() .map(Function.identity()) ); return this; } @Override public SpringAppDeploymentImpl withConfigFilePatterns(List<String> configFilePatterns) { ensureAddonConfigs(); Map<String, Map<String, Object>> addonConfigs = innerModel().properties().deploymentSettings().addonConfigs(); addonConfigs.computeIfAbsent(Constants.APPLICATION_CONFIGURATION_SERVICE_KEY, s -> { Map<String, Object> config = new HashMap<>(); config.put( Constants.CONFIG_FILE_PATTERNS_KEY, CoreUtils.isNullOrEmpty(configFilePatterns) ? "" : String.join(",", configFilePatterns)); return config; }); return this; } @Override public void beforeGroupCreateOrUpdate() { super.beforeGroupCreateOrUpdate(); if (this.buildServiceTask != null) { this.addDependency(this.buildServiceTask); this.buildServiceTask = null; } } @Override public Mono<SpringAppDeployment> createResourceAsync() { return manager().serviceClient().getDeployments().createOrUpdateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<SpringAppDeployment> updateResourceAsync() { return manager().serviceClient().getDeployments().updateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<Void> deleteResourceAsync() { return manager().serviceClient().getDeployments().deleteAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override protected Mono<DeploymentResourceInner> getInnerAsync() { return manager().serviceClient().getDeployments().getAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String id() { return innerModel().id(); } @Override public SpringAppDeploymentImpl update() { prepareUpdate(); return this; } private AppPlatformManager manager() { return parent().manager(); } @Override public SpringAppImpl attach() { return parent().addActiveDeployment(this); } private SpringAppImpl app() { return parent(); } private SpringServiceImpl service() { return parent().parent(); } private class BuildServiceTask implements FunctionalTaskItem { private final File file; private final boolean sourceCodeTarGz; private final List<String> configFilePatterns; private String module; BuildServiceTask(File file, List<String> configFilePatterns) { this(file, configFilePatterns, false); } BuildServiceTask(File file, List<String> configFilePatterns, boolean sourceCodeTarGz) { this.file = file; this.configFilePatterns = configFilePatterns; this.sourceCodeTarGz = sourceCodeTarGz; } @Override public Mono<Indexable> apply(Context context) { return app().getResourceUploadUrlAsync() .flatMap(option -> uploadAndBuild(file, option) .flatMap(buildId -> { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) innerModel().properties().source(); userSourceInfo.withBuildResultId(buildId); withConfigFilePatterns(this.configFilePatterns); return Mono.empty(); }).then(context.voidMono())); } private Mono<String> uploadAndBuild(File source, ResourceUploadDefinition option) { AppPlatformManagementClientImpl client = (AppPlatformManagementClientImpl) manager().serviceClient(); return uploadToStorage(source, option) .then( new PollerFlux<>( 
manager().serviceClient().getDefaultPollInterval(), context -> enqueueBuild(option, context), this::waitForBuild, (pollResultPollingContext, pollResultPollResponse) -> Mono.error(new RuntimeException("build canceled")), this::getBuildResult) .last() .flatMap(client::getLroFinalResultOrError) .flatMap((Function<Object, Mono<String>>) o -> { BuildResultInner result = (BuildResultInner) o; return Mono.just(result.id()); }) ); } private Mono<BuildResultInner> getBuildResult(PollingContext<PollResult<BuildInner>> context) { return manager().serviceClient().getBuildServices() .getBuildResultAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, parent().name(), ResourceUtils.nameFromResourceId(context.getData("buildId"))); } private Mono<PollResponse<PollResult<BuildInner>>> waitForBuild(PollingContext<PollResult<BuildInner>> context) { return getBuildResult(context) .flatMap((Function<BuildResultInner, Mono<PollResponse<PollResult<BuildInner>>>>) buildResultInner -> { BuildResultProvisioningState state = buildResultInner.properties().provisioningState(); PollResult<BuildInner> emptyResult = new PollResult<>(new BuildInner().withProperties(new BuildProperties())); if (state == BuildResultProvisioningState.SUCCEEDED) { return Mono.just(new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, emptyResult)); } else if (state == BuildResultProvisioningState.FAILED || state == BuildResultProvisioningState.DELETING) { return Mono.error(new RuntimeException("build failed")); } else if (state == BuildResultProvisioningState.QUEUING) { return Mono.just(new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, emptyResult)); } return Mono.just(new PollResponse<>(LongRunningOperationStatus.IN_PROGRESS, emptyResult)); }); } private Mono<PollResult<BuildInner>> enqueueBuild(ResourceUploadDefinition option, PollingContext<PollResult<BuildInner>> context) { BuildProperties buildProperties = new BuildProperties() .withBuilder(String.format("%s/buildservices/%s/builders/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withAgentPool(String.format("%s/buildservices/%s/agentPools/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withRelativePath(option.relativePath()); if (this.sourceCodeTarGz) { Map<String, String> buildEnv = buildProperties.env() == null ? new HashMap<>() : buildProperties.env(); buildProperties.withEnv(buildEnv); if (module != null) { buildEnv.put("BP_MAVEN_BUILT_MODULE", module); } } return manager().serviceClient().getBuildServices() .createOrUpdateBuildAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, app().name(), new BuildInner().withProperties(buildProperties)) .map(inner -> { context.setData("buildId", inner.properties().triggeredBuildResult().id()); return new PollResult<>(inner); }); } } }
class SpringAppDeploymentImpl extends ExternalChildResourceImpl<SpringAppDeployment, DeploymentResourceInner, SpringAppImpl, SpringApp> implements SpringAppDeployment, SpringAppDeployment.Definition<SpringAppImpl, SpringAppDeploymentImpl>, SpringAppDeployment.Update { private static final Duration MAX_BUILD_TIMEOUT = Duration.ofHours(1); private BuildServiceTask buildServiceTask; SpringAppDeploymentImpl(String name, SpringAppImpl parent, DeploymentResourceInner innerObject) { super(name, parent, innerObject); } @Override public String appName() { if (innerModel().properties() == null) { return null; } return innerModel().name(); } @Override public DeploymentSettings settings() { if (innerModel().properties() == null) { return null; } return innerModel().properties().deploymentSettings(); } @Override public DeploymentResourceStatus status() { if (innerModel().properties() == null) { return null; } return innerModel().properties().status(); } @Override public boolean isActive() { if (innerModel().properties() == null) { return false; } return innerModel().properties().active(); } @Override public List<DeploymentInstance> instances() { if (innerModel().properties() == null) { return null; } return innerModel().properties().instances(); } @Override public void start() { startAsync().block(); } @Override public Mono<Void> startAsync() { return manager().serviceClient().getDeployments().startAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void stop() { stopAsync().block(); } @Override public Mono<Void> stopAsync() { return manager().serviceClient().getDeployments().stopAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void restart() { restartAsync().block(); } @Override public Mono<Void> restartAsync() { return manager().serviceClient().getDeployments().restartAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String getLogFileUrl() { return getLogFileUrlAsync().block(); } @Override public Mono<String> getLogFileUrlAsync() { return manager().serviceClient().getDeployments().getLogFileUrlAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ) .map(LogFileUrlResponseInner::url); } @Override private void ensureDeploySettings() { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().deploymentSettings() == null) { innerModel().properties().withDeploymentSettings(new DeploymentSettings()); } if (innerModel().properties().deploymentSettings().resourceRequests() == null) { innerModel().properties().deploymentSettings().withResourceRequests(new ResourceRequests()); } } private void ensureSource() { ensureSource(null); } private void ensureSource(UserSourceType type) { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().source() == null) { if (type == UserSourceType.JAR) { innerModel().properties().withSource(new JarUploadedUserSourceInfo()); } else if (type == UserSourceType.SOURCE) { innerModel().properties().withSource(new SourceUploadedUserSourceInfo()); } else if (type == UserSourceType.NET_CORE_ZIP) { innerModel().properties().withSource(new NetCoreZipUploadedUserSourceInfo()); } else if (type == UserSourceType.BUILD_RESULT) { innerModel().properties().withSource(new 
BuildResultUserSourceInfo()); } else { innerModel().properties().withSource(new UserSourceInfo()); } } } @Override public SpringAppDeploymentImpl withJarFile(File jar) { if (service().isEnterpriseTier()) { return withJarFile(jar, null); } else { ensureSource(UserSourceType.JAR); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorageAsync(jar, option) .then(context.voidMono()); }) ); return this; } } private ShareFileAsyncClient createShareFileAsyncClient(ResourceUploadDefinition option) { return new ShareFileClientBuilder() .endpoint(option.uploadUrl()) .httpClient(manager().httpPipeline().getHttpClient()) .buildFileAsyncClient(); } private Mono<Void> uploadToStorageAsync(File source, ResourceUploadDefinition option) { try { ShareFileAsyncClient shareFileAsyncClient = createShareFileAsyncClient(option); return shareFileAsyncClient.create(source.length()) .flatMap(fileInfo -> shareFileAsyncClient.uploadFromFile(source.getAbsolutePath())) .then(Mono.empty()); } catch (Exception e) { return Mono.error(e); } } @Override public SpringAppDeploymentImpl withExistingSource(UserSourceType type, String relativePath) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); UserSourceInfo sourceInfo = innerModel().properties().source(); if (sourceInfo instanceof BuildResultUserSourceInfo) { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) sourceInfo; userSourceInfo.withBuildResultId(relativePath); } } else { ensureSource(type); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof UploadedUserSourceInfo) { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRelativePath(relativePath); } } return this; } @Override public SpringAppDeploymentImpl withJarFile(File jar, List<String> configFilePatterns) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask = new BuildServiceTask(jar, configFilePatterns); return this; } private boolean isEnterpriseTier() { return service().isEnterpriseTier(); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz) { return withSourceCodeTarGzFile(sourceCodeTarGz, null); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz, List<String> configFilePatterns) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask = new BuildServiceTask(sourceCodeTarGz, configFilePatterns, true); } else { ensureSource(UserSourceType.SOURCE); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorageAsync(sourceCodeTarGz, option) .then(context.voidMono()); }) ); } return this; } @Override public SpringAppDeploymentImpl withTargetModule(String moduleName) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask.module = moduleName; } else { ensureSource(UserSourceType.SOURCE); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo 
sourceUploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; sourceUploadedUserSourceInfo.withArtifactSelector(moduleName); } } return this; } @Override public SpringAppDeploymentImpl withSingleModule() { return withTargetModule(null); } @Override public SpringAppDeploymentImpl withInstance(int count) { if (innerModel().sku() == null) { innerModel().withSku(service().sku()); } if (innerModel().sku() == null) { innerModel().withSku(new Sku().withName("B0")); } innerModel().sku().withCapacity(count); return this; } @Override public SpringAppDeploymentImpl withCpu(int cpuCount) { return withCpu(String.valueOf(cpuCount)); } @Override public SpringAppDeploymentImpl withCpu(String cpuCount) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withCpu(cpuCount); return this; } @Override public SpringAppDeploymentImpl withMemory(int sizeInGB) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withMemory(String.format("%dGi", sizeInGB)); return this; } @Override public SpringAppDeploymentImpl withMemory(String size) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withMemory(size); return this; } @Override public SpringAppDeploymentImpl withRuntime(RuntimeVersion version) { UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof NetCoreZipUploadedUserSourceInfo) { NetCoreZipUploadedUserSourceInfo uploadedUserSourceInfo = (NetCoreZipUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo uploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } return this; } @Override public SpringAppDeploymentImpl withJvmOptions(String jvmOptions) { if (isEnterpriseTier()) { withEnvironment("JAVA_OPTS", jvmOptions); } else { ensureSource(UserSourceType.JAR); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withJvmOptions(jvmOptions); } } return this; } private void ensureEnvironments() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().environmentVariables() == null) { innerModel().properties().deploymentSettings().withEnvironmentVariables(new HashMap<>()); } } private void ensureAddonConfigs() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().addonConfigs() == null) { innerModel().properties().deploymentSettings().withAddonConfigs(new HashMap<>()); } } @Override public SpringAppDeploymentImpl withEnvironment(String key, String value) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().put(key, value); return this; } @Override public SpringAppDeploymentImpl withoutEnvironment(String key) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().remove(key); return this; } @Override public SpringAppDeploymentImpl withVersionName(String versionName) { ensureSource(); 
innerModel().properties().source().withVersion(versionName); return this; } @Override public SpringAppDeploymentImpl withActivation() { this.addPostRunDependent( context -> parent().update().withActiveDeployment(name()).applyAsync() .map(Function.identity()) ); return this; } @Override public SpringAppDeploymentImpl withConfigFilePatterns(List<String> configFilePatterns) { ensureAddonConfigs(); Map<String, Map<String, Object>> addonConfigs = innerModel().properties().deploymentSettings().addonConfigs(); addonConfigs.computeIfAbsent(Constants.APPLICATION_CONFIGURATION_SERVICE_KEY, s -> { Map<String, Object> config = new HashMap<>(); config.put( Constants.CONFIG_FILE_PATTERNS_KEY, CoreUtils.isNullOrEmpty(configFilePatterns) ? "" : String.join(",", configFilePatterns)); return config; }); return this; } @Override public void beforeGroupCreateOrUpdate() { super.beforeGroupCreateOrUpdate(); if (this.buildServiceTask != null) { this.addDependency(this.buildServiceTask); this.buildServiceTask = null; } } @Override public Mono<SpringAppDeployment> createResourceAsync() { return manager().serviceClient().getDeployments().createOrUpdateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<SpringAppDeployment> updateResourceAsync() { return manager().serviceClient().getDeployments().updateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<Void> deleteResourceAsync() { return manager().serviceClient().getDeployments().deleteAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override protected Mono<DeploymentResourceInner> getInnerAsync() { return manager().serviceClient().getDeployments().getAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String id() { return innerModel().id(); } @Override public SpringAppDeploymentImpl update() { prepareUpdate(); return this; } private AppPlatformManager manager() { return parent().manager(); } @Override public SpringAppImpl attach() { return parent().addActiveDeployment(this); } private SpringAppImpl app() { return parent(); } private SpringServiceImpl service() { return parent().parent(); } private class BuildServiceTask implements FunctionalTaskItem { private final File file; private final boolean sourceCodeTarGz; private final List<String> configFilePatterns; private String module; BuildServiceTask(File file, List<String> configFilePatterns) { this(file, configFilePatterns, false); } BuildServiceTask(File file, List<String> configFilePatterns, boolean sourceCodeTarGz) { this.file = file; this.configFilePatterns = configFilePatterns; this.sourceCodeTarGz = sourceCodeTarGz; } @Override public Mono<Indexable> apply(Context context) { return app().getResourceUploadUrlAsync() .flatMap(option -> uploadAndBuildAsync(file, option) .flatMap(buildId -> { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) innerModel().properties().source(); userSourceInfo.withBuildResultId(buildId); withConfigFilePatterns(this.configFilePatterns); return Mono.empty(); }).then(context.voidMono())); } private Mono<String> uploadAndBuildAsync(File source, ResourceUploadDefinition option) { AtomicLong pollCount = new AtomicLong(); Duration pollDuration = 
manager().serviceClient().getDefaultPollInterval(); return uploadToStorageAsync(source, option) .then(enqueueBuildAsync(option)) .flatMap(buildId -> manager().serviceClient().getBuildServices() .getBuildResultWithResponseAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, parent().name(), ResourceUtils.nameFromResourceId(buildId)) .flatMap(response -> { if (pollDuration.multipliedBy(pollCount.get()).compareTo(MAX_BUILD_TIMEOUT) < 0) { BuildResultProvisioningState state = response.getValue().properties().provisioningState(); if (state == BuildResultProvisioningState.SUCCEEDED) { return Mono.just(buildId); } else if (state == BuildResultProvisioningState.QUEUING || state == BuildResultProvisioningState.BUILDING) { return Mono.empty(); } else { AppPlatformManagementClientImpl client = (AppPlatformManagementClientImpl) manager().serviceClient(); return Mono.error(new ManagementException(String.format("Build failed for file: %s, buildId: %s", file.getName(), buildId), new HttpResponseImpl<>(response, client.getSerializerAdapter()))); } } else { return Mono.error(new ManagementException(String.format("Build timeout for file: %s, buildId: %s", file.getName(), buildId), null)); } }).repeatWhenEmpty( longFlux -> longFlux .flatMap( index -> { pollCount.set(index); return Mono.delay(ResourceManagerUtils.InternalRuntimeContext.getDelayDuration(pollDuration)); }))); } private Mono<String> enqueueBuildAsync(ResourceUploadDefinition option) { BuildProperties buildProperties = new BuildProperties() .withBuilder(String.format("%s/buildservices/%s/builders/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withAgentPool(String.format("%s/buildservices/%s/agentPools/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withRelativePath(option.relativePath()); if (this.sourceCodeTarGz) { Map<String, String> buildEnv = buildProperties.env() == null ? 
new HashMap<>() : buildProperties.env(); buildProperties.withEnv(buildEnv); if (module != null) { buildEnv.put("BP_MAVEN_BUILT_MODULE", module); } } return manager().serviceClient().getBuildServices() .createOrUpdateBuildAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, app().name(), new BuildInner().withProperties(buildProperties)) .map(inner -> inner.properties().triggeredBuildResult().id()); } @SuppressWarnings("BlockingMethodInNonBlockingContext") private class HttpResponseImpl<T> extends HttpResponse { private final Response<T> response; private final SerializerAdapter serializerAdapter; protected HttpResponseImpl(Response<T> response, SerializerAdapter serializerAdapter) { super(response.getRequest()); this.response = response; this.serializerAdapter = serializerAdapter; } @Override public int getStatusCode() { return response.getStatusCode(); } @Override public String getHeaderValue(String header) { return response.getHeaders().getValue(header); } @Override public HttpHeaders getHeaders() { return response.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { try { return Flux.just(ByteBuffer.wrap(serializerAdapter.serializeToBytes(response.getValue(), SerializerEncoding.JSON))); } catch (IOException e) { return Flux.empty(); } } @Override public Mono<byte[]> getBodyAsByteArray() { try { return Mono.just(serializerAdapter.serializeToBytes(response.getValue(), SerializerEncoding.JSON)); } catch (IOException e) { return Mono.empty(); } } @Override public Mono<String> getBodyAsString() { return Mono.just(serializerAdapter.serializeRaw(response.getValue())); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsString(); } } } }
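The tail of the row above polls the build result with a Reactor `repeatWhenEmpty` loop capped by MAX_BUILD_TIMEOUT. Below is a minimal, self-contained sketch of that polling pattern, not the SDK code itself: the remote build state is simulated with an in-memory counter, and the interval and timeout values are illustrative.

import reactor.core.publisher.Mono;

import java.time.Duration;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

// Minimal sketch of the poll-until-done pattern used in uploadAndBuildAsync.
// The "remote" build state is simulated; interval and timeout are illustrative.
public final class PollUntilDoneSketch {

    private static final Duration POLL_INTERVAL = Duration.ofMillis(100);
    private static final Duration MAX_TIMEOUT = Duration.ofSeconds(5);

    public static void main(String[] args) {
        AtomicInteger remoteState = new AtomicInteger(); // pretend build: done after 3 polls
        AtomicLong pollCount = new AtomicLong();

        String result = Mono.defer(() -> {
                // give up once the accumulated delay exceeds the cap
                if (POLL_INTERVAL.multipliedBy(pollCount.get()).compareTo(MAX_TIMEOUT) >= 0) {
                    return Mono.<String>error(new IllegalStateException("build timeout"));
                }
                // completing empty means "still building" and triggers another repeat
                return remoteState.incrementAndGet() >= 3
                    ? Mono.just("SUCCEEDED")
                    : Mono.<String>empty();
            })
            .repeatWhenEmpty(attempts -> attempts.flatMap(index -> {
                pollCount.set(index);              // index counts completed (empty) attempts
                return Mono.delay(POLL_INTERVAL);  // wait before re-subscribing
            }))
            .block();

        System.out.println("final state: " + result);
    }
}

Completing empty for in-progress states is what makes `repeatWhenEmpty` re-subscribe; both the error and the success value terminate the loop.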
Protection on type=String
public List<String> configFilePatterns() { Map<String, Map<String, Object>> addonConfigs = this.innerModel().properties().deploymentSettings().addonConfigs(); if (addonConfigs == null) { return Collections.emptyList(); } Map<String, Object> configurationConfigs = addonConfigs.get(Constants.APPLICATION_CONFIGURATION_SERVICE_KEY); if (configurationConfigs == null) { return Collections.emptyList(); } String patterns = (String) configurationConfigs.get(Constants.CONFIG_FILE_PATTERNS_KEY); if (CoreUtils.isNullOrEmpty(patterns)) { return Collections.emptyList(); } return Arrays.asList(patterns.split(",")); }
String patterns = (String) configurationConfigs.get(Constants.CONFIG_FILE_PATTERNS_KEY);
public List<String> configFilePatterns() { Map<String, Map<String, Object>> addonConfigs = this.innerModel().properties().deploymentSettings().addonConfigs(); if (addonConfigs == null) { return Collections.emptyList(); } Map<String, Object> configurationConfigs = addonConfigs.get(Constants.APPLICATION_CONFIGURATION_SERVICE_KEY); if (configurationConfigs == null) { return Collections.emptyList(); } if (configurationConfigs.get(Constants.CONFIG_FILE_PATTERNS_KEY) instanceof String) { String patterns = (String) configurationConfigs.get(Constants.CONFIG_FILE_PATTERNS_KEY); return Collections.unmodifiableList(Arrays.asList(patterns.split(","))); } else { return Collections.emptyList(); } }
class SpringAppDeploymentImpl extends ExternalChildResourceImpl<SpringAppDeployment, DeploymentResourceInner, SpringAppImpl, SpringApp> implements SpringAppDeployment, SpringAppDeployment.Definition<SpringAppImpl, SpringAppDeploymentImpl>, SpringAppDeployment.Update { private BuildServiceTask buildServiceTask; SpringAppDeploymentImpl(String name, SpringAppImpl parent, DeploymentResourceInner innerObject) { super(name, parent, innerObject); } @Override public String appName() { if (innerModel().properties() == null) { return null; } return innerModel().name(); } @Override public DeploymentSettings settings() { if (innerModel().properties() == null) { return null; } return innerModel().properties().deploymentSettings(); } @Override public DeploymentResourceStatus status() { if (innerModel().properties() == null) { return null; } return innerModel().properties().status(); } @Override public boolean isActive() { if (innerModel().properties() == null) { return false; } return innerModel().properties().active(); } @Override public List<DeploymentInstance> instances() { if (innerModel().properties() == null) { return null; } return innerModel().properties().instances(); } @Override public void start() { startAsync().block(); } @Override public Mono<Void> startAsync() { return manager().serviceClient().getDeployments().startAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void stop() { stopAsync().block(); } @Override public Mono<Void> stopAsync() { return manager().serviceClient().getDeployments().stopAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void restart() { restartAsync().block(); } @Override public Mono<Void> restartAsync() { return manager().serviceClient().getDeployments().restartAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String getLogFileUrl() { return getLogFileUrlAsync().block(); } @Override public Mono<String> getLogFileUrlAsync() { return manager().serviceClient().getDeployments().getLogFileUrlAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ) .map(LogFileUrlResponseInner::url); } @Override private void ensureDeploySettings() { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().deploymentSettings() == null) { innerModel().properties().withDeploymentSettings(new DeploymentSettings()); } if (innerModel().properties().deploymentSettings().resourceRequests() == null) { innerModel().properties().deploymentSettings().withResourceRequests(new ResourceRequests()); } } private void ensureSource() { ensureSource(null); } private void ensureSource(UserSourceType type) { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().source() == null) { if (type == UserSourceType.JAR) { innerModel().properties().withSource(new JarUploadedUserSourceInfo()); } else if (type == UserSourceType.SOURCE) { innerModel().properties().withSource(new SourceUploadedUserSourceInfo()); } else if (type == UserSourceType.NET_CORE_ZIP) { innerModel().properties().withSource(new NetCoreZipUploadedUserSourceInfo()); } else if (type == UserSourceType.BUILD_RESULT) { innerModel().properties().withSource(new BuildResultUserSourceInfo()); } else { innerModel().properties().withSource(new 
UserSourceInfo()); } } } @Override public SpringAppDeploymentImpl withJarFile(File jar) { if (service().isEnterpriseTier()) { return withJarFile(jar, null); } else { ensureSource(UserSourceType.JAR); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorage(jar, option) .then(context.voidMono()); }) ); return this; } } private ShareFileAsyncClient createShareFileAsyncClient(ResourceUploadDefinition option) { return new ShareFileClientBuilder() .endpoint(option.uploadUrl()) .httpClient(manager().httpPipeline().getHttpClient()) .buildFileAsyncClient(); } private Mono<Void> uploadToStorage(File source, ResourceUploadDefinition option) { try { ShareFileAsyncClient shareFileAsyncClient = createShareFileAsyncClient(option); return shareFileAsyncClient.create(source.length()) .flatMap(fileInfo -> shareFileAsyncClient.uploadFromFile(source.getAbsolutePath())) .then(Mono.empty()); } catch (Exception e) { return Mono.error(e); } } @Override public SpringAppDeploymentImpl withExistingSource(UserSourceType type, String relativePath) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); UserSourceInfo sourceInfo = innerModel().properties().source(); if (sourceInfo instanceof BuildResultUserSourceInfo) { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) sourceInfo; userSourceInfo.withBuildResultId(relativePath); } } else { ensureSource(type); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof UploadedUserSourceInfo) { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRelativePath(relativePath); } } return this; } @Override public SpringAppDeploymentImpl withJarFile(File jar, List<String> configFilePatterns) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask = new BuildServiceTask(jar, configFilePatterns); return this; } private boolean isEnterpriseTier() { return service().isEnterpriseTier(); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz) { return withSourceCodeTarGzFile(sourceCodeTarGz, null); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz, List<String> configFilePatterns) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask = new BuildServiceTask(sourceCodeTarGz, configFilePatterns, true); } else { ensureSource(UserSourceType.SOURCE); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorage(sourceCodeTarGz, option) .then(context.voidMono()); }) ); } return this; } @Override public SpringAppDeploymentImpl withTargetModule(String moduleName) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask.module = moduleName; } else { ensureSource(UserSourceType.SOURCE); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo sourceUploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; 
sourceUploadedUserSourceInfo.withArtifactSelector(moduleName); } } return this; } @Override public SpringAppDeploymentImpl withSingleModule() { return withTargetModule(null); } @Override public SpringAppDeploymentImpl withInstance(int count) { if (innerModel().sku() == null) { innerModel().withSku(service().sku()); } if (innerModel().sku() == null) { innerModel().withSku(new Sku().withName("B0")); } innerModel().sku().withCapacity(count); return this; } @Override public SpringAppDeploymentImpl withCpu(int cpuCount) { return withCpu(String.valueOf(cpuCount)); } @Override public SpringAppDeploymentImpl withCpu(String cpuCount) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withCpu(cpuCount); return this; } @Override public SpringAppDeploymentImpl withMemory(int sizeInGB) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withMemory(String.format("%dGi", sizeInGB)); return this; } @Override public SpringAppDeploymentImpl withMemory(String size) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withMemory(size); return this; } @Override public SpringAppDeploymentImpl withRuntime(RuntimeVersion version) { UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof NetCoreZipUploadedUserSourceInfo) { NetCoreZipUploadedUserSourceInfo uploadedUserSourceInfo = (NetCoreZipUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo uploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } return this; } @Override public SpringAppDeploymentImpl withJvmOptions(String jvmOptions) { if (isEnterpriseTier()) { withEnvironment("JAVA_OPTS", jvmOptions); } else { ensureSource(UserSourceType.JAR); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withJvmOptions(jvmOptions); } } return this; } private void ensureEnvironments() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().environmentVariables() == null) { innerModel().properties().deploymentSettings().withEnvironmentVariables(new HashMap<>()); } } private void ensureAddonConfigs() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().addonConfigs() == null) { innerModel().properties().deploymentSettings().withAddonConfigs(new HashMap<>()); } } @Override public SpringAppDeploymentImpl withEnvironment(String key, String value) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().put(key, value); return this; } @Override public SpringAppDeploymentImpl withoutEnvironment(String key) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().remove(key); return this; } @Override public SpringAppDeploymentImpl withVersionName(String versionName) { ensureSource(); innerModel().properties().source().withVersion(versionName); return this; } @Override 
public SpringAppDeploymentImpl withActivation() { this.addPostRunDependent( context -> parent().update().withActiveDeployment(name()).applyAsync() .map(Function.identity()) ); return this; } @Override public SpringAppDeploymentImpl withConfigFilePatterns(List<String> configFilePatterns) { ensureAddonConfigs(); Map<String, Map<String, Object>> addonConfigs = innerModel().properties().deploymentSettings().addonConfigs(); addonConfigs.computeIfAbsent(Constants.APPLICATION_CONFIGURATION_SERVICE_KEY, s -> { Map<String, Object> config = new HashMap<>(); config.put( Constants.CONFIG_FILE_PATTERNS_KEY, CoreUtils.isNullOrEmpty(configFilePatterns) ? "" : String.join(",", configFilePatterns)); return config; }); return this; } @Override public void beforeGroupCreateOrUpdate() { super.beforeGroupCreateOrUpdate(); if (this.buildServiceTask != null) { this.addDependency(this.buildServiceTask); this.buildServiceTask = null; } } @Override public Mono<SpringAppDeployment> createResourceAsync() { return manager().serviceClient().getDeployments().createOrUpdateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<SpringAppDeployment> updateResourceAsync() { return manager().serviceClient().getDeployments().updateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<Void> deleteResourceAsync() { return manager().serviceClient().getDeployments().deleteAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override protected Mono<DeploymentResourceInner> getInnerAsync() { return manager().serviceClient().getDeployments().getAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String id() { return innerModel().id(); } @Override public SpringAppDeploymentImpl update() { prepareUpdate(); return this; } private AppPlatformManager manager() { return parent().manager(); } @Override public SpringAppImpl attach() { return parent().addActiveDeployment(this); } private SpringAppImpl app() { return parent(); } private SpringServiceImpl service() { return parent().parent(); } private class BuildServiceTask implements FunctionalTaskItem { private final File file; private final boolean sourceCodeTarGz; private final List<String> configFilePatterns; private String module; BuildServiceTask(File file, List<String> configFilePatterns) { this(file, configFilePatterns, false); } BuildServiceTask(File file, List<String> configFilePatterns, boolean sourceCodeTarGz) { this.file = file; this.configFilePatterns = configFilePatterns; this.sourceCodeTarGz = sourceCodeTarGz; } @Override public Mono<Indexable> apply(Context context) { return app().getResourceUploadUrlAsync() .flatMap(option -> uploadAndBuild(file, option) .flatMap(buildId -> { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) innerModel().properties().source(); userSourceInfo.withBuildResultId(buildId); withConfigFilePatterns(this.configFilePatterns); return Mono.empty(); }).then(context.voidMono())); } private Mono<String> uploadAndBuild(File source, ResourceUploadDefinition option) { AppPlatformManagementClientImpl client = (AppPlatformManagementClientImpl) manager().serviceClient(); return uploadToStorage(source, option) .then( new PollerFlux<>( 
manager().serviceClient().getDefaultPollInterval(), context -> enqueueBuild(option, context), this::waitForBuild, (pollResultPollingContext, pollResultPollResponse) -> Mono.error(new RuntimeException("build canceled")), this::getBuildResult) .last() .flatMap(client::getLroFinalResultOrError) .flatMap((Function<Object, Mono<String>>) o -> { BuildResultInner result = (BuildResultInner) o; return Mono.just(result.id()); }) ); } private Mono<BuildResultInner> getBuildResult(PollingContext<PollResult<BuildInner>> context) { return manager().serviceClient().getBuildServices() .getBuildResultAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, parent().name(), ResourceUtils.nameFromResourceId(context.getData("buildId"))); } private Mono<PollResponse<PollResult<BuildInner>>> waitForBuild(PollingContext<PollResult<BuildInner>> context) { return getBuildResult(context) .flatMap((Function<BuildResultInner, Mono<PollResponse<PollResult<BuildInner>>>>) buildResultInner -> { BuildResultProvisioningState state = buildResultInner.properties().provisioningState(); PollResult<BuildInner> emptyResult = new PollResult<>(new BuildInner().withProperties(new BuildProperties())); if (state == BuildResultProvisioningState.SUCCEEDED) { return Mono.just(new PollResponse<>(LongRunningOperationStatus.SUCCESSFULLY_COMPLETED, emptyResult)); } else if (state == BuildResultProvisioningState.FAILED || state == BuildResultProvisioningState.DELETING) { return Mono.error(new RuntimeException("build failed")); } else if (state == BuildResultProvisioningState.QUEUING) { return Mono.just(new PollResponse<>(LongRunningOperationStatus.NOT_STARTED, emptyResult)); } return Mono.just(new PollResponse<>(LongRunningOperationStatus.IN_PROGRESS, emptyResult)); }); } private Mono<PollResult<BuildInner>> enqueueBuild(ResourceUploadDefinition option, PollingContext<PollResult<BuildInner>> context) { BuildProperties buildProperties = new BuildProperties() .withBuilder(String.format("%s/buildservices/%s/builders/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withAgentPool(String.format("%s/buildservices/%s/agentPools/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withRelativePath(option.relativePath()); if (this.sourceCodeTarGz) { Map<String, String> buildEnv = buildProperties.env() == null ? new HashMap<>() : buildProperties.env(); buildProperties.withEnv(buildEnv); if (module != null) { buildEnv.put("BP_MAVEN_BUILT_MODULE", module); } } return manager().serviceClient().getBuildServices() .createOrUpdateBuildAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, app().name(), new BuildInner().withProperties(buildProperties)) .map(inner -> { context.setData("buildId", inner.properties().triggeredBuildResult().id()); return new PollResult<>(inner); }); } } }
class SpringAppDeploymentImpl extends ExternalChildResourceImpl<SpringAppDeployment, DeploymentResourceInner, SpringAppImpl, SpringApp> implements SpringAppDeployment, SpringAppDeployment.Definition<SpringAppImpl, SpringAppDeploymentImpl>, SpringAppDeployment.Update { private static final Duration MAX_BUILD_TIMEOUT = Duration.ofHours(1); private BuildServiceTask buildServiceTask; SpringAppDeploymentImpl(String name, SpringAppImpl parent, DeploymentResourceInner innerObject) { super(name, parent, innerObject); } @Override public String appName() { if (innerModel().properties() == null) { return null; } return innerModel().name(); } @Override public DeploymentSettings settings() { if (innerModel().properties() == null) { return null; } return innerModel().properties().deploymentSettings(); } @Override public DeploymentResourceStatus status() { if (innerModel().properties() == null) { return null; } return innerModel().properties().status(); } @Override public boolean isActive() { if (innerModel().properties() == null) { return false; } return innerModel().properties().active(); } @Override public List<DeploymentInstance> instances() { if (innerModel().properties() == null) { return null; } return innerModel().properties().instances(); } @Override public void start() { startAsync().block(); } @Override public Mono<Void> startAsync() { return manager().serviceClient().getDeployments().startAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void stop() { stopAsync().block(); } @Override public Mono<Void> stopAsync() { return manager().serviceClient().getDeployments().stopAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public void restart() { restartAsync().block(); } @Override public Mono<Void> restartAsync() { return manager().serviceClient().getDeployments().restartAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String getLogFileUrl() { return getLogFileUrlAsync().block(); } @Override public Mono<String> getLogFileUrlAsync() { return manager().serviceClient().getDeployments().getLogFileUrlAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ) .map(LogFileUrlResponseInner::url); } @Override private void ensureDeploySettings() { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().deploymentSettings() == null) { innerModel().properties().withDeploymentSettings(new DeploymentSettings()); } if (innerModel().properties().deploymentSettings().resourceRequests() == null) { innerModel().properties().deploymentSettings().withResourceRequests(new ResourceRequests()); } } private void ensureSource() { ensureSource(null); } private void ensureSource(UserSourceType type) { if (innerModel().properties() == null) { innerModel().withProperties(new DeploymentResourceProperties()); } if (innerModel().properties().source() == null) { if (type == UserSourceType.JAR) { innerModel().properties().withSource(new JarUploadedUserSourceInfo()); } else if (type == UserSourceType.SOURCE) { innerModel().properties().withSource(new SourceUploadedUserSourceInfo()); } else if (type == UserSourceType.NET_CORE_ZIP) { innerModel().properties().withSource(new NetCoreZipUploadedUserSourceInfo()); } else if (type == UserSourceType.BUILD_RESULT) { innerModel().properties().withSource(new 
BuildResultUserSourceInfo()); } else { innerModel().properties().withSource(new UserSourceInfo()); } } } @Override public SpringAppDeploymentImpl withJarFile(File jar) { if (service().isEnterpriseTier()) { return withJarFile(jar, null); } else { ensureSource(UserSourceType.JAR); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorageAsync(jar, option) .then(context.voidMono()); }) ); return this; } } private ShareFileAsyncClient createShareFileAsyncClient(ResourceUploadDefinition option) { return new ShareFileClientBuilder() .endpoint(option.uploadUrl()) .httpClient(manager().httpPipeline().getHttpClient()) .buildFileAsyncClient(); } private Mono<Void> uploadToStorageAsync(File source, ResourceUploadDefinition option) { try { ShareFileAsyncClient shareFileAsyncClient = createShareFileAsyncClient(option); return shareFileAsyncClient.create(source.length()) .flatMap(fileInfo -> shareFileAsyncClient.uploadFromFile(source.getAbsolutePath())) .then(Mono.empty()); } catch (Exception e) { return Mono.error(e); } } @Override public SpringAppDeploymentImpl withExistingSource(UserSourceType type, String relativePath) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); UserSourceInfo sourceInfo = innerModel().properties().source(); if (sourceInfo instanceof BuildResultUserSourceInfo) { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) sourceInfo; userSourceInfo.withBuildResultId(relativePath); } } else { ensureSource(type); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof UploadedUserSourceInfo) { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRelativePath(relativePath); } } return this; } @Override public SpringAppDeploymentImpl withJarFile(File jar, List<String> configFilePatterns) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask = new BuildServiceTask(jar, configFilePatterns); return this; } private boolean isEnterpriseTier() { return service().isEnterpriseTier(); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz) { return withSourceCodeTarGzFile(sourceCodeTarGz, null); } @Override public SpringAppDeploymentImpl withSourceCodeTarGzFile(File sourceCodeTarGz, List<String> configFilePatterns) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask = new BuildServiceTask(sourceCodeTarGz, configFilePatterns, true); } else { ensureSource(UserSourceType.SOURCE); this.addDependency( context -> parent().getResourceUploadUrlAsync() .flatMap(option -> { UploadedUserSourceInfo uploadedUserSourceInfo = (UploadedUserSourceInfo) innerModel().properties().source(); uploadedUserSourceInfo.withRelativePath(option.relativePath()); return uploadToStorageAsync(sourceCodeTarGz, option) .then(context.voidMono()); }) ); } return this; } @Override public SpringAppDeploymentImpl withTargetModule(String moduleName) { if (isEnterpriseTier()) { ensureSource(UserSourceType.BUILD_RESULT); this.buildServiceTask.module = moduleName; } else { ensureSource(UserSourceType.SOURCE); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo 
sourceUploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; sourceUploadedUserSourceInfo.withArtifactSelector(moduleName); } } return this; } @Override public SpringAppDeploymentImpl withSingleModule() { return withTargetModule(null); } @Override public SpringAppDeploymentImpl withInstance(int count) { if (innerModel().sku() == null) { innerModel().withSku(service().sku()); } if (innerModel().sku() == null) { innerModel().withSku(new Sku().withName("B0")); } innerModel().sku().withCapacity(count); return this; } @Override public SpringAppDeploymentImpl withCpu(int cpuCount) { return withCpu(String.valueOf(cpuCount)); } @Override public SpringAppDeploymentImpl withCpu(String cpuCount) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withCpu(cpuCount); return this; } @Override public SpringAppDeploymentImpl withMemory(int sizeInGB) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withMemory(String.format("%dGi", sizeInGB)); return this; } @Override public SpringAppDeploymentImpl withMemory(String size) { ensureDeploySettings(); innerModel().properties().deploymentSettings().resourceRequests().withMemory(size); return this; } @Override public SpringAppDeploymentImpl withRuntime(RuntimeVersion version) { UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof NetCoreZipUploadedUserSourceInfo) { NetCoreZipUploadedUserSourceInfo uploadedUserSourceInfo = (NetCoreZipUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } else if (userSourceInfo instanceof SourceUploadedUserSourceInfo) { SourceUploadedUserSourceInfo uploadedUserSourceInfo = (SourceUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withRuntimeVersion(version.toString()); } return this; } @Override public SpringAppDeploymentImpl withJvmOptions(String jvmOptions) { if (isEnterpriseTier()) { withEnvironment("JAVA_OPTS", jvmOptions); } else { ensureSource(UserSourceType.JAR); UserSourceInfo userSourceInfo = innerModel().properties().source(); if (userSourceInfo instanceof JarUploadedUserSourceInfo) { JarUploadedUserSourceInfo uploadedUserSourceInfo = (JarUploadedUserSourceInfo) userSourceInfo; uploadedUserSourceInfo.withJvmOptions(jvmOptions); } } return this; } private void ensureEnvironments() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().environmentVariables() == null) { innerModel().properties().deploymentSettings().withEnvironmentVariables(new HashMap<>()); } } private void ensureAddonConfigs() { ensureDeploySettings(); if (innerModel().properties().deploymentSettings().addonConfigs() == null) { innerModel().properties().deploymentSettings().withAddonConfigs(new HashMap<>()); } } @Override public SpringAppDeploymentImpl withEnvironment(String key, String value) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().put(key, value); return this; } @Override public SpringAppDeploymentImpl withoutEnvironment(String key) { ensureEnvironments(); innerModel().properties().deploymentSettings().environmentVariables().remove(key); return this; } @Override public SpringAppDeploymentImpl withVersionName(String versionName) { ensureSource(); 
innerModel().properties().source().withVersion(versionName); return this; } @Override public SpringAppDeploymentImpl withActivation() { this.addPostRunDependent( context -> parent().update().withActiveDeployment(name()).applyAsync() .map(Function.identity()) ); return this; } @Override public SpringAppDeploymentImpl withConfigFilePatterns(List<String> configFilePatterns) { ensureAddonConfigs(); Map<String, Map<String, Object>> addonConfigs = innerModel().properties().deploymentSettings().addonConfigs(); addonConfigs.computeIfAbsent(Constants.APPLICATION_CONFIGURATION_SERVICE_KEY, s -> { Map<String, Object> config = new HashMap<>(); config.put( Constants.CONFIG_FILE_PATTERNS_KEY, CoreUtils.isNullOrEmpty(configFilePatterns) ? "" : String.join(",", configFilePatterns)); return config; }); return this; } @Override public void beforeGroupCreateOrUpdate() { super.beforeGroupCreateOrUpdate(); if (this.buildServiceTask != null) { this.addDependency(this.buildServiceTask); this.buildServiceTask = null; } } @Override public Mono<SpringAppDeployment> createResourceAsync() { return manager().serviceClient().getDeployments().createOrUpdateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<SpringAppDeployment> updateResourceAsync() { return manager().serviceClient().getDeployments().updateAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name(), innerModel() ) .map(inner -> { setInner(inner); return this; }); } @Override public Mono<Void> deleteResourceAsync() { return manager().serviceClient().getDeployments().deleteAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override protected Mono<DeploymentResourceInner> getInnerAsync() { return manager().serviceClient().getDeployments().getAsync( parent().parent().resourceGroupName(), parent().parent().name(), parent().name(), name() ); } @Override public String id() { return innerModel().id(); } @Override public SpringAppDeploymentImpl update() { prepareUpdate(); return this; } private AppPlatformManager manager() { return parent().manager(); } @Override public SpringAppImpl attach() { return parent().addActiveDeployment(this); } private SpringAppImpl app() { return parent(); } private SpringServiceImpl service() { return parent().parent(); } private class BuildServiceTask implements FunctionalTaskItem { private final File file; private final boolean sourceCodeTarGz; private final List<String> configFilePatterns; private String module; BuildServiceTask(File file, List<String> configFilePatterns) { this(file, configFilePatterns, false); } BuildServiceTask(File file, List<String> configFilePatterns, boolean sourceCodeTarGz) { this.file = file; this.configFilePatterns = configFilePatterns; this.sourceCodeTarGz = sourceCodeTarGz; } @Override public Mono<Indexable> apply(Context context) { return app().getResourceUploadUrlAsync() .flatMap(option -> uploadAndBuildAsync(file, option) .flatMap(buildId -> { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) innerModel().properties().source(); userSourceInfo.withBuildResultId(buildId); withConfigFilePatterns(this.configFilePatterns); return Mono.empty(); }).then(context.voidMono())); } private Mono<String> uploadAndBuildAsync(File source, ResourceUploadDefinition option) { AtomicLong pollCount = new AtomicLong(); Duration pollDuration = 
manager().serviceClient().getDefaultPollInterval(); return uploadToStorageAsync(source, option) .then(enqueueBuildAsync(option)) .flatMap(buildId -> manager().serviceClient().getBuildServices() .getBuildResultWithResponseAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, parent().name(), ResourceUtils.nameFromResourceId(buildId)) .flatMap(response -> { if (pollDuration.multipliedBy(pollCount.get()).compareTo(MAX_BUILD_TIMEOUT) < 0) { BuildResultProvisioningState state = response.getValue().properties().provisioningState(); if (state == BuildResultProvisioningState.SUCCEEDED) { return Mono.just(buildId); } else if (state == BuildResultProvisioningState.QUEUING || state == BuildResultProvisioningState.BUILDING) { return Mono.empty(); } else { AppPlatformManagementClientImpl client = (AppPlatformManagementClientImpl) manager().serviceClient(); return Mono.error(new ManagementException(String.format("Build failed for file: %s, buildId: %s", file.getName(), buildId), new HttpResponseImpl<>(response, client.getSerializerAdapter()))); } } else { return Mono.error(new ManagementException(String.format("Build timeout for file: %s, buildId: %s", file.getName(), buildId), null)); } }).repeatWhenEmpty( longFlux -> longFlux .flatMap( index -> { pollCount.set(index); return Mono.delay(ResourceManagerUtils.InternalRuntimeContext.getDelayDuration(pollDuration)); }))); } private Mono<String> enqueueBuildAsync(ResourceUploadDefinition option) { BuildProperties buildProperties = new BuildProperties() .withBuilder(String.format("%s/buildservices/%s/builders/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withAgentPool(String.format("%s/buildservices/%s/agentPools/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withRelativePath(option.relativePath()); if (this.sourceCodeTarGz) { Map<String, String> buildEnv = buildProperties.env() == null ? 
new HashMap<>() : buildProperties.env(); buildProperties.withEnv(buildEnv); if (module != null) { buildEnv.put("BP_MAVEN_BUILT_MODULE", module); } } return manager().serviceClient().getBuildServices() .createOrUpdateBuildAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, app().name(), new BuildInner().withProperties(buildProperties)) .map(inner -> inner.properties().triggeredBuildResult().id()); } @SuppressWarnings("BlockingMethodInNonBlockingContext") private class HttpResponseImpl<T> extends HttpResponse { private final Response<T> response; private final SerializerAdapter serializerAdapter; protected HttpResponseImpl(Response<T> response, SerializerAdapter serializerAdapter) { super(response.getRequest()); this.response = response; this.serializerAdapter = serializerAdapter; } @Override public int getStatusCode() { return response.getStatusCode(); } @Override public String getHeaderValue(String header) { return response.getHeaders().getValue(header); } @Override public HttpHeaders getHeaders() { return response.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { try { return Flux.just(ByteBuffer.wrap(serializerAdapter.serializeToBytes(response.getValue(), SerializerEncoding.JSON))); } catch (IOException e) { return Flux.empty(); } } @Override public Mono<byte[]> getBodyAsByteArray() { try { return Mono.just(serializerAdapter.serializeToBytes(response.getValue(), SerializerEncoding.JSON)); } catch (IOException e) { return Mono.empty(); } } @Override public Mono<String> getBodyAsString() { return Mono.just(serializerAdapter.serializeRaw(response.getValue())); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsString(); } } } }
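The fix in this row guards the cast of the addon-config value with an `instanceof` check before treating it as a String. A minimal sketch of that defensive read is shown below; the key name and helper class are illustrative placeholders, not the SDK's `Constants`, and an unexpected value type simply degrades to an empty list instead of throwing a ClassCastException.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

// Minimal sketch of the defensive read added in configFilePatterns(): a value
// pulled out of a Map<String, Object> is only cast after an instanceof check.
// The key name and class are illustrative, not the SDK's Constants.
final class AddonConfigReadSketch {

    private static final String CONFIG_FILE_PATTERNS_KEY = "configFilePatterns"; // assumed key name

    static List<String> readPatterns(Map<String, Object> configurationConfigs) {
        Object raw = configurationConfigs == null
            ? null
            : configurationConfigs.get(CONFIG_FILE_PATTERNS_KEY);
        if (raw instanceof String && !((String) raw).isEmpty()) {
            return Collections.unmodifiableList(Arrays.asList(((String) raw).split(",")));
        }
        return Collections.emptyList();
    }

    public static void main(String[] args) {
        System.out.println(readPatterns(
            Map.<String, Object>of(CONFIG_FILE_PATTERNS_KEY, "api-gateway,customers-service")));
        System.out.println(readPatterns(
            Map.<String, Object>of(CONFIG_FILE_PATTERNS_KEY, 42))); // wrong type -> []
    }
}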
Why is it called `configFilePatterns`?
public void canCRUDEnterpriseTierDeployment() throws Exception { allowAllSSL(); File tarGzFile = downloadFile(PETCLINIC_TAR_GZ_URL); File jarFile = downloadFile(PETCLINIC_GATEWAY_JAR_URL); String serviceName = generateRandomResourceName("springsvc", 15); Region region = Region.US_EAST; List<String> configFilePatterns = Arrays.asList("api-gateway", "customers-service"); SpringService service = appPlatformManager.springServices().define(serviceName) .withRegion(region) .withNewResourceGroup(rgName) .withEnterpriseTierSku() .withDefaultGitRepository(PETCLINIC_CONFIG_URL, "master", configFilePatterns) .create(); String deploymentName = generateRandomResourceName("deploy", 15); List<String> apiGatewayConfigFilePatterns = Arrays.asList("api-gateway"); String appName = "api-gateway"; SpringApp app = service.apps().define(appName) .defineActiveDeployment(deploymentName) .withJarFile(jarFile) .withInstance(2) .withCpu("500m") .withMemory("512Mi") .attach() .withDefaultPublicEndpoint() .withConfigurationServiceBinding() .create(); SpringAppDeployment deployment = app.deployments().getByName(deploymentName); Assertions.assertTrue(CoreUtils.isNullOrEmpty(deployment.configFilePatterns())); deployment.update() .withConfigFilePatterns(apiGatewayConfigFilePatterns) .apply(); deployment.refresh(); Assertions.assertFalse(CoreUtils.isNullOrEmpty(deployment.configFilePatterns())); Assertions.assertNotNull(app.url()); Assertions.assertNotNull(app.activeDeploymentName()); Assertions.assertEquals(1, app.deployments().list().stream().count()); String appName2 = "customers-service"; String module = "spring-petclinic-customers-service"; List<String> customerServiceConfigFilePatterns = Arrays.asList("customers-service"); SpringApp app2 = service.apps().define(appName2) .defineActiveDeployment(deploymentName) .withSourceCodeTarGzFile(tarGzFile, customerServiceConfigFilePatterns) .withTargetModule(module) .attach() .withConfigurationServiceBinding() .create(); Assertions.assertNull(app2.url()); SpringAppDeployment customersDeployment = app2.deployments().getByName(deploymentName); Assertions.assertEquals(customerServiceConfigFilePatterns, customersDeployment.configFilePatterns()); }
List<String> configFilePatterns = Arrays.asList("api-gateway", "customers-service");
public void canCRUDEnterpriseTierDeployment() throws Exception { allowAllSSL(); File tarGzFile = downloadFile(PETCLINIC_TAR_GZ_URL); File jarFile = downloadFile(PETCLINIC_GATEWAY_JAR_URL); String serviceName = generateRandomResourceName("springsvc", 15); Region region = Region.US_EAST; List<String> configFilePatterns = Arrays.asList("api-gateway", "customers-service"); SpringService service = appPlatformManager.springServices().define(serviceName) .withRegion(region) .withNewResourceGroup(rgName) .withEnterpriseTierSku() .withDefaultGitRepository(PETCLINIC_CONFIG_URL, "master", configFilePatterns) .create(); String deploymentName = generateRandomResourceName("deploy", 15); List<String> apiGatewayConfigFilePatterns = Arrays.asList("api-gateway"); String appName = "api-gateway"; SpringApp app = service.apps().define(appName) .defineActiveDeployment(deploymentName) .withJarFile(jarFile) .withInstance(2) .withCpu("500m") .withMemory("512Mi") .attach() .withDefaultPublicEndpoint() .withConfigurationServiceBinding() .create(); SpringAppDeployment deployment = app.deployments().getByName(deploymentName); Assertions.assertTrue(CoreUtils.isNullOrEmpty(deployment.configFilePatterns())); deployment.update() .withConfigFilePatterns(apiGatewayConfigFilePatterns) .apply(); deployment.refresh(); Assertions.assertFalse(CoreUtils.isNullOrEmpty(deployment.configFilePatterns())); Assertions.assertNotNull(app.url()); Assertions.assertNotNull(app.activeDeploymentName()); Assertions.assertEquals(1, app.deployments().list().stream().count()); String appName2 = "customers-service"; String module = "spring-petclinic-customers-service"; List<String> customerServiceConfigFilePatterns = Arrays.asList("customers-service"); SpringApp app2 = service.apps().define(appName2) .defineActiveDeployment(deploymentName) .withSourceCodeTarGzFile(tarGzFile, customerServiceConfigFilePatterns) .withTargetModule(module) .attach() .withConfigurationServiceBinding() .create(); Assertions.assertNull(app2.url()); SpringAppDeployment customersDeployment = app2.deployments().getByName(deploymentName); Assertions.assertEquals(customerServiceConfigFilePatterns, customersDeployment.configFilePatterns()); }
class SpringCloudLiveOnlyTest extends AppPlatformTest { private static final String PIGGYMETRICS_CONFIG_URL = "https: private static final String GATEWAY_JAR_URL = "https: private static final String PIGGYMETRICS_TAR_GZ_URL = "https: private static final String PETCLINIC_CONFIG_URL = "https: private static final String PETCLINIC_GATEWAY_JAR_URL = "https: private static final String PETCLINIC_TAR_GZ_URL = "https: private static final String SPRING_CLOUD_SERVICE_OBJECT_ID = "938df8e2-2b9d-40b1-940c-c75c33494239"; @Test @DoNotRecord(skipInPlayback = true) public void canCRUDDeployment() throws Exception { allowAllSSL(); String serviceName = generateRandomResourceName("springsvc", 15); String appName = "gateway"; String deploymentName = generateRandomResourceName("deploy", 15); String deploymentName1 = generateRandomResourceName("deploy", 15); Region region = Region.US_EAST; SpringService service = appPlatformManager.springServices().define(serviceName) .withRegion(region) .withNewResourceGroup(rgName) .create(); File jarFile = downloadFile(GATEWAY_JAR_URL); SpringApp app = service.apps().define(appName) .defineActiveDeployment(deploymentName) .withJarFile(jarFile) .withInstance(2) .withCpu(2) .withMemory(4) .withRuntime(RuntimeVersion.JAVA_11) .attach() .withDefaultPublicEndpoint() .create(); Assertions.assertNotNull(app.url()); Assertions.assertNotNull(app.activeDeploymentName()); Assertions.assertEquals(1, app.deployments().list().stream().count()); Assertions.assertTrue(requestSuccess(app.url())); SpringAppDeployment deployment = app.getActiveDeployment(); Assertions.assertEquals("2", deployment.settings().resourceRequests().cpu()); Assertions.assertEquals("4Gi", deployment.settings().resourceRequests().memory()); Assertions.assertEquals(2, deployment.instances().size()); File gzFile = downloadFile(PIGGYMETRICS_TAR_GZ_URL); deployment = app.deployments().define(deploymentName1) .withSourceCodeTarGzFile(gzFile) .withTargetModule("gateway") .withActivation() .create(); app.refresh(); Assertions.assertEquals(deploymentName1, app.activeDeploymentName()); Assertions.assertEquals("1", deployment.settings().resourceRequests().cpu()); Assertions.assertNotNull(deployment.getLogFileUrl()); Assertions.assertTrue(requestSuccess(app.url())); app.update() .withoutDefaultPublicEndpoint() .apply(); Assertions.assertFalse(app.isPublic()); app.deployments().deleteByName(deploymentName); Assertions.assertEquals(1, app.deployments().list().stream().count()); } @Test @DoNotRecord(skipInPlayback = true) public void canCreateCustomDomainWithSsl() throws Exception { String domainName = generateRandomResourceName("jsdkdemo-", 20) + ".com"; String certOrderName = generateRandomResourceName("cert", 15); String vaultName = generateRandomResourceName("vault", 15); String certName = generateRandomResourceName("cert", 15); String serviceName = generateRandomResourceName("springsvc", 15); String appName = "gateway"; Region region = Region.US_EAST; allowAllSSL(); String cerPassword = password(); String resourcePath = Paths.get(this.getClass().getResource("/session-records").toURI()).getParent().toString(); String cerPath = resourcePath + domainName + ".cer"; String pfxPath = resourcePath + domainName + ".pfx"; createCertificate(cerPath, pfxPath, domainName, cerPassword, "ssl." + domainName, "ssl." 
+ domainName); byte[] certificate = readAllBytes(new FileInputStream(pfxPath)); appPlatformManager.resourceManager().resourceGroups().define(rgName) .withRegion(region) .create(); DnsZone dnsZone = dnsZoneManager.zones().define(domainName) .withExistingResourceGroup(rgName) .create(); AppServiceDomain domain = appServiceManager.domains().define(domainName) .withExistingResourceGroup(rgName) .defineRegistrantContact() .withFirstName("Jon") .withLastName("Doe") .withEmail("jondoe@contoso.com") .withAddressLine1("123 4th Ave") .withCity("Redmond") .withStateOrProvince("WA") .withCountry(CountryIsoCode.UNITED_STATES) .withPostalCode("98052") .withPhoneCountryCode(CountryPhoneCode.UNITED_STATES) .withPhoneNumber("4258828080") .attach() .withDomainPrivacyEnabled(true) .withAutoRenewEnabled(false) .withExistingDnsZone(dnsZone) .create(); Vault vault = keyVaultManager.vaults().define(vaultName) .withRegion(region) .withExistingResourceGroup(rgName) .defineAccessPolicy() .forServicePrincipal(clientIdFromFile()) .allowSecretAllPermissions() .allowCertificateAllPermissions() .attach() .defineAccessPolicy() .forObjectId(SPRING_CLOUD_SERVICE_OBJECT_ID) .allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST) .allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST) .attach() .create(); CertificateClient certificateClient = new CertificateClientBuilder() .vaultUrl(vault.vaultUri()) .pipeline(appPlatformManager.httpPipeline()) .buildClient(); certificateClient.importCertificate( new ImportCertificateOptions(certName, certificate) .setPassword(cerPassword) .setEnabled(true) ); KeyStore store = KeyStore.getInstance("PKCS12"); store.load(new ByteArrayInputStream(certificate), cerPassword.toCharArray()); String alias = Collections.list(store.aliases()).get(0); String thumbprint = printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded())); SpringService service = appPlatformManager.springServices().define(serviceName) .withRegion(region) .withExistingResourceGroup(rgName) .withCertificate("test", vault.vaultUri(), certName) .create(); service.apps().define(appName).withDefaultActiveDeployment().withDefaultPublicEndpoint().create(); SpringApp app = service.apps().getByName(appName); dnsZone.update() .withCNameRecordSet("www", app.fqdn()) .withCNameRecordSet("ssl", app.fqdn()) .apply(); app.update() .withoutDefaultPublicEndpoint() .withCustomDomain(String.format("www.%s", domainName)) .withCustomDomain(String.format("ssl.%s", domainName), thumbprint) .apply(); Assertions.assertTrue(app.customDomains().validate(String.format("www.%s", domainName)).isValid()); Assertions.assertTrue(requestSuccess(String.format("http: Assertions.assertTrue(requestSuccess(String.format("https: app.update() .withHttpsOnly() .withoutCustomDomain(String.format("www.%s", domainName)) .apply(); Assertions.assertTrue(checkRedirect(String.format("http: } @Test @DoNotRecord(skipInPlayback = true) private File downloadFile(String remoteFileUrl) throws Exception { String[] split = remoteFileUrl.split("/"); String filename = split[split.length - 1]; File downloaded = new File(filename); if (!downloaded.exists()) { HttpURLConnection connection = (HttpURLConnection) new URL(remoteFileUrl).openConnection(); connection.connect(); try (InputStream inputStream = connection.getInputStream(); OutputStream outputStream = new FileOutputStream(downloaded)) { IOUtils.copy(inputStream, outputStream); } finally { connection.disconnect(); } } return downloaded; } private void 
extraTarGzSource(File folder, URL url) throws IOException { HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.connect(); try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) { TarArchiveEntry entry; while ((entry = inputStream.getNextTarEntry()) != null) { if (entry.isDirectory()) { continue; } File file = new File(folder, entry.getName()); File parent = file.getParentFile(); if (parent.exists() || parent.mkdirs()) { try (OutputStream outputStream = new FileOutputStream(file)) { IOUtils.copy(inputStream, outputStream); } } else { throw new IllegalStateException("Cannot create directory: " + parent.getAbsolutePath()); } } } finally { connection.disconnect(); } } private byte[] readAllBytes(InputStream inputStream) throws IOException { try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { byte[] data = new byte[4096]; while (true) { int size = inputStream.read(data); if (size > 0) { outputStream.write(data, 0, size); } else { return outputStream.toByteArray(); } } } } public static void createCertificate(String certPath, String pfxPath, String alias, String password, String cnName, String dnsName) throws IOException { if (new File(pfxPath).exists()) { return; } String validityInDays = "3650"; String keyAlg = "RSA"; String sigAlg = "SHA1withRSA"; String keySize = "2048"; String storeType = "pkcs12"; String command = "keytool"; String jdkPath = System.getProperty("java.home"); if (jdkPath != null && !jdkPath.isEmpty()) { jdkPath = jdkPath.concat("\\bin"); if (new File(jdkPath).isDirectory()) { command = String.format("%s%s%s", jdkPath, File.separator, command); } } else { return; } String[] commandArgs = {command, "-genkey", "-alias", alias, "-keystore", pfxPath, "-storepass", password, "-validity", validityInDays, "-keyalg", keyAlg, "-sigalg", sigAlg, "-keysize", keySize, "-storetype", storeType, "-dname", "CN=" + cnName, "-ext", "EKU=1.3.6.1.5.5.7.3.1"}; if (dnsName != null) { List<String> args = new ArrayList<>(Arrays.asList(commandArgs)); args.add("-ext"); args.add("san=dns:" + dnsName); commandArgs = args.toArray(new String[0]); } cmdInvocation(commandArgs, true); File pfxFile = new File(pfxPath); if (pfxFile.exists()) { String[] certCommandArgs = {command, "-export", "-alias", alias, "-storetype", storeType, "-keystore", pfxPath, "-storepass", password, "-rfc", "-file", certPath}; cmdInvocation(certCommandArgs, true); File cerFile = new File(pfxPath); if (!cerFile.exists()) { throw new IOException( "Error occurred while creating certificate" + String.join(" ", certCommandArgs)); } } else { throw new IOException("Error occurred while creating certificates" + String.join(" ", commandArgs)); } } public static String cmdInvocation(String[] command, boolean ignoreErrorStream) throws IOException { String result = ""; String error = ""; Process process = new ProcessBuilder(command).start(); try ( InputStream inputStream = process.getInputStream(); InputStream errorStream = process.getErrorStream(); BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); BufferedReader ebr = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8)); ) { result = br.readLine(); process.waitFor(); error = ebr.readLine(); if (error != null && (!"".equals(error))) { if (!ignoreErrorStream) { throw new IOException(error, null); } } } catch (Exception e) { throw new RuntimeException("Exception occurred while invoking command", 
e); } return result; } private static final char[] HEX_CODE = "0123456789ABCDEF".toCharArray(); private static String printHexBinary(byte[] data) { StringBuilder r = new StringBuilder(data.length * 2); for (byte b : data) { r.append(HEX_CODE[(b >> 4) & 0xF]); r.append(HEX_CODE[(b & 0xF)]); } return r.toString(); } }
class SpringCloudLiveOnlyTest extends AppPlatformTest { private static final String PIGGYMETRICS_CONFIG_URL = "https: private static final String GATEWAY_JAR_URL = "https: private static final String PIGGYMETRICS_TAR_GZ_URL = "https: private static final String PETCLINIC_CONFIG_URL = "https: private static final String PETCLINIC_GATEWAY_JAR_URL = "https: private static final String PETCLINIC_TAR_GZ_URL = "https: private static final String SPRING_CLOUD_SERVICE_OBJECT_ID = "938df8e2-2b9d-40b1-940c-c75c33494239"; @Test @DoNotRecord(skipInPlayback = true) public void canCRUDDeployment() throws Exception { allowAllSSL(); String serviceName = generateRandomResourceName("springsvc", 15); String appName = "gateway"; String deploymentName = generateRandomResourceName("deploy", 15); String deploymentName1 = generateRandomResourceName("deploy", 15); Region region = Region.US_EAST; SpringService service = appPlatformManager.springServices().define(serviceName) .withRegion(region) .withNewResourceGroup(rgName) .create(); File jarFile = downloadFile(GATEWAY_JAR_URL); SpringApp app = service.apps().define(appName) .defineActiveDeployment(deploymentName) .withJarFile(jarFile) .withInstance(2) .withCpu(2) .withMemory(4) .withRuntime(RuntimeVersion.JAVA_11) .attach() .withDefaultPublicEndpoint() .create(); Assertions.assertNotNull(app.url()); Assertions.assertNotNull(app.activeDeploymentName()); Assertions.assertEquals(1, app.deployments().list().stream().count()); Assertions.assertTrue(requestSuccess(app.url())); SpringAppDeployment deployment = app.getActiveDeployment(); Assertions.assertEquals("2", deployment.settings().resourceRequests().cpu()); Assertions.assertEquals("4Gi", deployment.settings().resourceRequests().memory()); Assertions.assertEquals(2, deployment.instances().size()); File gzFile = downloadFile(PIGGYMETRICS_TAR_GZ_URL); deployment = app.deployments().define(deploymentName1) .withSourceCodeTarGzFile(gzFile) .withTargetModule("gateway") .withActivation() .create(); app.refresh(); Assertions.assertEquals(deploymentName1, app.activeDeploymentName()); Assertions.assertEquals("1", deployment.settings().resourceRequests().cpu()); Assertions.assertNotNull(deployment.getLogFileUrl()); Assertions.assertTrue(requestSuccess(app.url())); app.update() .withoutDefaultPublicEndpoint() .apply(); Assertions.assertFalse(app.isPublic()); app.deployments().deleteByName(deploymentName); Assertions.assertEquals(1, app.deployments().list().stream().count()); } @Test @DoNotRecord(skipInPlayback = true) public void canCreateCustomDomainWithSsl() throws Exception { String domainName = generateRandomResourceName("jsdkdemo-", 20) + ".com"; String certOrderName = generateRandomResourceName("cert", 15); String vaultName = generateRandomResourceName("vault", 15); String certName = generateRandomResourceName("cert", 15); String serviceName = generateRandomResourceName("springsvc", 15); String appName = "gateway"; Region region = Region.US_EAST; allowAllSSL(); String cerPassword = password(); String resourcePath = Paths.get(this.getClass().getResource("/session-records").toURI()).getParent().toString(); String cerPath = resourcePath + domainName + ".cer"; String pfxPath = resourcePath + domainName + ".pfx"; createCertificate(cerPath, pfxPath, domainName, cerPassword, "ssl." + domainName, "ssl." 
+ domainName); byte[] certificate = readAllBytes(new FileInputStream(pfxPath)); appPlatformManager.resourceManager().resourceGroups().define(rgName) .withRegion(region) .create(); DnsZone dnsZone = dnsZoneManager.zones().define(domainName) .withExistingResourceGroup(rgName) .create(); AppServiceDomain domain = appServiceManager.domains().define(domainName) .withExistingResourceGroup(rgName) .defineRegistrantContact() .withFirstName("Jon") .withLastName("Doe") .withEmail("jondoe@contoso.com") .withAddressLine1("123 4th Ave") .withCity("Redmond") .withStateOrProvince("WA") .withCountry(CountryIsoCode.UNITED_STATES) .withPostalCode("98052") .withPhoneCountryCode(CountryPhoneCode.UNITED_STATES) .withPhoneNumber("4258828080") .attach() .withDomainPrivacyEnabled(true) .withAutoRenewEnabled(false) .withExistingDnsZone(dnsZone) .create(); Vault vault = keyVaultManager.vaults().define(vaultName) .withRegion(region) .withExistingResourceGroup(rgName) .defineAccessPolicy() .forServicePrincipal(clientIdFromFile()) .allowSecretAllPermissions() .allowCertificateAllPermissions() .attach() .defineAccessPolicy() .forObjectId(SPRING_CLOUD_SERVICE_OBJECT_ID) .allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST) .allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST) .attach() .create(); CertificateClient certificateClient = new CertificateClientBuilder() .vaultUrl(vault.vaultUri()) .pipeline(appPlatformManager.httpPipeline()) .buildClient(); certificateClient.importCertificate( new ImportCertificateOptions(certName, certificate) .setPassword(cerPassword) .setEnabled(true) ); KeyStore store = KeyStore.getInstance("PKCS12"); store.load(new ByteArrayInputStream(certificate), cerPassword.toCharArray()); String alias = Collections.list(store.aliases()).get(0); String thumbprint = printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded())); SpringService service = appPlatformManager.springServices().define(serviceName) .withRegion(region) .withExistingResourceGroup(rgName) .withCertificate("test", vault.vaultUri(), certName) .create(); service.apps().define(appName).withDefaultActiveDeployment().withDefaultPublicEndpoint().create(); SpringApp app = service.apps().getByName(appName); dnsZone.update() .withCNameRecordSet("www", app.fqdn()) .withCNameRecordSet("ssl", app.fqdn()) .apply(); app.update() .withoutDefaultPublicEndpoint() .withCustomDomain(String.format("www.%s", domainName)) .withCustomDomain(String.format("ssl.%s", domainName), thumbprint) .apply(); Assertions.assertTrue(app.customDomains().validate(String.format("www.%s", domainName)).isValid()); Assertions.assertTrue(requestSuccess(String.format("http: Assertions.assertTrue(requestSuccess(String.format("https: app.update() .withHttpsOnly() .withoutCustomDomain(String.format("www.%s", domainName)) .apply(); Assertions.assertTrue(checkRedirect(String.format("http: } @Test @DoNotRecord(skipInPlayback = true) private File downloadFile(String remoteFileUrl) throws Exception { String[] split = remoteFileUrl.split("/"); String filename = split[split.length - 1]; File downloaded = new File(filename); if (!downloaded.exists()) { HttpURLConnection connection = (HttpURLConnection) new URL(remoteFileUrl).openConnection(); connection.connect(); try (InputStream inputStream = connection.getInputStream(); OutputStream outputStream = new FileOutputStream(downloaded)) { IOUtils.copy(inputStream, outputStream); } finally { connection.disconnect(); } } return downloaded; } private void 
extraTarGzSource(File folder, URL url) throws IOException { HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.connect(); try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) { TarArchiveEntry entry; while ((entry = inputStream.getNextTarEntry()) != null) { if (entry.isDirectory()) { continue; } File file = new File(folder, entry.getName()); File parent = file.getParentFile(); if (parent.exists() || parent.mkdirs()) { try (OutputStream outputStream = new FileOutputStream(file)) { IOUtils.copy(inputStream, outputStream); } } else { throw new IllegalStateException("Cannot create directory: " + parent.getAbsolutePath()); } } } finally { connection.disconnect(); } } private byte[] readAllBytes(InputStream inputStream) throws IOException { try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) { byte[] data = new byte[4096]; while (true) { int size = inputStream.read(data); if (size > 0) { outputStream.write(data, 0, size); } else { return outputStream.toByteArray(); } } } } public static void createCertificate(String certPath, String pfxPath, String alias, String password, String cnName, String dnsName) throws IOException { if (new File(pfxPath).exists()) { return; } String validityInDays = "3650"; String keyAlg = "RSA"; String sigAlg = "SHA1withRSA"; String keySize = "2048"; String storeType = "pkcs12"; String command = "keytool"; String jdkPath = System.getProperty("java.home"); if (jdkPath != null && !jdkPath.isEmpty()) { jdkPath = jdkPath.concat("\\bin"); if (new File(jdkPath).isDirectory()) { command = String.format("%s%s%s", jdkPath, File.separator, command); } } else { return; } String[] commandArgs = {command, "-genkey", "-alias", alias, "-keystore", pfxPath, "-storepass", password, "-validity", validityInDays, "-keyalg", keyAlg, "-sigalg", sigAlg, "-keysize", keySize, "-storetype", storeType, "-dname", "CN=" + cnName, "-ext", "EKU=1.3.6.1.5.5.7.3.1"}; if (dnsName != null) { List<String> args = new ArrayList<>(Arrays.asList(commandArgs)); args.add("-ext"); args.add("san=dns:" + dnsName); commandArgs = args.toArray(new String[0]); } cmdInvocation(commandArgs, true); File pfxFile = new File(pfxPath); if (pfxFile.exists()) { String[] certCommandArgs = {command, "-export", "-alias", alias, "-storetype", storeType, "-keystore", pfxPath, "-storepass", password, "-rfc", "-file", certPath}; cmdInvocation(certCommandArgs, true); File cerFile = new File(pfxPath); if (!cerFile.exists()) { throw new IOException( "Error occurred while creating certificate" + String.join(" ", certCommandArgs)); } } else { throw new IOException("Error occurred while creating certificates" + String.join(" ", commandArgs)); } } public static String cmdInvocation(String[] command, boolean ignoreErrorStream) throws IOException { String result = ""; String error = ""; Process process = new ProcessBuilder(command).start(); try ( InputStream inputStream = process.getInputStream(); InputStream errorStream = process.getErrorStream(); BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)); BufferedReader ebr = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8)); ) { result = br.readLine(); process.waitFor(); error = ebr.readLine(); if (error != null && (!"".equals(error))) { if (!ignoreErrorStream) { throw new IOException(error, null); } } } catch (Exception e) { throw new RuntimeException("Exception occurred while invoking command", 
e); } return result; } private static final char[] HEX_CODE = "0123456789ABCDEF".toCharArray(); private static String printHexBinary(byte[] data) { StringBuilder r = new StringBuilder(data.length * 2); for (byte b : data) { r.append(HEX_CODE[(b >> 4) & 0xF]); r.append(HEX_CODE[(b & 0xF)]); } return r.toString(); } }
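The test above derives the certificate thumbprint by loading the PFX into a PKCS12 `KeyStore` and hex-encoding the SHA-1 digest of the certificate. A minimal standalone sketch of that step, assuming placeholder values for the PFX path and password:

```java
import java.io.FileInputStream;
import java.security.KeyStore;
import java.security.MessageDigest;
import java.util.Collections;

public final class ThumbprintSample {
    public static void main(String[] args) throws Exception {
        // Placeholder inputs - substitute the real PFX path and password.
        String pfxPath = "service.pfx";
        char[] password = "changeit".toCharArray();

        KeyStore store = KeyStore.getInstance("PKCS12");
        try (FileInputStream in = new FileInputStream(pfxPath)) {
            store.load(in, password);
        }

        // Use the first alias in the store, matching what the test does.
        String alias = Collections.list(store.aliases()).get(0);
        byte[] digest = MessageDigest.getInstance("SHA-1")
            .digest(store.getCertificate(alias).getEncoded());

        // Hex-encode the digest; the thumbprint is passed as upper-case hex.
        StringBuilder hex = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            hex.append(String.format("%02X", b));
        }
        System.out.println("Thumbprint: " + hex);
    }
}
```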
I assume it should be `ManagementException`? Use `getBuildResultWithResponseAsync` to obtain the `Response` instance that can be used for the `ManagementException`.
private Mono<String> uploadAndBuildAsync(File source, ResourceUploadDefinition option) { return uploadToStorageAsync(source, option) .then(enqueueBuildAsync(option)) .flatMap(buildId -> manager().serviceClient().getBuildServices() .getBuildResultAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, parent().name(), ResourceUtils.nameFromResourceId(buildId)) .flatMap(buildResultInner -> { BuildResultProvisioningState state = buildResultInner.properties().provisioningState(); if (state == BuildResultProvisioningState.SUCCEEDED) { return Mono.just(buildId); } else if (state == BuildResultProvisioningState.QUEUING || state == BuildResultProvisioningState.BUILDING) { return Mono.empty(); } else return Mono.error(new RuntimeException("build failed")); }).repeatWhenEmpty( longFlux -> longFlux .flatMap( index -> Mono.delay(ResourceManagerUtils.InternalRuntimeContext.getDelayDuration( manager().serviceClient().getDefaultPollInterval()))))); }
} else return Mono.error(new RuntimeException("build failed"));
private Mono<String> uploadAndBuildAsync(File source, ResourceUploadDefinition option) { AtomicLong pollCount = new AtomicLong(); Duration pollDuration = manager().serviceClient().getDefaultPollInterval(); return uploadToStorageAsync(source, option) .then(enqueueBuildAsync(option)) .flatMap(buildId -> manager().serviceClient().getBuildServices() .getBuildResultWithResponseAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, parent().name(), ResourceUtils.nameFromResourceId(buildId)) .flatMap(response -> { if (pollDuration.multipliedBy(pollCount.get()).compareTo(MAX_BUILD_TIMEOUT) < 0) { BuildResultProvisioningState state = response.getValue().properties().provisioningState(); if (state == BuildResultProvisioningState.SUCCEEDED) { return Mono.just(buildId); } else if (state == BuildResultProvisioningState.QUEUING || state == BuildResultProvisioningState.BUILDING) { return Mono.empty(); } else { AppPlatformManagementClientImpl client = (AppPlatformManagementClientImpl) manager().serviceClient(); return Mono.error(new ManagementException(String.format("Build failed for file: %s, buildId: %s", file.getName(), buildId), new HttpResponseImpl<>(response, client.getSerializerAdapter()))); } } else { return Mono.error(new ManagementException(String.format("Build timeout for file: %s, buildId: %s", file.getName(), buildId), null)); } }).repeatWhenEmpty( longFlux -> longFlux .flatMap( index -> { pollCount.set(index); return Mono.delay(ResourceManagerUtils.InternalRuntimeContext.getDelayDuration(pollDuration)); }))); }
class BuildServiceTask implements FunctionalTaskItem { private final File file; private final boolean sourceCodeTarGz; private final List<String> configFilePatterns; private String module; BuildServiceTask(File file, List<String> configFilePatterns) { this(file, configFilePatterns, false); } BuildServiceTask(File file, List<String> configFilePatterns, boolean sourceCodeTarGz) { this.file = file; this.configFilePatterns = configFilePatterns; this.sourceCodeTarGz = sourceCodeTarGz; } @Override public Mono<Indexable> apply(Context context) { return app().getResourceUploadUrlAsync() .flatMap(option -> uploadAndBuildAsync(file, option) .flatMap(buildId -> { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) innerModel().properties().source(); userSourceInfo.withBuildResultId(buildId); withConfigFilePatterns(this.configFilePatterns); return Mono.empty(); }).then(context.voidMono())); } private Mono<String> enqueueBuildAsync(ResourceUploadDefinition option) { BuildProperties buildProperties = new BuildProperties() .withBuilder(String.format("%s/buildservices/%s/builders/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withAgentPool(String.format("%s/buildservices/%s/agentPools/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withRelativePath(option.relativePath()); if (this.sourceCodeTarGz) { Map<String, String> buildEnv = buildProperties.env() == null ? new HashMap<>() : buildProperties.env(); buildProperties.withEnv(buildEnv); if (module != null) { buildEnv.put("BP_MAVEN_BUILT_MODULE", module); } } return manager().serviceClient().getBuildServices() .createOrUpdateBuildAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, app().name(), new BuildInner().withProperties(buildProperties)) .map(inner -> inner.properties().triggeredBuildResult().id()); } }
class BuildServiceTask implements FunctionalTaskItem { private final File file; private final boolean sourceCodeTarGz; private final List<String> configFilePatterns; private String module; BuildServiceTask(File file, List<String> configFilePatterns) { this(file, configFilePatterns, false); } BuildServiceTask(File file, List<String> configFilePatterns, boolean sourceCodeTarGz) { this.file = file; this.configFilePatterns = configFilePatterns; this.sourceCodeTarGz = sourceCodeTarGz; } @Override public Mono<Indexable> apply(Context context) { return app().getResourceUploadUrlAsync() .flatMap(option -> uploadAndBuildAsync(file, option) .flatMap(buildId -> { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) innerModel().properties().source(); userSourceInfo.withBuildResultId(buildId); withConfigFilePatterns(this.configFilePatterns); return Mono.empty(); }).then(context.voidMono())); } private Mono<String> enqueueBuildAsync(ResourceUploadDefinition option) { BuildProperties buildProperties = new BuildProperties() .withBuilder(String.format("%s/buildservices/%s/builders/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withAgentPool(String.format("%s/buildservices/%s/agentPools/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withRelativePath(option.relativePath()); if (this.sourceCodeTarGz) { Map<String, String> buildEnv = buildProperties.env() == null ? new HashMap<>() : buildProperties.env(); buildProperties.withEnv(buildEnv); if (module != null) { buildEnv.put("BP_MAVEN_BUILT_MODULE", module); } } return manager().serviceClient().getBuildServices() .createOrUpdateBuildAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, app().name(), new BuildInner().withProperties(buildProperties)) .map(inner -> inner.properties().triggeredBuildResult().id()); } @SuppressWarnings("BlockingMethodInNonBlockingContext") private class HttpResponseImpl<T> extends HttpResponse { private final Response<T> response; private final SerializerAdapter serializerAdapter; protected HttpResponseImpl(Response<T> response, SerializerAdapter serializerAdapter) { super(response.getRequest()); this.response = response; this.serializerAdapter = serializerAdapter; } @Override public int getStatusCode() { return response.getStatusCode(); } @Override public String getHeaderValue(String header) { return response.getHeaders().getValue(header); } @Override public HttpHeaders getHeaders() { return response.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { try { return Flux.just(ByteBuffer.wrap(serializerAdapter.serializeToBytes(response.getValue(), SerializerEncoding.JSON))); } catch (IOException e) { return Flux.empty(); } } @Override public Mono<byte[]> getBodyAsByteArray() { try { return Mono.just(serializerAdapter.serializeToBytes(response.getValue(), SerializerEncoding.JSON)); } catch (IOException e) { return Mono.empty(); } } @Override public Mono<String> getBodyAsString() { return Mono.just(serializerAdapter.serializeRaw(response.getValue())); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsString(); } } }
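For reference, a hedged sketch of how a caller might observe the `ManagementException` surfaced by the build failure; the app name and the exact create call that drives the enterprise-tier build are placeholders, not the SDK's prescribed usage:

```java
import com.azure.core.management.exception.ManagementException;
import com.azure.resourcemanager.appplatform.models.SpringService;

final class BuildFailureHandling {
    // Hypothetical caller-side handling; springService and "demo-app" are placeholders.
    static void deploy(SpringService springService) {
        try {
            springService.apps().define("demo-app")
                .withDefaultActiveDeployment()
                .create();
        } catch (ManagementException e) {
            // The message carries the file name and build id; the response,
            // when present, carries the last build-result payload returned.
            System.err.println("Build failed: " + e.getMessage());
            if (e.getResponse() != null) {
                System.err.println("Status: " + e.getResponse().getStatusCode());
            }
        }
    }
}
```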
Do we need a timeout? An hour is also fine, but I guess we need one. Can be added later.
private Mono<String> uploadAndBuildAsync(File source, ResourceUploadDefinition option) { return uploadToStorageAsync(source, option) .then(enqueueBuildAsync(option)) .flatMap(buildId -> manager().serviceClient().getBuildServices() .getBuildResultAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, parent().name(), ResourceUtils.nameFromResourceId(buildId)) .flatMap(buildResultInner -> { BuildResultProvisioningState state = buildResultInner.properties().provisioningState(); if (state == BuildResultProvisioningState.SUCCEEDED) { return Mono.just(buildId); } else if (state == BuildResultProvisioningState.QUEUING || state == BuildResultProvisioningState.BUILDING) { return Mono.empty(); } else return Mono.error(new RuntimeException("build failed")); }).repeatWhenEmpty( longFlux -> longFlux .flatMap( index -> Mono.delay(ResourceManagerUtils.InternalRuntimeContext.getDelayDuration( manager().serviceClient().getDefaultPollInterval()))))); }
}).repeatWhenEmpty(
private Mono<String> uploadAndBuildAsync(File source, ResourceUploadDefinition option) { AtomicLong pollCount = new AtomicLong(); Duration pollDuration = manager().serviceClient().getDefaultPollInterval(); return uploadToStorageAsync(source, option) .then(enqueueBuildAsync(option)) .flatMap(buildId -> manager().serviceClient().getBuildServices() .getBuildResultWithResponseAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, parent().name(), ResourceUtils.nameFromResourceId(buildId)) .flatMap(response -> { if (pollDuration.multipliedBy(pollCount.get()).compareTo(MAX_BUILD_TIMEOUT) < 0) { BuildResultProvisioningState state = response.getValue().properties().provisioningState(); if (state == BuildResultProvisioningState.SUCCEEDED) { return Mono.just(buildId); } else if (state == BuildResultProvisioningState.QUEUING || state == BuildResultProvisioningState.BUILDING) { return Mono.empty(); } else { AppPlatformManagementClientImpl client = (AppPlatformManagementClientImpl) manager().serviceClient(); return Mono.error(new ManagementException(String.format("Build failed for file: %s, buildId: %s", file.getName(), buildId), new HttpResponseImpl<>(response, client.getSerializerAdapter()))); } } else { return Mono.error(new ManagementException(String.format("Build timeout for file: %s, buildId: %s", file.getName(), buildId), null)); } }).repeatWhenEmpty( longFlux -> longFlux .flatMap( index -> { pollCount.set(index); return Mono.delay(ResourceManagerUtils.InternalRuntimeContext.getDelayDuration(pollDuration)); }))); }
class BuildServiceTask implements FunctionalTaskItem { private final File file; private final boolean sourceCodeTarGz; private final List<String> configFilePatterns; private String module; BuildServiceTask(File file, List<String> configFilePatterns) { this(file, configFilePatterns, false); } BuildServiceTask(File file, List<String> configFilePatterns, boolean sourceCodeTarGz) { this.file = file; this.configFilePatterns = configFilePatterns; this.sourceCodeTarGz = sourceCodeTarGz; } @Override public Mono<Indexable> apply(Context context) { return app().getResourceUploadUrlAsync() .flatMap(option -> uploadAndBuildAsync(file, option) .flatMap(buildId -> { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) innerModel().properties().source(); userSourceInfo.withBuildResultId(buildId); withConfigFilePatterns(this.configFilePatterns); return Mono.empty(); }).then(context.voidMono())); } private Mono<String> enqueueBuildAsync(ResourceUploadDefinition option) { BuildProperties buildProperties = new BuildProperties() .withBuilder(String.format("%s/buildservices/%s/builders/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withAgentPool(String.format("%s/buildservices/%s/agentPools/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withRelativePath(option.relativePath()); if (this.sourceCodeTarGz) { Map<String, String> buildEnv = buildProperties.env() == null ? new HashMap<>() : buildProperties.env(); buildProperties.withEnv(buildEnv); if (module != null) { buildEnv.put("BP_MAVEN_BUILT_MODULE", module); } } return manager().serviceClient().getBuildServices() .createOrUpdateBuildAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, app().name(), new BuildInner().withProperties(buildProperties)) .map(inner -> inner.properties().triggeredBuildResult().id()); } }
class BuildServiceTask implements FunctionalTaskItem { private final File file; private final boolean sourceCodeTarGz; private final List<String> configFilePatterns; private String module; BuildServiceTask(File file, List<String> configFilePatterns) { this(file, configFilePatterns, false); } BuildServiceTask(File file, List<String> configFilePatterns, boolean sourceCodeTarGz) { this.file = file; this.configFilePatterns = configFilePatterns; this.sourceCodeTarGz = sourceCodeTarGz; } @Override public Mono<Indexable> apply(Context context) { return app().getResourceUploadUrlAsync() .flatMap(option -> uploadAndBuildAsync(file, option) .flatMap(buildId -> { BuildResultUserSourceInfo userSourceInfo = (BuildResultUserSourceInfo) innerModel().properties().source(); userSourceInfo.withBuildResultId(buildId); withConfigFilePatterns(this.configFilePatterns); return Mono.empty(); }).then(context.voidMono())); } private Mono<String> enqueueBuildAsync(ResourceUploadDefinition option) { BuildProperties buildProperties = new BuildProperties() .withBuilder(String.format("%s/buildservices/%s/builders/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withAgentPool(String.format("%s/buildservices/%s/agentPools/%s", service().id(), Constants.DEFAULT_TANZU_COMPONENT_NAME, Constants.DEFAULT_TANZU_COMPONENT_NAME)) .withRelativePath(option.relativePath()); if (this.sourceCodeTarGz) { Map<String, String> buildEnv = buildProperties.env() == null ? new HashMap<>() : buildProperties.env(); buildProperties.withEnv(buildEnv); if (module != null) { buildEnv.put("BP_MAVEN_BUILT_MODULE", module); } } return manager().serviceClient().getBuildServices() .createOrUpdateBuildAsync( service().resourceGroupName(), service().name(), Constants.DEFAULT_TANZU_COMPONENT_NAME, app().name(), new BuildInner().withProperties(buildProperties)) .map(inner -> inner.properties().triggeredBuildResult().id()); } @SuppressWarnings("BlockingMethodInNonBlockingContext") private class HttpResponseImpl<T> extends HttpResponse { private final Response<T> response; private final SerializerAdapter serializerAdapter; protected HttpResponseImpl(Response<T> response, SerializerAdapter serializerAdapter) { super(response.getRequest()); this.response = response; this.serializerAdapter = serializerAdapter; } @Override public int getStatusCode() { return response.getStatusCode(); } @Override public String getHeaderValue(String header) { return response.getHeaders().getValue(header); } @Override public HttpHeaders getHeaders() { return response.getHeaders(); } @Override public Flux<ByteBuffer> getBody() { try { return Flux.just(ByteBuffer.wrap(serializerAdapter.serializeToBytes(response.getValue(), SerializerEncoding.JSON))); } catch (IOException e) { return Flux.empty(); } } @Override public Mono<byte[]> getBodyAsByteArray() { try { return Mono.just(serializerAdapter.serializeToBytes(response.getValue(), SerializerEncoding.JSON)); } catch (IOException e) { return Mono.empty(); } } @Override public Mono<String> getBodyAsString() { return Mono.just(serializerAdapter.serializeRaw(response.getValue())); } @Override public Mono<String> getBodyAsString(Charset charset) { return getBodyAsString(); } } }
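The bounded-polling shape used in the fix (poll, stay empty while the build is in progress, repeat after a delay, and fail once the accumulated wait exceeds a budget) can be sketched in isolation with plain Reactor. The interval, the budget, and the fake `pollOnce` status check below are all illustrative:

```java
import java.time.Duration;
import java.util.concurrent.atomic.AtomicLong;
import reactor.core.publisher.Mono;

public final class BoundedPollSample {
    private static final Duration POLL_INTERVAL = Duration.ofSeconds(2);
    private static final Duration MAX_WAIT = Duration.ofMinutes(5);

    // Stand-in for a real status check: stay empty until the 3rd attempt.
    static Mono<String> pollOnce(long attempt) {
        return attempt < 3 ? Mono.empty() : Mono.just("SUCCEEDED");
    }

    public static void main(String[] args) {
        AtomicLong attempts = new AtomicLong();
        String result = Mono.defer(() -> {
                // Fail once the delays already spent would exceed the budget.
                if (POLL_INTERVAL.multipliedBy(attempts.get()).compareTo(MAX_WAIT) >= 0) {
                    return Mono.<String>error(new IllegalStateException("poll timeout"));
                }
                return pollOnce(attempts.get());
            })
            // repeatWhenEmpty resubscribes after each empty completion and
            // hands us an iteration counter to track elapsed attempts.
            .repeatWhenEmpty(longFlux -> longFlux.flatMap(index -> {
                attempts.set(index + 1);
                return Mono.delay(POLL_INTERVAL);
            }))
            .block();
        System.out.println(result);
    }
}
```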
Could this become `return setBody(BinaryData.fromString(content));`?
public HttpRequest setBody(String content) { this.body = BinaryData.fromString(content); setContentLength(this.body.getLength()); return this; }
return this;
public HttpRequest setBody(String content) { return setBody(BinaryData.fromString(content)); }
class HttpRequest { private static final ClientLogger LOGGER = new ClientLogger(HttpRequest.class); private HttpMethod httpMethod; private URL url; private HttpHeaders headers; private BinaryData body; /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to */ public HttpRequest(HttpMethod httpMethod, URL url) { this(httpMethod, url, new HttpHeaders(), (BinaryData) null); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest(HttpMethod httpMethod, String url) { this.httpMethod = httpMethod; setUrl(url); this.headers = new HttpHeaders(); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, Flux<ByteBuffer> body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(body))); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, BinaryData body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(body); } /** * Get the request method. * * @return the request method */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Set the request method. * * @param httpMethod the request method * @return this HttpRequest */ public HttpRequest setHttpMethod(HttpMethod httpMethod) { this.httpMethod = httpMethod; return this; } /** * Get the target address. * * @return the target address */ public URL getUrl() { return url; } /** * Set the target address to send the request to. * * @param url target address as {@link URL} * @return this HttpRequest */ public HttpRequest setUrl(URL url) { this.url = url; return this; } /** * Set the target address to send the request to. * * @param url target address as a String * @return this HttpRequest * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest setUrl(String url) { try { this.url = new URL(url); } catch (MalformedURLException ex) { throw LOGGER.logExceptionAsWarning(new IllegalArgumentException("'url' must be a valid URL.", ex)); } return this; } /** * Get the request headers. * * @return headers to be sent */ public HttpHeaders getHeaders() { return headers; } /** * Set the request headers. * * @param headers the set of headers * @return this HttpRequest */ public HttpRequest setHeaders(HttpHeaders headers) { this.headers = headers; return this; } /** * Set a request header, replacing any existing value. A null for {@code value} will remove the header if one with * matching name exists. * * @param name the header name * @param value the header value * @return this HttpRequest */ public HttpRequest setHeader(String name, String value) { headers.set(name, value); return this; } /** * Get the request content. 
* * @return the content to be send */ public Flux<ByteBuffer> getBody() { return body == null ? null : body.toFluxByteBuffer(); } /** * Get the request content. * * @return the content to be send */ public BinaryData getBodyAsBinaryData() { return body; } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(byte[] content) { setContentLength(content.length); this.body = BinaryData.fromBytes(content); return this; } /** * Set request content. * <p> * Caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(Flux<ByteBuffer> content) { if (content != null) { setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(content))); } else { this.body = null; } return this; } /** * Set request content. * <p> * If provided content has known length, i.e. {@link BinaryData * Content-Length header is updated. Otherwise, * if provided content has unknown length, i.e. {@link BinaryData * the caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(BinaryData content) { this.body = content; if (content != null && content.getLength() != null) { setContentLength(content.getLength()); } return this; } private void setContentLength(long contentLength) { headers.set("Content-Length", String.valueOf(contentLength)); } /** * Creates a copy of the request. * * The main purpose of this is so that this HttpRequest can be changed and the resulting HttpRequest can be a * backup. This means that the cloned HttpHeaders and body must not be able to change from side effects of this * HttpRequest. * * @return a new HTTP request instance with cloned instances of all mutable properties. */ public HttpRequest copy() { final HttpHeaders bufferedHeaders = new HttpHeaders(headers); return new HttpRequest(httpMethod, url, bufferedHeaders, body); } }
class HttpRequest { private static final ClientLogger LOGGER = new ClientLogger(HttpRequest.class); private HttpMethod httpMethod; private URL url; private HttpHeaders headers; private BinaryData body; /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to */ public HttpRequest(HttpMethod httpMethod, URL url) { this(httpMethod, url, new HttpHeaders(), (BinaryData) null); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest(HttpMethod httpMethod, String url) { this.httpMethod = httpMethod; setUrl(url); this.headers = new HttpHeaders(); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, Flux<ByteBuffer> body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(body))); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, BinaryData body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(body); } /** * Get the request method. * * @return the request method */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Set the request method. * * @param httpMethod the request method * @return this HttpRequest */ public HttpRequest setHttpMethod(HttpMethod httpMethod) { this.httpMethod = httpMethod; return this; } /** * Get the target address. * * @return the target address */ public URL getUrl() { return url; } /** * Set the target address to send the request to. * * @param url target address as {@link URL} * @return this HttpRequest */ public HttpRequest setUrl(URL url) { this.url = url; return this; } /** * Set the target address to send the request to. * * @param url target address as a String * @return this HttpRequest * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest setUrl(String url) { try { this.url = new URL(url); } catch (MalformedURLException ex) { throw LOGGER.logExceptionAsWarning(new IllegalArgumentException("'url' must be a valid URL.", ex)); } return this; } /** * Get the request headers. * * @return headers to be sent */ public HttpHeaders getHeaders() { return headers; } /** * Set the request headers. * * @param headers the set of headers * @return this HttpRequest */ public HttpRequest setHeaders(HttpHeaders headers) { this.headers = headers; return this; } /** * Set a request header, replacing any existing value. 
A null for {@code value} will remove the header if one with * matching name exists. * * @param name the header name * @param value the header value * @return this HttpRequest */ public HttpRequest setHeader(String name, String value) { headers.set(name, value); return this; } /** * Get the request content. * * @return the content to be sent */ public Flux<ByteBuffer> getBody() { return body == null ? null : body.toFluxByteBuffer(); } /** * Get the request content. * * @return the content to be sent */ public BinaryData getBodyAsBinaryData() { return body; } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(byte[] content) { return setBody(BinaryData.fromBytes(content)); } /** * Set request content. * <p> * Caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(Flux<ByteBuffer> content) { if (content != null) { this.body = BinaryDataHelper.createBinaryData(new FluxByteBufferContent(content)); } else { this.body = null; } return this; } /** * Set request content. * <p> * If provided content has known length, i.e. {@link BinaryData * Content-Length header is updated. Otherwise, * if provided content has unknown length, i.e. {@link BinaryData * the caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(BinaryData content) { this.body = content; if (content != null && content.getLength() != null) { setContentLength(content.getLength()); } return this; } private void setContentLength(long contentLength) { headers.set("Content-Length", String.valueOf(contentLength)); } /** * Creates a copy of the request. * * The main purpose of this is so that this HttpRequest can be changed and the resulting HttpRequest can be a * backup. This means that the cloned HttpHeaders and body must not be able to change from side effects of this * HttpRequest. * * @return a new HTTP request instance with cloned instances of all mutable properties. */ public HttpRequest copy() { final HttpHeaders bufferedHeaders = new HttpHeaders(headers); return new HttpRequest(httpMethod, url, bufferedHeaders, body); } }
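A quick illustrative check that the delegating `setBody(String)` overload still produces a Content-Length header, since `setBody(BinaryData)` sets it whenever the length is known; the URL and payload are placeholders:

```java
import com.azure.core.http.HttpMethod;
import com.azure.core.http.HttpRequest;

public final class SetBodyStringSample {
    public static void main(String[] args) {
        // The String overload delegates to setBody(BinaryData), which sets
        // Content-Length itself when the body length is known.
        HttpRequest request = new HttpRequest(HttpMethod.POST, "https://example.com")
            .setBody("{\"name\":\"value\"}");

        System.out.println(request.getHeaders().getValue("Content-Length")); // 16
        System.out.println(request.getBodyAsBinaryData().toString());
    }
}
```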
Let's merge it, but we should discuss this issue during API review.
public ShareFileRenameOptions setHeaders(ShareFileHttpHeaders headers) { if (headers.getCacheControl() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("cache control is not supported on this api")); } if (headers.getContentDisposition() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("content disposition is not supported on this api")); } if (headers.getContentEncoding() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("content encoding is not supported on this api")); } if (headers.getContentLanguage() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("content language is not supported on this api")); } if (headers.getContentMd5() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("content md5 is not supported on this api")); } this.headers = headers; return this; }
return this;
public ShareFileRenameOptions setHeaders(ShareFileHttpHeaders headers) { if (headers.getCacheControl() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("cache control is not supported on this api")); } if (headers.getContentDisposition() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("content disposition is not supported on this api")); } if (headers.getContentEncoding() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("content encoding is not supported on this api")); } if (headers.getContentLanguage() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("content language is not supported on this api")); } if (headers.getContentMd5() != null) { throw LOGGER.logExceptionAsError( new IllegalArgumentException("content md5 is not supported on this api")); } this.headers = headers; return this; }
class ShareFileRenameOptions { private static final ClientLogger LOGGER = new ClientLogger(ShareFileRenameOptions.class); private final String destinationPath; private Boolean replaceIfExists; private Boolean ignoreReadOnly; private ShareRequestConditions sourceRequestConditions; private ShareRequestConditions destinationRequestConditions; private String filePermission; private FileSmbProperties smbProperties; private Map<String, String> metadata; private ShareFileHttpHeaders headers; /** * Creates a {@code ShareFileRenameOptions} object. * * @param destinationPath Relative path from the share to rename the file to. */ public ShareFileRenameOptions(String destinationPath) { StorageImplUtils.assertNotNull("destinationPath", destinationPath); this.destinationPath = destinationPath; } /** * @return The path to which the file should be renamed. */ public String getDestinationPath() { return destinationPath; } /** * @return A boolean value which, if the destination file already exists, determines whether this * request will overwrite the file or not. If true, the rename will succeed and will overwrite the destination file. * If not provided or if false and the destination file does exist, the request will not overwrite the destination * file. If provided and the destination file doesn’t exist, the rename will succeed. */ public Boolean getReplaceIfExists() { return replaceIfExists; } /** * @param replaceIfExists A boolean value which, if the destination file already exists, determines whether this * request will overwrite the file or not. If true, the rename will succeed and will overwrite the destination file. * If not provided or if false and the destination file does exist, the request will not overwrite the destination * file. If provided and the destination file doesn’t exist, the rename will succeed. * @return The updated options. */ public ShareFileRenameOptions setReplaceIfExists(Boolean replaceIfExists) { this.replaceIfExists = replaceIfExists; return this; } /** * @return A boolean value that specifies whether the ReadOnly attribute on a preexisting destination file should be * respected. If true, the rename will succeed, otherwise, a previous file at the destination with the ReadOnly * attribute set will cause the rename to fail. */ public Boolean isIgnoreReadOnly() { return ignoreReadOnly; } /** * @param ignoreReadOnly A boolean value that specifies whether the ReadOnly attribute on a preexisting destination * file should be respected. If true, the rename will succeed, otherwise, a previous file at the destination with * the ReadOnly attribute set will cause the rename to fail. * @return The updated options. */ public ShareFileRenameOptions setIgnoreReadOnly(Boolean ignoreReadOnly) { this.ignoreReadOnly = ignoreReadOnly; return this; } /** * @return Source request conditions. This parameter is only applicable if the source is a file. */ public ShareRequestConditions getSourceRequestConditions() { return sourceRequestConditions; } /** * @param sourceRequestConditions Source request conditions. This parameter is only applicable if the source is a * file. * @return The updated options. */ public ShareFileRenameOptions setSourceRequestConditions(ShareRequestConditions sourceRequestConditions) { this.sourceRequestConditions = sourceRequestConditions; return this; } /** * @return The destination request conditions. 
*/ public ShareRequestConditions getDestinationRequestConditions() { return destinationRequestConditions; } /** * @param destinationRequestConditions The destination request conditions. * @return The updated options. */ public ShareFileRenameOptions setDestinationRequestConditions(ShareRequestConditions destinationRequestConditions) { this.destinationRequestConditions = destinationRequestConditions; return this; } /** * @return Optional file permission to set on the destination file or directory. The value in SmbProperties will be * ignored. */ public String getFilePermission() { return filePermission; } /** * @param filePermission Optional file permission to set on the destination file or directory. The value in * SmbProperties will be ignored. * @return The updated options. */ public ShareFileRenameOptions setFilePermission(String filePermission) { this.filePermission = filePermission; return this; } /** * @return Optional SMB properties to set on the destination file or directory. The only properties that are * considered are file attributes, file creation time, file last write time, and file permission key. The rest are * ignored. */ public FileSmbProperties getSmbProperties() { return smbProperties; } /** * @param smbProperties Optional SMB properties to set on the destination file or directory. The only properties * that are considered are file attributes, file creation time, file last write time, and file permission key. The * rest are ignored. * @return The updated options. */ public ShareFileRenameOptions setSmbProperties(FileSmbProperties smbProperties) { this.smbProperties = smbProperties; return this; } /** * @return The metadata to associate with the renamed file. */ public Map<String, String> getMetadata() { return this.metadata; } /** * @param metadata The metadata to associate with the renamed file. * @return The updated options. */ public ShareFileRenameOptions setMetadata(Map<String, String> metadata) { this.metadata = metadata; return this; } /** * Gets the {@link ShareFileHttpHeaders}. Currently, only content type is respected. Others are ignored. * * @return The {@link ShareFileHttpHeaders}. */ public ShareFileHttpHeaders getHeaders() { return this.headers; } /** * Sets the {@link ShareFileHttpHeaders}. Currently, only content type is respected. This method will throw if * others are set. * * @param headers {@link ShareFileHttpHeaders} * @return The updated options. * @throws IllegalArgumentException If headers besides content type are set, this method will throw. */ }
class ShareFileRenameOptions { private static final ClientLogger LOGGER = new ClientLogger(ShareFileRenameOptions.class); private final String destinationPath; private Boolean replaceIfExists; private Boolean ignoreReadOnly; private ShareRequestConditions sourceRequestConditions; private ShareRequestConditions destinationRequestConditions; private String filePermission; private FileSmbProperties smbProperties; private Map<String, String> metadata; private ShareFileHttpHeaders headers; /** * Creates a {@code ShareFileRenameOptions} object. * * @param destinationPath Relative path from the share to rename the file to. */ public ShareFileRenameOptions(String destinationPath) { StorageImplUtils.assertNotNull("destinationPath", destinationPath); this.destinationPath = destinationPath; } /** * @return The path to which the file should be renamed. */ public String getDestinationPath() { return destinationPath; } /** * @return A boolean value which, if the destination file already exists, determines whether this * request will overwrite the file or not. If true, the rename will succeed and will overwrite the destination file. * If not provided or if false and the destination file does exist, the request will not overwrite the destination * file. If provided and the destination file doesn’t exist, the rename will succeed. */ public Boolean getReplaceIfExists() { return replaceIfExists; } /** * @param replaceIfExists A boolean value which, if the destination file already exists, determines whether this * request will overwrite the file or not. If true, the rename will succeed and will overwrite the destination file. * If not provided or if false and the destination file does exist, the request will not overwrite the destination * file. If provided and the destination file doesn’t exist, the rename will succeed. * @return The updated options. */ public ShareFileRenameOptions setReplaceIfExists(Boolean replaceIfExists) { this.replaceIfExists = replaceIfExists; return this; } /** * @return A boolean value that specifies whether the ReadOnly attribute on a preexisting destination file should be * respected. If true, the rename will succeed, otherwise, a previous file at the destination with the ReadOnly * attribute set will cause the rename to fail. */ public Boolean isIgnoreReadOnly() { return ignoreReadOnly; } /** * @param ignoreReadOnly A boolean value that specifies whether the ReadOnly attribute on a preexisting destination * file should be respected. If true, the rename will succeed, otherwise, a previous file at the destination with * the ReadOnly attribute set will cause the rename to fail. * @return The updated options. */ public ShareFileRenameOptions setIgnoreReadOnly(Boolean ignoreReadOnly) { this.ignoreReadOnly = ignoreReadOnly; return this; } /** * @return Source request conditions. This parameter is only applicable if the source is a file. */ public ShareRequestConditions getSourceRequestConditions() { return sourceRequestConditions; } /** * @param sourceRequestConditions Source request conditions. This parameter is only applicable if the source is a * file. * @return The updated options. */ public ShareFileRenameOptions setSourceRequestConditions(ShareRequestConditions sourceRequestConditions) { this.sourceRequestConditions = sourceRequestConditions; return this; } /** * @return The destination request conditions. 
*/ public ShareRequestConditions getDestinationRequestConditions() { return destinationRequestConditions; } /** * @param destinationRequestConditions The destination request conditions. * @return The updated options. */ public ShareFileRenameOptions setDestinationRequestConditions(ShareRequestConditions destinationRequestConditions) { this.destinationRequestConditions = destinationRequestConditions; return this; } /** * @return Optional file permission to set on the destination file or directory. The value in SmbProperties will be * ignored. */ public String getFilePermission() { return filePermission; } /** * @param filePermission Optional file permission to set on the destination file or directory. The value in * SmbProperties will be ignored. * @return The updated options. */ public ShareFileRenameOptions setFilePermission(String filePermission) { this.filePermission = filePermission; return this; } /** * @return Optional SMB properties to set on the destination file or directory. The only properties that are * considered are file attributes, file creation time, file last write time, and file permission key. The rest are * ignored. */ public FileSmbProperties getSmbProperties() { return smbProperties; } /** * @param smbProperties Optional SMB properties to set on the destination file or directory. The only properties * that are considered are file attributes, file creation time, file last write time, and file permission key. The * rest are ignored. * @return The updated options. */ public ShareFileRenameOptions setSmbProperties(FileSmbProperties smbProperties) { this.smbProperties = smbProperties; return this; } /** * @return The metadata to associate with the renamed file. */ public Map<String, String> getMetadata() { return this.metadata; } /** * @param metadata The metadata to associate with the renamed file. * @return The updated options. */ public ShareFileRenameOptions setMetadata(Map<String, String> metadata) { this.metadata = metadata; return this; } /** * Gets the {@link ShareFileHttpHeaders}. Currently, only content type is respected. Others are ignored. * * @return The {@link ShareFileHttpHeaders}. */ public ShareFileHttpHeaders getHeaders() { return this.headers; } /** * Sets the {@link ShareFileHttpHeaders}. Currently, only content type is respected. This method will throw if * others are set. * * @param headers {@link ShareFileHttpHeaders} * @return The updated options. * @throws IllegalArgumentException If headers besides content type are set, this method will throw. */ }
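A short usage sketch of the guarded setter: only the content type is accepted, and any other header trips the `IllegalArgumentException` shown above. The import package locations are assumed from the library's usual layout:

```java
import com.azure.storage.file.share.models.ShareFileHttpHeaders;
import com.azure.storage.file.share.options.ShareFileRenameOptions;

public final class RenameOptionsSample {
    public static void main(String[] args) {
        // Content type is the only header the rename API respects, so this succeeds.
        ShareFileRenameOptions options = new ShareFileRenameOptions("dir/renamed-file.txt")
            .setHeaders(new ShareFileHttpHeaders().setContentType("application/json"));

        // Any other header triggers the validation guard shown above.
        try {
            options.setHeaders(new ShareFileHttpHeaders().setCacheControl("no-cache"));
        } catch (IllegalArgumentException e) {
            System.err.println(e.getMessage()); // "cache control is not supported on this api"
        }
    }
}
```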
Same statement as above: this could just call into the `setBody(BinaryData)` method.
public HttpRequest setBody(byte[] content) { setContentLength(content.length); this.body = BinaryData.fromBytes(content); return this; }
return this;
public HttpRequest setBody(byte[] content) { return setBody(BinaryData.fromBytes(content)); }
class HttpRequest { private static final ClientLogger LOGGER = new ClientLogger(HttpRequest.class); private HttpMethod httpMethod; private URL url; private HttpHeaders headers; private BinaryData body; /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to */ public HttpRequest(HttpMethod httpMethod, URL url) { this(httpMethod, url, new HttpHeaders(), (BinaryData) null); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest(HttpMethod httpMethod, String url) { this.httpMethod = httpMethod; setUrl(url); this.headers = new HttpHeaders(); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, Flux<ByteBuffer> body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(body))); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, BinaryData body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(body); } /** * Get the request method. * * @return the request method */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Set the request method. * * @param httpMethod the request method * @return this HttpRequest */ public HttpRequest setHttpMethod(HttpMethod httpMethod) { this.httpMethod = httpMethod; return this; } /** * Get the target address. * * @return the target address */ public URL getUrl() { return url; } /** * Set the target address to send the request to. * * @param url target address as {@link URL} * @return this HttpRequest */ public HttpRequest setUrl(URL url) { this.url = url; return this; } /** * Set the target address to send the request to. * * @param url target address as a String * @return this HttpRequest * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest setUrl(String url) { try { this.url = new URL(url); } catch (MalformedURLException ex) { throw LOGGER.logExceptionAsWarning(new IllegalArgumentException("'url' must be a valid URL.", ex)); } return this; } /** * Get the request headers. * * @return headers to be sent */ public HttpHeaders getHeaders() { return headers; } /** * Set the request headers. * * @param headers the set of headers * @return this HttpRequest */ public HttpRequest setHeaders(HttpHeaders headers) { this.headers = headers; return this; } /** * Set a request header, replacing any existing value. A null for {@code value} will remove the header if one with * matching name exists. * * @param name the header name * @param value the header value * @return this HttpRequest */ public HttpRequest setHeader(String name, String value) { headers.set(name, value); return this; } /** * Get the request content. 
* * @return the content to be send */ public Flux<ByteBuffer> getBody() { return body == null ? null : body.toFluxByteBuffer(); } /** * Get the request content. * * @return the content to be send */ public BinaryData getBodyAsBinaryData() { return body; } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(String content) { this.body = BinaryData.fromString(content); setContentLength(this.body.getLength()); return this; } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ /** * Set request content. * <p> * Caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(Flux<ByteBuffer> content) { if (content != null) { setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(content))); } else { this.body = null; } return this; } /** * Set request content. * <p> * If provided content has known length, i.e. {@link BinaryData * Content-Length header is updated. Otherwise, * if provided content has unknown length, i.e. {@link BinaryData * the caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(BinaryData content) { this.body = content; if (content != null && content.getLength() != null) { setContentLength(content.getLength()); } return this; } private void setContentLength(long contentLength) { headers.set("Content-Length", String.valueOf(contentLength)); } /** * Creates a copy of the request. * * The main purpose of this is so that this HttpRequest can be changed and the resulting HttpRequest can be a * backup. This means that the cloned HttpHeaders and body must not be able to change from side effects of this * HttpRequest. * * @return a new HTTP request instance with cloned instances of all mutable properties. */ public HttpRequest copy() { final HttpHeaders bufferedHeaders = new HttpHeaders(headers); return new HttpRequest(httpMethod, url, bufferedHeaders, body); } }
class HttpRequest { private static final ClientLogger LOGGER = new ClientLogger(HttpRequest.class); private HttpMethod httpMethod; private URL url; private HttpHeaders headers; private BinaryData body; /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to */ public HttpRequest(HttpMethod httpMethod, URL url) { this(httpMethod, url, new HttpHeaders(), (BinaryData) null); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest(HttpMethod httpMethod, String url) { this.httpMethod = httpMethod; setUrl(url); this.headers = new HttpHeaders(); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, Flux<ByteBuffer> body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(BinaryDataHelper.createBinaryData(new FluxByteBufferContent(body))); } /** * Create a new HttpRequest instance. * * @param httpMethod the HTTP request method * @param url the target address to send the request to * @param headers the HTTP headers to use with this request * @param body the request content */ public HttpRequest(HttpMethod httpMethod, URL url, HttpHeaders headers, BinaryData body) { this.httpMethod = httpMethod; this.url = url; this.headers = headers; setBody(body); } /** * Get the request method. * * @return the request method */ public HttpMethod getHttpMethod() { return httpMethod; } /** * Set the request method. * * @param httpMethod the request method * @return this HttpRequest */ public HttpRequest setHttpMethod(HttpMethod httpMethod) { this.httpMethod = httpMethod; return this; } /** * Get the target address. * * @return the target address */ public URL getUrl() { return url; } /** * Set the target address to send the request to. * * @param url target address as {@link URL} * @return this HttpRequest */ public HttpRequest setUrl(URL url) { this.url = url; return this; } /** * Set the target address to send the request to. * * @param url target address as a String * @return this HttpRequest * @throws IllegalArgumentException if {@code url} is null or it cannot be parsed into a valid URL. */ public HttpRequest setUrl(String url) { try { this.url = new URL(url); } catch (MalformedURLException ex) { throw LOGGER.logExceptionAsWarning(new IllegalArgumentException("'url' must be a valid URL.", ex)); } return this; } /** * Get the request headers. * * @return headers to be sent */ public HttpHeaders getHeaders() { return headers; } /** * Set the request headers. * * @param headers the set of headers * @return this HttpRequest */ public HttpRequest setHeaders(HttpHeaders headers) { this.headers = headers; return this; } /** * Set a request header, replacing any existing value. 
A null for {@code value} will remove the header if one with * matching name exists. * * @param name the header name * @param value the header value * @return this HttpRequest */ public HttpRequest setHeader(String name, String value) { headers.set(name, value); return this; } /** * Get the request content. * * @return the content to be sent */ public Flux<ByteBuffer> getBody() { return body == null ? null : body.toFluxByteBuffer(); } /** * Get the request content. * * @return the content to be sent */ public BinaryData getBodyAsBinaryData() { return body; } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(String content) { return setBody(BinaryData.fromString(content)); } /** * Set the request content. * <p> * The Content-Length header will be set based on the given content's length. * * @param content the request content * @return this HttpRequest */ /** * Set request content. * <p> * Caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(Flux<ByteBuffer> content) { if (content != null) { this.body = BinaryDataHelper.createBinaryData(new FluxByteBufferContent(content)); } else { this.body = null; } return this; } /** * Set request content. * <p> * If provided content has known length, i.e. {@link BinaryData * Content-Length header is updated. Otherwise, * if provided content has unknown length, i.e. {@link BinaryData * the caller must set the Content-Length header to indicate the length of the content, or use Transfer-Encoding: * chunked. * * @param content the request content * @return this HttpRequest */ public HttpRequest setBody(BinaryData content) { this.body = content; if (content != null && content.getLength() != null) { setContentLength(content.getLength()); } return this; } private void setContentLength(long contentLength) { headers.set("Content-Length", String.valueOf(contentLength)); } /** * Creates a copy of the request. * * The main purpose of this is so that this HttpRequest can be changed and the resulting HttpRequest can be a * backup. This means that the cloned HttpHeaders and body must not be able to change from side effects of this * HttpRequest. * * @return a new HTTP request instance with cloned instances of all mutable properties. */ public HttpRequest copy() { final HttpHeaders bufferedHeaders = new HttpHeaders(headers); return new HttpRequest(httpMethod, url, bufferedHeaders, body); } }
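For contrast, an illustrative comparison of the delegating `byte[]` overload, which picks up Content-Length from `setBody(BinaryData)`, with the `Flux<ByteBuffer>` overload, which leaves Content-Length to the caller because the stream's length is unknown up front; URLs and payloads are placeholders:

```java
import com.azure.core.http.HttpMethod;
import com.azure.core.http.HttpRequest;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;

public final class SetBodyOverloadsSample {
    public static void main(String[] args) {
        // byte[] body: length is known, so Content-Length is set automatically.
        HttpRequest fromBytes = new HttpRequest(HttpMethod.PUT, "https://example.com")
            .setBody(new byte[] {1, 2, 3});
        System.out.println(fromBytes.getHeaders().getValue("Content-Length")); // 3

        // Flux<ByteBuffer> body: length is unknown, so no Content-Length is set
        // and the caller must supply it (or use Transfer-Encoding: chunked).
        HttpRequest fromFlux = new HttpRequest(HttpMethod.PUT, "https://example.com")
            .setBody(Flux.just(ByteBuffer.wrap(new byte[] {1, 2, 3})));
        System.out.println(fromFlux.getHeaders().getValue("Content-Length")); // null
    }
}
```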
![image](https://user-images.githubusercontent.com/61715331/164538672-c8dde9e4-480a-4483-94eb-af050e728cc7.png)
private static Mono<ByteString> toByteString(Flux<ByteBuffer> bbFlux) { Objects.requireNonNull(bbFlux, "'bbFlux' cannot be null."); return Mono.using(okio.Buffer::new, buffer -> bbFlux.reduce(buffer, (b, byteBuffer) -> { try { b.write(byteBuffer); return b; } catch (IOException ioe) { throw Exceptions.propagate(ioe); } }).map(b -> ByteString.of(b.readByteArray())), okio.Buffer::clear) .switchIfEmpty(EMPTY_BYTE_STRING_MONO); }
.switchIfEmpty(EMPTY_BYTE_STRING_MONO);
private static Mono<ByteString> toByteString(Flux<ByteBuffer> bbFlux) { Objects.requireNonNull(bbFlux, "'bbFlux' cannot be null."); return Mono.using(okio.Buffer::new, buffer -> bbFlux.reduce(buffer, (b, byteBuffer) -> { try { b.write(byteBuffer); return b; } catch (IOException ioe) { throw Exceptions.propagate(ioe); } }).map(b -> ByteString.of(b.readByteArray())), okio.Buffer::clear) .switchIfEmpty(EMPTY_BYTE_STRING_MONO); }
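The `Mono.using` pattern in the method above ties the lifetime of the okio.Buffer to the subscription. The following is a minimal, standalone sketch of the same aggregation idea, illustrative only: the class name and sample data are invented, and it assumes Reactor and okio on the classpath, mirroring the calls used in the method recorded above.

```java
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import okio.Buffer;
import okio.ByteString;
import reactor.core.Exceptions;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public final class ByteStringAggregationSketch {
    public static void main(String[] args) {
        Flux<ByteBuffer> chunks = Flux.just(
            ByteBuffer.wrap("Hello, ".getBytes(StandardCharsets.UTF_8)),
            ByteBuffer.wrap("world".getBytes(StandardCharsets.UTF_8)));

        // Mono.using creates the Buffer on subscription and clears it when the Mono terminates,
        // so the pooled segments are returned even if the upstream errors.
        Mono<ByteString> aggregated = Mono.using(
            Buffer::new,
            buffer -> chunks.reduce(buffer, (b, bb) -> {
                try {
                    b.write(bb); // copy the chunk into the okio buffer
                    return b;
                } catch (IOException ioe) {
                    throw Exceptions.propagate(ioe);
                }
            }).map(b -> ByteString.of(b.readByteArray())),
            Buffer::clear);

        System.out.println(aggregated.block().utf8()); // prints "Hello, world"
    }
}
```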
class OkHttpAsyncHttpClient implements HttpClient { final OkHttpClient httpClient; private static final Mono<okio.ByteString> EMPTY_BYTE_STRING_MONO = Mono.just(okio.ByteString.EMPTY); OkHttpAsyncHttpClient(OkHttpClient httpClient) { this.httpClient = httpClient; } @Override public Mono<HttpResponse> send(HttpRequest request) { return send(request, Context.NONE); } @Override public Mono<HttpResponse> send(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); return Mono.create(sink -> sink.onRequest(value -> { toOkHttpRequest(request).subscribe(okHttpRequest -> { try { Call call = httpClient.newCall(okHttpRequest); call.enqueue(new OkHttpCallback(sink, request, eagerlyReadResponse)); sink.onCancel(call::cancel); } catch (Exception ex) { sink.error(ex); } }, sink::error); })); } /** * Converts the given azure-core request to an OkHttp request. * * @param request the azure-core request * @return the Mono emitting the OkHttp request */ private static Mono<okhttp3.Request> toOkHttpRequest(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return Mono.just(requestBuilder.get().build()); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return Mono.just(requestBuilder.head().build()); } return toOkHttpRequestBody(request.getBody(), request.getHeaders()) .map(okhttpRequestBody -> requestBuilder.method(request.getHttpMethod().toString(), okhttpRequestBody) .build()); } /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. * * @param bbFlux stream of java.nio.ByteBuffer representing request content * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ private static Mono<RequestBody> toOkHttpRequestBody(Flux<ByteBuffer> bbFlux, HttpHeaders headers) { Mono<okio.ByteString> bsMono = bbFlux == null ? EMPTY_BYTE_STRING_MONO : toByteString(bbFlux); return bsMono.map(bs -> { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); return RequestBody.create(bs, mediaType); }); } /** * Aggregate a Flux of java.nio.ByteBuffer into a single okio.ByteString. * * A pooled okio.Buffer is used to buffer the emitted ByteBuffer instances. The content of each ByteBuffer will be * written (i.e. copied) to the internal okio.Buffer slots. Once the stream terminates, the contents of all slots get * copied to a single byte array and an okio.ByteString will be created referring to this byte array. Finally, the * initial okio.Buffer will be returned to the pool. 
* * @param bbFlux the Flux of ByteBuffer to aggregate * @return a mono emitting aggregated ByteString */ private static class OkHttpCallback implements okhttp3.Callback { private final MonoSink<HttpResponse> sink; private final HttpRequest request; private final boolean eagerlyReadResponse; OkHttpCallback(MonoSink<HttpResponse> sink, HttpRequest request, boolean eagerlyReadResponse) { this.sink = sink; this.request = request; this.eagerlyReadResponse = eagerlyReadResponse; } @SuppressWarnings("NullableProblems") @Override public void onFailure(okhttp3.Call call, IOException e) { sink.error(e); } @SuppressWarnings("NullableProblems") @Override public void onResponse(okhttp3.Call call, okhttp3.Response response) { /* * Use a buffered response when we are eagerly reading the response from the network and the body isn't * empty. */ if (eagerlyReadResponse) { ResponseBody body = response.body(); if (Objects.nonNull(body)) { try { byte[] bytes = body.bytes(); body.close(); sink.success(new OkHttpAsyncBufferedResponse(response, request, bytes)); } catch (IOException ex) { sink.error(ex); } } else { sink.success(new OkHttpAsyncResponse(response, request)); } } else { sink.success(new OkHttpAsyncResponse(response, request)); } } } }
class OkHttpAsyncHttpClient implements HttpClient { final OkHttpClient httpClient; private static final Mono<okio.ByteString> EMPTY_BYTE_STRING_MONO = Mono.just(okio.ByteString.EMPTY); OkHttpAsyncHttpClient(OkHttpClient httpClient) { this.httpClient = httpClient; } @Override public Mono<HttpResponse> send(HttpRequest request) { return send(request, Context.NONE); } @Override public Mono<HttpResponse> send(HttpRequest request, Context context) { boolean eagerlyReadResponse = (boolean) context.getData("azure-eagerly-read-response").orElse(false); return Mono.create(sink -> sink.onRequest(value -> { toOkHttpRequest(request).subscribe(okHttpRequest -> { try { Call call = httpClient.newCall(okHttpRequest); call.enqueue(new OkHttpCallback(sink, request, eagerlyReadResponse)); sink.onCancel(call::cancel); } catch (Exception ex) { sink.error(ex); } }, sink::error); })); } /** * Converts the given azure-core request to an OkHttp request. * * @param request the azure-core request * @return the Mono emitting the OkHttp request */ private static Mono<okhttp3.Request> toOkHttpRequest(HttpRequest request) { Request.Builder requestBuilder = new Request.Builder() .url(request.getUrl()); if (request.getHeaders() != null) { for (HttpHeader hdr : request.getHeaders()) { hdr.getValuesList().forEach(value -> requestBuilder.addHeader(hdr.getName(), value)); } } if (request.getHttpMethod() == HttpMethod.GET) { return Mono.just(requestBuilder.get().build()); } else if (request.getHttpMethod() == HttpMethod.HEAD) { return Mono.just(requestBuilder.head().build()); } return toOkHttpRequestBody(request.getBody(), request.getHeaders()) .map(okhttpRequestBody -> requestBuilder.method(request.getHttpMethod().toString(), okhttpRequestBody) .build()); } /** * Create a Mono of okhttp3.RequestBody from the given java.nio.ByteBuffer Flux. * * @param bbFlux stream of java.nio.ByteBuffer representing request content * @param headers the headers associated with the original request * @return the Mono emitting okhttp3.RequestBody */ private static Mono<RequestBody> toOkHttpRequestBody(Flux<ByteBuffer> bbFlux, HttpHeaders headers) { Mono<okio.ByteString> bsMono = bbFlux == null ? EMPTY_BYTE_STRING_MONO : toByteString(bbFlux); return bsMono.map(bs -> { String contentType = headers.getValue("Content-Type"); MediaType mediaType = (contentType == null) ? null : MediaType.parse(contentType); return RequestBody.create(bs, mediaType); }); } /** * Aggregate a Flux of java.nio.ByteBuffer into a single okio.ByteString. * * A pooled okio.Buffer is used to buffer the emitted ByteBuffer instances. The content of each ByteBuffer will be * written (i.e. copied) to the internal okio.Buffer slots. Once the stream terminates, the contents of all slots get * copied to a single byte array and an okio.ByteString will be created referring to this byte array. Finally, the * initial okio.Buffer will be returned to the pool. 
* * @param bbFlux the Flux of ByteBuffer to aggregate * @return a mono emitting aggregated ByteString */ private static class OkHttpCallback implements okhttp3.Callback { private final MonoSink<HttpResponse> sink; private final HttpRequest request; private final boolean eagerlyReadResponse; OkHttpCallback(MonoSink<HttpResponse> sink, HttpRequest request, boolean eagerlyReadResponse) { this.sink = sink; this.request = request; this.eagerlyReadResponse = eagerlyReadResponse; } @SuppressWarnings("NullableProblems") @Override public void onFailure(okhttp3.Call call, IOException e) { sink.error(e); } @SuppressWarnings("NullableProblems") @Override public void onResponse(okhttp3.Call call, okhttp3.Response response) { /* * Use a buffered response when we are eagerly reading the response from the network and the body isn't * empty. */ if (eagerlyReadResponse) { ResponseBody body = response.body(); if (Objects.nonNull(body)) { try { byte[] bytes = body.bytes(); body.close(); sink.success(new OkHttpAsyncBufferedResponse(response, request, bytes)); } catch (IOException ex) { sink.error(ex); } } else { sink.success(new OkHttpAsyncResponse(response, request)); } } else { sink.success(new OkHttpAsyncResponse(response, request)); } } } }
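To close the loop on the eager-read flag checked in `send(HttpRequest, Context)` above, here is a hedged caller-side sketch. It assumes the `OkHttpAsyncHttpClientBuilder` that typically accompanies this client (not shown in the excerpt) and the azure-core `Context` type; the URL and class name are placeholders, not part of the recorded change.

```java
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpMethod;
import com.azure.core.http.HttpRequest;
import com.azure.core.http.HttpResponse;
import com.azure.core.http.okhttp.OkHttpAsyncHttpClientBuilder;
import com.azure.core.util.Context;

public final class EagerReadSketch {
    public static void main(String[] args) {
        // Assumed builder for the OkHttp-backed client; configuration options omitted.
        HttpClient client = new OkHttpAsyncHttpClientBuilder().build();

        HttpRequest request = new HttpRequest(HttpMethod.GET, "https://example.invalid/status");

        // "azure-eagerly-read-response" is the context key read in send(...) above; when true,
        // the OkHttp response body is buffered into memory and closed before the HttpResponse
        // is emitted, so the returned response holds no live network connection.
        Context eagerRead = new Context("azure-eagerly-read-response", true);

        HttpResponse response = client.send(request, eagerRead).block();
        if (response != null) {
            System.out.println("Status: " + response.getStatusCode());
        }
    }
}
```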