
Example 6 with Value

use of org.eclipse.xtext.testlanguages.actionLang2.Value in project xtext-core by eclipse.

the class ActionTestLanguage2SemanticSequencer method sequence.

@Override
public void sequence(ISerializationContext context, EObject semanticObject) {
    EPackage epackage = semanticObject.eClass().getEPackage();
    ParserRule rule = context.getParserRule();
    Action action = context.getAssignedAction();
    Set<Parameter> parameters = context.getEnabledBooleanParameters();
    if (epackage == ActionLang2Package.eINSTANCE)
        switch(semanticObject.eClass().getClassifierID()) {
            case ActionLang2Package.ORING:
                sequence_ORing(context, (ORing) semanticObject);
                return;
            case ActionLang2Package.VALUE:
                sequence_Value(context, (Value) semanticObject);
                return;
        }
    if (errorAcceptor != null)
        errorAcceptor.accept(diagnosticProvider.createInvalidContextOrTypeDiagnostic(semanticObject, context));
}
Also used : ParserRule(org.eclipse.xtext.ParserRule) Action(org.eclipse.xtext.Action) Value(org.eclipse.xtext.testlanguages.actionLang2.Value) Parameter(org.eclipse.xtext.Parameter) EPackage(org.eclipse.emf.ecore.EPackage) ORing(org.eclipse.xtext.testlanguages.actionLang2.ORing)
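
For context, the per-rule helpers this dispatcher calls (sequence_ORing, sequence_Value) are generated alongside it. A minimal sketch of what sequence_Value typically looks like in a generated Xtext semantic sequencer, assuming the Value rule has a single mandatory name=ID assignment; the EMF literal and grammar-access names below are hypothetical, not taken from the actual ActionTestLanguage2 grammar:

// Sketch only: generated Xtext sequencer helper, assuming a single
// mandatory name=ID assignment on the Value rule. VALUE__NAME and
// getNameIDTerminalRuleCall_0() are illustrative names.
protected void sequence_Value(ISerializationContext context, Value semanticObject) {
    if (errorAcceptor != null) {
        // Report a diagnostic if the mandatory feature is unset
        if (transientValues.isValueTransient(semanticObject, ActionLang2Package.Literals.VALUE__NAME) == ValueTransient.YES)
            errorAcceptor.accept(diagnosticProvider.createFeatureValueMissingDiagnostic(semanticObject, ActionLang2Package.Literals.VALUE__NAME));
    }
    // Feed each assigned feature to the serializer in grammar order
    SequenceFeeder feeder = createSequencerFeeder(context, semanticObject);
    feeder.accept(grammarAccess.getValueAccess().getNameIDTerminalRuleCall_0(), semanticObject.getName());
    feeder.finish();
}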

Example 7 with Value

use of com.google.privacy.dlp.v2.Value in project java-docs-samples by GoogleCloudPlatform.

the class RiskAnalysis method numericalStatsAnalysis.

// [START dlp_numerical_stats]
/**
 * Calculate numerical statistics for a column in a BigQuery table using the DLP API.
 *
 * @param projectId The Google Cloud Platform project ID to run the API call under.
 * @param datasetId The BigQuery dataset to analyze.
 * @param tableId The BigQuery table to analyze.
 * @param columnName The name of the column to analyze, which must contain only numerical data.
 * @param topicId The name of the Pub/Sub topic to notify once the job completes.
 * @param subscriptionId The name of the Pub/Sub subscription to use when listening for job
 *     completion status.
 */
private static void numericalStatsAnalysis(String projectId, String datasetId, String tableId, String columnName, String topicId, String subscriptionId) throws Exception {
    // Instantiates a client
    try (DlpServiceClient dlpServiceClient = DlpServiceClient.create()) {
        BigQueryTable bigQueryTable = BigQueryTable.newBuilder().setTableId(tableId).setDatasetId(datasetId).setProjectId(projectId).build();
        FieldId fieldId = FieldId.newBuilder().setName(columnName).build();
        NumericalStatsConfig numericalStatsConfig = NumericalStatsConfig.newBuilder().setField(fieldId).build();
        PrivacyMetric privacyMetric = PrivacyMetric.newBuilder().setNumericalStatsConfig(numericalStatsConfig).build();
        String topicName = String.format("projects/%s/topics/%s", projectId, topicId);
        PublishToPubSub publishToPubSub = PublishToPubSub.newBuilder().setTopic(topicName).build();
        // Create action to publish job status notifications over Google Cloud Pub/Sub
        Action action = Action.newBuilder().setPubSub(publishToPubSub).build();
        RiskAnalysisJobConfig riskAnalysisJobConfig = RiskAnalysisJobConfig.newBuilder().setSourceTable(bigQueryTable).setPrivacyMetric(privacyMetric).addActions(action).build();
        CreateDlpJobRequest createDlpJobRequest = CreateDlpJobRequest.newBuilder().setParent(ProjectName.of(projectId).toString()).setRiskJob(riskAnalysisJobConfig).build();
        DlpJob dlpJob = dlpServiceClient.createDlpJob(createDlpJobRequest);
        String dlpJobName = dlpJob.getName();
        final SettableApiFuture<Boolean> done = SettableApiFuture.create();
        // Set up a Pub/Sub subscriber to listen on the job completion status
        Subscriber subscriber = Subscriber.newBuilder(ProjectSubscriptionName.newBuilder().setProject(projectId).setSubscription(subscriptionId).build(), (pubsubMessage, ackReplyConsumer) -> {
            // Null-safe comparison: the "DlpJobName" attribute may be absent
            if (dlpJobName.equals(pubsubMessage.getAttributesMap().get("DlpJobName"))) {
                // notify job completion
                done.set(true);
                ackReplyConsumer.ack();
            } else {
                // Not the message we are waiting for; allow redelivery
                ackReplyConsumer.nack();
            }
        }).build();
        subscriber.startAsync();
        // For long jobs, consider using a truly asynchronous execution model such as Cloud Functions
        try {
            done.get(1, TimeUnit.MINUTES);
            // Wait for the job to become available
            Thread.sleep(500);
        } catch (TimeoutException e) {
            System.out.println("Unable to verify job completion.");
        }
        // Retrieve completed job status
        DlpJob completedJob = dlpServiceClient.getDlpJob(GetDlpJobRequest.newBuilder().setName(dlpJobName).build());
        System.out.println("Job status: " + completedJob.getState());
        AnalyzeDataSourceRiskDetails riskDetails = completedJob.getRiskDetails();
        AnalyzeDataSourceRiskDetails.NumericalStatsResult result = riskDetails.getNumericalStatsResult();
        System.out.printf("Value range : [%.3f, %.3f]\n", result.getMinValue().getFloatValue(), result.getMaxValue().getFloatValue());
        int percent = 1;
        Double lastValue = null;
        for (Value quantileValue : result.getQuantileValuesList()) {
            Double currentValue = quantileValue.getFloatValue();
            if (lastValue == null || !lastValue.equals(currentValue)) {
                System.out.printf("Value at %s%% quantile : %.3f%n", percent, currentValue);
            }
            lastValue = currentValue;
            percent++;
        }
    } catch (Exception e) {
        System.out.println("Error in categoricalStatsAnalysis: " + e.getMessage());
    }
}
Also used : Arrays(java.util.Arrays) TimeoutException(java.util.concurrent.TimeoutException) Subscriber(com.google.cloud.pubsub.v1.Subscriber) DefaultParser(org.apache.commons.cli.DefaultParser) KMapEstimationHistogramBucket(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket) LDiversityEquivalenceClass(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass) ValueFrequency(com.google.privacy.dlp.v2.ValueFrequency) LDiversityConfig(com.google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig) NumericalStatsConfig(com.google.privacy.dlp.v2.PrivacyMetric.NumericalStatsConfig) Action(com.google.privacy.dlp.v2.Action) KMapEstimationConfig(com.google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig) CategoricalStatsHistogramBucket(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket) KAnonymityEquivalenceClass(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass) Value(com.google.privacy.dlp.v2.Value) TaggedField(com.google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedField) RiskAnalysisJobConfig(com.google.privacy.dlp.v2.RiskAnalysisJobConfig) Collectors(java.util.stream.Collectors) SettableApiFuture(com.google.api.core.SettableApiFuture) List(java.util.List) KAnonymityResult(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult) ParseException(org.apache.commons.cli.ParseException) BigQueryTable(com.google.privacy.dlp.v2.BigQueryTable) ProjectSubscriptionName(com.google.pubsub.v1.ProjectSubscriptionName) AnalyzeDataSourceRiskDetails(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails) Options(org.apache.commons.cli.Options) HelpFormatter(org.apache.commons.cli.HelpFormatter) CategoricalStatsConfig(com.google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfig) ArrayList(java.util.ArrayList) ServiceOptions(com.google.cloud.ServiceOptions) CommandLine(org.apache.commons.cli.CommandLine) FieldId(com.google.privacy.dlp.v2.FieldId) ProjectTopicName(com.google.pubsub.v1.ProjectTopicName) Option(org.apache.commons.cli.Option) DlpServiceClient(com.google.cloud.dlp.v2.DlpServiceClient) Iterator(java.util.Iterator) CreateDlpJobRequest(com.google.privacy.dlp.v2.CreateDlpJobRequest) CommandLineParser(org.apache.commons.cli.CommandLineParser) KMapEstimationResult(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult) KMapEstimationQuasiIdValues(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues) InfoType(com.google.privacy.dlp.v2.InfoType) LDiversityResult(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult) KAnonymityConfig(com.google.privacy.dlp.v2.PrivacyMetric.KAnonymityConfig) TimeUnit(java.util.concurrent.TimeUnit) PublishToPubSub(com.google.privacy.dlp.v2.Action.PublishToPubSub) ProjectName(com.google.privacy.dlp.v2.ProjectName) GetDlpJobRequest(com.google.privacy.dlp.v2.GetDlpJobRequest) LDiversityHistogramBucket(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket) KAnonymityHistogramBucket(com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket) PrivacyMetric(com.google.privacy.dlp.v2.PrivacyMetric) OptionGroup(org.apache.commons.cli.OptionGroup) DlpJob(com.google.privacy.dlp.v2.DlpJob) Collections(java.util.Collections)
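
For orientation, a hypothetical call site for this method; every argument below is a placeholder of mine, not taken from the sample:

// Hypothetical invocation; all resource names are placeholders.
numericalStatsAnalysis(
    "my-project",      // projectId: GCP project to run the API call under
    "my_dataset",      // datasetId: BigQuery dataset to analyze
    "my_table",        // tableId: BigQuery table to analyze
    "age",             // columnName: must contain only numerical data
    "dlp-status",      // topicId: Pub/Sub topic notified on job completion
    "dlp-status-sub"); // subscriptionId: subscription used to await the job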

Example 8 with Value

use of com.google.firestore.v1.Value in project beam by apache.

the class FirestoreV1FnBatchWriteWithDeadLetterQueueTest method enqueueingWritesValidateBytesSize.

@Override
@Test
public void enqueueingWritesValidateBytesSize() throws Exception {
    int maxBytes = 50;
    RpcQosOptions options = rpcQosOptions.toBuilder().withBatchMaxBytes(maxBytes).build();
    when(ff.getFirestoreStub(any())).thenReturn(stub);
    when(ff.getRpcQos(any())).thenReturn(FirestoreStatefulComponentFactory.INSTANCE.getRpcQos(options));
    byte[] bytes = new byte[maxBytes + 1];
    SecureRandom.getInstanceStrong().nextBytes(bytes);
    byte[] base64Bytes = Base64.getEncoder().encode(bytes);
    String base64String = Base64.getEncoder().encodeToString(bytes);
    Value largeValue = Value.newBuilder().setStringValueBytes(ByteString.copyFrom(base64Bytes)).build();
    // apply a doc transform that is too large
    Write write1 = Write.newBuilder().setTransform(DocumentTransform.newBuilder().setDocument(String.format("doc-%03d", 2)).addFieldTransforms(FieldTransform.newBuilder().setAppendMissingElements(ArrayValue.newBuilder().addValues(largeValue)))).build();
    // delete a doc that is too large
    Write write2 = Write.newBuilder().setDelete(String.format("doc-%03d_%s", 3, base64String)).build();
    // update a doc that is too large
    Write write3 = Write.newBuilder().setUpdate(Document.newBuilder().setName(String.format("doc-%03d", 4)).putAllFields(ImmutableMap.of("foo", largeValue))).build();
    BatchWriteFnWithDeadLetterQueue fn = getFn(clock, ff, options, metricsFixture.counterFactory, metricsFixture.distributionFactory);
    fn.populateDisplayData(displayDataBuilder);
    fn.setup();
    fn.startBundle(startBundleContext);
    ArgumentCaptor<WriteFailure> write1FailureCapture = ArgumentCaptor.forClass(WriteFailure.class);
    doNothing().when(processContext).outputWithTimestamp(write1FailureCapture.capture(), any());
    when(processContext.element()).thenReturn(write1);
    fn.processElement(processContext, window);
    WriteFailure failure = write1FailureCapture.getValue();
    assertNotNull(failure);
    String message = failure.getStatus().getMessage();
    assertTrue(message.contains("TRANSFORM"));
    assertTrue(message.contains("larger than configured max allowed bytes per batch"));
    ArgumentCaptor<WriteFailure> write2FailureCapture = ArgumentCaptor.forClass(WriteFailure.class);
    doNothing().when(processContext).outputWithTimestamp(write2FailureCapture.capture(), any());
    when(processContext.element()).thenReturn(write2);
    fn.processElement(processContext, window);
    WriteFailure failure2 = write2FailureCapture.getValue();
    assertNotNull(failure2);
    String message2 = failure2.getStatus().getMessage();
    assertTrue(message2.contains("DELETE"));
    assertTrue(message2.contains("larger than configured max allowed bytes per batch"));
    ArgumentCaptor<WriteFailure> write3FailureCapture = ArgumentCaptor.forClass(WriteFailure.class);
    doNothing().when(processContext).outputWithTimestamp(write3FailureCapture.capture(), any());
    when(processContext.element()).thenReturn(write3);
    fn.processElement(processContext, window);
    WriteFailure failure3 = write3FailureCapture.getValue();
    assertNotNull(failure3);
    String message3 = failure3.getStatus().getMessage();
    assertTrue(message3.contains("UPDATE"));
    assertTrue(message3.contains("larger than configured max allowed bytes per batch"));
    assertEquals(0, fn.writes.size());
}
Also used : Write(com.google.firestore.v1.Write) BatchWriteFnWithDeadLetterQueue(org.apache.beam.sdk.io.gcp.firestore.FirestoreV1WriteFn.BatchWriteFnWithDeadLetterQueue) Value(com.google.firestore.v1.Value) ArrayValue(com.google.firestore.v1.ArrayValue) WriteFailure(org.apache.beam.sdk.io.gcp.firestore.FirestoreV1.WriteFailure) ByteString(com.google.protobuf.ByteString) Test(org.junit.Test)
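
As a side note on what the assertions pin down: the size check the batcher applies can be reproduced with protobuf's standard getSerializedSize(). A minimal sketch, assuming the same 50-byte budget as withBatchMaxBytes(50) above; the class name and printed text are mine:

import com.google.firestore.v1.Value;
import com.google.protobuf.ByteString;

public class BatchBytesSketch {
    public static void main(String[] args) {
        // Mirrors withBatchMaxBytes(50) in the test above
        int maxBytes = 50;
        // A string value carrying maxBytes + 1 bytes of payload
        Value largeValue = Value.newBuilder()
                .setStringValueBytes(ByteString.copyFrom(new byte[maxBytes + 1]))
                .build();
        // getSerializedSize() is the message's encoded size in bytes; any Write
        // embedding this Value is at least this large, so it can never fit the
        // batch budget and is surfaced as a WriteFailure instead of enqueued.
        System.out.println("encoded size: " + largeValue.getSerializedSize()
                + " bytes, budget: " + maxBytes + " bytes");
    }
}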

Example 9 with Value

use of com.google.firestore.v1.Value in project beam by apache.

the class FirestoreV1FnBatchWriteWithSummaryTest method enqueueingWritesValidateBytesSize.

@Override
@Test
public void enqueueingWritesValidateBytesSize() throws Exception {
    int maxBytes = 50;
    RpcQosOptions options = rpcQosOptions.toBuilder().withBatchMaxBytes(maxBytes).build();
    when(ff.getFirestoreStub(any())).thenReturn(stub);
    when(ff.getRpcQos(any())).thenReturn(FirestoreStatefulComponentFactory.INSTANCE.getRpcQos(options));
    byte[] bytes = new byte[maxBytes + 1];
    SecureRandom.getInstanceStrong().nextBytes(bytes);
    byte[] base64Bytes = Base64.getEncoder().encode(bytes);
    String base64String = Base64.getEncoder().encodeToString(bytes);
    Value largeValue = Value.newBuilder().setStringValueBytes(ByteString.copyFrom(base64Bytes)).build();
    // apply a doc transform that is too large
    Write write1 = Write.newBuilder().setTransform(DocumentTransform.newBuilder().setDocument(String.format("doc-%03d", 2)).addFieldTransforms(FieldTransform.newBuilder().setAppendMissingElements(ArrayValue.newBuilder().addValues(largeValue)))).build();
    // delete a doc that is too large
    Write write2 = Write.newBuilder().setDelete(String.format("doc-%03d_%s", 3, base64String)).build();
    // update a doc that is too large
    Write write3 = Write.newBuilder().setUpdate(Document.newBuilder().setName(String.format("doc-%03d", 4)).putAllFields(ImmutableMap.of("foo", largeValue))).build();
    BatchWriteFnWithSummary fn = getFn(clock, ff, options, metricsFixture.counterFactory, metricsFixture.distributionFactory);
    fn.populateDisplayData(displayDataBuilder);
    fn.setup();
    fn.startBundle(startBundleContext);
    try {
        when(processContext.element()).thenReturn(write1);
        fn.processElement(processContext, window);
        fail("expected validation error");
    } catch (FailedWritesException e) {
        WriteFailure failure = e.getWriteFailures().get(0);
        assertNotNull(failure);
        String message = failure.getStatus().getMessage();
        assertTrue(message.contains("TRANSFORM"));
        assertTrue(message.contains("larger than configured max allowed bytes per batch"));
    }
    try {
        when(processContext.element()).thenReturn(write2);
        fn.processElement(processContext, window);
        fail("expected validation error");
    } catch (FailedWritesException e) {
        WriteFailure failure = e.getWriteFailures().get(0);
        assertNotNull(failure);
        String message = failure.getStatus().getMessage();
        assertTrue(message.contains("DELETE"));
        assertTrue(message.contains("larger than configured max allowed bytes per batch"));
    }
    try {
        when(processContext.element()).thenReturn(write3);
        fn.processElement(processContext, window);
        fail("expected validation error");
    } catch (FailedWritesException e) {
        WriteFailure failure = e.getWriteFailures().get(0);
        assertNotNull(failure);
        String message = failure.getStatus().getMessage();
        assertTrue(message.contains("UPDATE"));
        assertTrue(message.contains("larger than configured max allowed bytes per batch"));
    }
    assertEquals(0, fn.writes.size());
}
Also used : Write(com.google.firestore.v1.Write) FailedWritesException(org.apache.beam.sdk.io.gcp.firestore.FirestoreV1.FailedWritesException) Value(com.google.firestore.v1.Value) ArrayValue(com.google.firestore.v1.ArrayValue) WriteFailure(org.apache.beam.sdk.io.gcp.firestore.FirestoreV1.WriteFailure) ByteString(com.google.protobuf.ByteString) BatchWriteFnWithSummary(org.apache.beam.sdk.io.gcp.firestore.FirestoreV1WriteFn.BatchWriteFnWithSummary) Test(org.junit.Test)
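
Read together with Example 8, the two tests fix the contract for how each variant surfaces the same oversized-write validation. A caller-side sketch of the difference, with illustrative handler bodies:

// Summary variant: validation failures are thrown from processElement.
try {
    fn.processElement(processContext, window);
} catch (FailedWritesException e) {
    for (WriteFailure failure : e.getWriteFailures()) {
        // e.g. "... larger than configured max allowed bytes per batch"
        System.err.println(failure.getStatus().getMessage());
    }
}
// Dead-letter variant (Example 8): nothing is thrown; each WriteFailure is
// emitted downstream via outputWithTimestamp and processing continues.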

Aggregations

Value (com.google.privacy.dlp.v2.Value) 5
ServiceOptions (com.google.cloud.ServiceOptions) 4
DlpServiceClient (com.google.cloud.dlp.v2.DlpServiceClient) 4
FieldId (com.google.privacy.dlp.v2.FieldId) 4
InfoType (com.google.privacy.dlp.v2.InfoType) 4
ProjectName (com.google.privacy.dlp.v2.ProjectName) 4
SettableApiFuture (com.google.api.core.SettableApiFuture) 3
Subscriber (com.google.cloud.pubsub.v1.Subscriber) 3
Action (com.google.privacy.dlp.v2.Action) 3
PublishToPubSub (com.google.privacy.dlp.v2.Action.PublishToPubSub) 3
AnalyzeDataSourceRiskDetails (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails) 3
CategoricalStatsHistogramBucket (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket) 3
KAnonymityResult (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult) 3
KAnonymityEquivalenceClass (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass) 3
KAnonymityHistogramBucket (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket) 3
KMapEstimationResult (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult) 3
KMapEstimationHistogramBucket (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket) 3
KMapEstimationQuasiIdValues (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues) 3
LDiversityResult (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult) 3
LDiversityEquivalenceClass (com.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass) 3