Usage of org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting in project beam by apache.
Class AzureBlobStoreFileSystem, method generateSasToken.
@VisibleForTesting
/**
* Generate an SAS Token if the user did not provide one through pipeline options
*/
String generateSasToken() throws IOException {
if (!Strings.isNullOrEmpty(options.getSasToken())) {
return options.getSasToken();
}
SharedAccessAccountPolicy sharedAccessAccountPolicy = new SharedAccessAccountPolicy();
long date = new Date().getTime();
long expiryDate = new Date(date + DEFAULT_EXPIRY_TIME).getTime();
sharedAccessAccountPolicy.setPermissionsFromString(DEFAULT_PERMISSIONS);
sharedAccessAccountPolicy.setSharedAccessStartTime(new Date(date));
sharedAccessAccountPolicy.setSharedAccessExpiryTime(new Date(expiryDate));
sharedAccessAccountPolicy.setResourceTypeFromString(DEFAULT_RESOURCE_TYPES);
sharedAccessAccountPolicy.setServiceFromString(DEFAULT_SERVICES);
String storageConnectionString;
if (!Strings.isNullOrEmpty(options.getAzureConnectionString())) {
storageConnectionString = options.getAzureConnectionString();
} else if (!Strings.isNullOrEmpty(options.getAccessKey())) {
storageConnectionString = "DefaultEndpointsProtocol=https;AccountName=" + client.get().getAccountName() + ";AccountKey=" + options.getAccessKey() + ";EndpointSuffix=core.windows.net";
} else {
throw new IOException("Copying blobs requires that a SAS token, connection string, or account key be provided.");
}
try {
CloudStorageAccount storageAccount = CloudStorageAccount.parse(storageConnectionString);
return "?" + storageAccount.generateSharedAccessSignature(sharedAccessAccountPolicy);
} catch (Exception e) {
throw (IOException) e.getCause();
}
}
Usage of org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting in project beam by apache.
Class ExpansionService, method expand.
@VisibleForTesting
/*package*/
ExpansionApi.ExpansionResponse expand(ExpansionApi.ExpansionRequest request) {
LOG.info("Expanding '{}' with URN '{}'", request.getTransform().getUniqueName(), request.getTransform().getSpec().getUrn());
LOG.debug("Full transform: {}", request.getTransform());
Set<String> existingTransformIds = request.getComponents().getTransformsMap().keySet();
Pipeline pipeline = createPipeline();
boolean isUseDeprecatedRead = ExperimentalOptions.hasExperiment(pipelineOptions, "use_deprecated_read") || ExperimentalOptions.hasExperiment(pipelineOptions, "beam_fn_api_use_deprecated_read");
if (!isUseDeprecatedRead) {
ExperimentalOptions.addExperiment(pipeline.getOptions().as(ExperimentalOptions.class), "beam_fn_api");
// TODO(BEAM-10670): Remove this when we address performance issue.
ExperimentalOptions.addExperiment(pipeline.getOptions().as(ExperimentalOptions.class), "use_sdf_read");
} else {
LOG.warn("Using use_depreacted_read in portable runners is runner-dependent. The " + "ExpansionService will respect that, but if your runner does not have support for " + "native Read transform, your Pipeline will fail during Pipeline submission.");
}
RehydratedComponents rehydratedComponents = RehydratedComponents.forComponents(request.getComponents()).withPipeline(pipeline);
Map<String, PCollection<?>> inputs = request.getTransform().getInputsMap().entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, input -> {
try {
return rehydratedComponents.getPCollection(input.getValue());
} catch (IOException exn) {
throw new RuntimeException(exn);
}
}));
String urn = request.getTransform().getSpec().getUrn();
TransformProvider transformProvider = null;
if (getUrn(ExpansionMethods.Enum.JAVA_CLASS_LOOKUP).equals(urn)) {
AllowList allowList = pipelineOptions.as(ExpansionServiceOptions.class).getJavaClassLookupAllowlist();
assert allowList != null;
transformProvider = new JavaClassLookupTransformProvider(allowList);
} else {
transformProvider = getRegisteredTransforms().get(urn);
if (transformProvider == null) {
throw new UnsupportedOperationException("Unknown urn: " + request.getTransform().getSpec().getUrn());
}
}
List<String> classpathResources = transformProvider.getDependencies(request.getTransform().getSpec(), pipeline.getOptions());
pipeline.getOptions().as(PortablePipelineOptions.class).setFilesToStage(classpathResources);
Map<String, PCollection<?>> outputs = transformProvider.apply(pipeline, request.getTransform().getUniqueName(), request.getTransform().getSpec(), inputs);
// Needed to find which transform was new...
SdkComponents sdkComponents = rehydratedComponents.getSdkComponents(Collections.emptyList()).withNewIdPrefix(request.getNamespace());
sdkComponents.registerEnvironment(Environments.createOrGetDefaultEnvironment(pipeline.getOptions().as(PortablePipelineOptions.class)));
Map<String, String> outputMap = outputs.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, output -> {
try {
return sdkComponents.registerPCollection(output.getValue());
} catch (IOException exn) {
throw new RuntimeException(exn);
}
}));
if (isUseDeprecatedRead) {
SplittableParDo.convertReadBasedSplittableDoFnsToPrimitiveReadsIfNecessary(pipeline);
}
RunnerApi.Pipeline pipelineProto = PipelineTranslation.toProto(pipeline, sdkComponents);
String expandedTransformId = Iterables.getOnlyElement(pipelineProto.getRootTransformIdsList().stream().filter(id -> !existingTransformIds.contains(id)).collect(Collectors.toList()));
RunnerApi.Components components = pipelineProto.getComponents();
RunnerApi.PTransform expandedTransform = components.getTransformsOrThrow(expandedTransformId).toBuilder().setUniqueName(expandedTransformId).clearOutputs().putAllOutputs(outputMap).build();
LOG.debug("Expanded to {}", expandedTransform);
return ExpansionApi.ExpansionResponse.newBuilder().setComponents(components.toBuilder().removeTransforms(expandedTransformId)).setTransform(expandedTransform).addAllRequirements(pipelineProto.getRequirementsList()).build();
}
Usage of org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting in project beam by apache.
Class TableRowToStorageApiProto, method tableRowFromMessage.
@VisibleForTesting
public static TableRow tableRowFromMessage(Message message) {
TableRow tableRow = new TableRow();
for (Map.Entry<FieldDescriptor, Object> field : message.getAllFields().entrySet()) {
FieldDescriptor fieldDescriptor = field.getKey();
Object fieldValue = field.getValue();
tableRow.putIfAbsent(fieldDescriptor.getName(), jsonValueFromMessageValue(fieldDescriptor, fieldValue, true));
}
return tableRow;
}
Usage of org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting in project beam by apache.
Class SparkBeamMetric, method renderName.
@VisibleForTesting
String renderName(MetricResult<?> metricResult) {
MetricKey key = metricResult.getKey();
MetricName name = key.metricName();
String step = key.stepName();
ArrayList<String> pieces = new ArrayList<>();
if (step != null) {
step = step.replaceAll(ILLEGAL_CHARACTERS, "_");
if (step.endsWith("_")) {
step = step.substring(0, step.length() - 1);
}
pieces.add(step);
}
pieces.addAll(ImmutableList.of(name.getNamespace(), name.getName()).stream().map(str -> str.replaceAll(ILLEGAL_CHARACTERS, "_")).collect(toList()));
return String.join(".", pieces);
}
Usage of org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting in project beam by apache.
Class SparkBeamMetric, static method renderName.
@VisibleForTesting
static String renderName(MetricResult<?> metricResult) {
MetricKey key = metricResult.getKey();
MetricName name = key.metricName();
String step = key.stepName();
ArrayList<String> pieces = new ArrayList<>();
if (step != null) {
step = step.replaceAll(ILLEGAL_CHARACTERS, "_");
if (step.endsWith("_")) {
step = step.substring(0, step.length() - 1);
}
pieces.add(step);
}
pieces.addAll(ImmutableList.of(name.getNamespace(), name.getName()).stream().map(str -> str.replaceAll(ILLEGAL_CHARACTERS, "_")).collect(toList()));
return String.join(".", pieces);
}
Aggregations