Example use of java.util.function.BiConsumer in the photon-model project (VMware):
method computeDesc of class AWSEndpointAdapterService.
/**
 * Returns an enhancer that fills in the AWS-specific fields of a {@link ComputeDescription}
 * during endpoint configuration: region/zone (taken from the request properties via the
 * {@link Retriever}), environment name, supported child types, and the public URIs of the
 * AWS instance/enumeration/power/disk/stats adapters hosted on this service host.
 */
private BiConsumer<ComputeDescription, Retriever> computeDesc() {
    return (cd, r) -> {
        // Region/zone are optional request properties; absent values stay null.
        cd.regionId = r.get(REGION_KEY).orElse(null);
        cd.zoneId = r.get(ZONE_KEY).orElse(null);
        cd.environmentName = ComputeDescription.ENVIRONMENT_NAME_AWS;

        List<String> children = new ArrayList<>();
        children.add(ComputeType.ZONE.toString());
        cd.supportedChildren = children;

        cd.instanceAdapterReference = adapterUri(AWSUriPaths.AWS_INSTANCE_ADAPTER);
        cd.enumerationAdapterReference = adapterUri(AWSUriPaths.AWS_ENUMERATION_ADAPTER);
        cd.powerAdapterReference = adapterUri(AWSUriPaths.AWS_POWER_ADAPTER);
        cd.diskAdapterReference = adapterUri(AWSUriPaths.AWS_DISK_ADAPTER);

        URI statsAdapterUri = adapterUri(AWSUriPaths.AWS_STATS_ADAPTER);
        URI costStatsAdapterUri = adapterUri(AWSUriPaths.AWS_COST_STATS_ADAPTER);
        // LinkedHashSet keeps insertion order: cost-stats first, then stats.
        cd.statsAdapterReferences = new LinkedHashSet<>();
        cd.statsAdapterReferences.add(costStatsAdapterUri);
        cd.statsAdapterReferences.add(statsAdapterUri);
        cd.statsAdapterReference = statsAdapterUri;
    };
}

/** Builds the public URI of the given AWS adapter path on this service host. */
private URI adapterUri(String adapterPath) {
    return AdapterUriUtil.buildPublicAdapterUri(getHost(), adapterPath);
}
Example use of java.util.function.BiConsumer in the photon-model project (VMware):
method configureEndpoint of class EndpointAdapterUtils.
/**
 * Loads the {@link EndpointState} identified by {@code body.resourceReference}, runs the four
 * supplied enhancers (credentials, compute description, compute state, endpoint state) against
 * the request's endpoint properties, and patches the four linked documents with the enhanced
 * bodies. Any failure is reported back to the task referenced by {@code body.taskReference}.
 */
private static void configureEndpoint(StatelessService service, EndpointConfigRequest body,
        BiConsumer<AuthCredentialsServiceState, Retriever> credEnhancer,
        BiConsumer<ComputeDescription, Retriever> descEnhancer,
        BiConsumer<ComputeState, Retriever> compEnhancer,
        BiConsumer<EndpointState, Retriever> endpointEnhancer) {
    TaskManager taskManager = new TaskManager(service, body.taskReference, body.resourceLink());
    Consumer<Throwable> onFailure = taskManager::patchTaskToFailure;

    Consumer<Operation> onSuccess = (getOp) -> {
        EndpointState endpoint = getOp.getBody(EndpointState.class);
        getOp.complete();

        // Expose the request properties (plus mock flag and endpoint URI) to the enhancers.
        Map<String, String> props = new HashMap<>(body.endpointProperties);
        props.put(MOCK_REQUEST, String.valueOf(body.isMockRequest));
        props.put(ENDPOINT_REFERENCE_URI, body.resourceReference.toString());
        Retriever retriever = Retriever.of(props);

        try {
            AuthCredentialsServiceState auth = new AuthCredentialsServiceState();
            credEnhancer.accept(auth, retriever);

            ComputeDescription description = new ComputeDescription();
            descEnhancer.accept(description, retriever);

            ComputeState compute = new ComputeState();
            compute.powerState = PowerState.ON;
            compEnhancer.accept(compute, retriever);

            EndpointState endpointState = new EndpointState();
            endpointState.endpointProperties = new HashMap<>();
            endpointState.regionId = retriever.get(EndpointConfigRequest.REGION_KEY).orElse(null);
            endpointEnhancer.accept(endpointState, retriever);

            // One PATCH per (enhanced body, target document link).
            Stream<Operation> patches = Stream
                    .of(Pair.of(auth, endpoint.authCredentialsLink),
                            Pair.of(description, endpoint.computeDescriptionLink),
                            Pair.of(compute, endpoint.computeLink),
                            Pair.of(endpointState, endpoint.documentSelfLink))
                    .map((pair) -> Operation
                            .createPatch(createInventoryUri(service.getHost(), pair.right))
                            .setBody(pair.left)
                            .setReferer(service.getUri()));

            applyChanges(taskManager, service, endpoint, patches);
        } catch (Exception e) {
            taskManager.patchTaskToFailure(e);
        }
    };

    AdapterUtils.getServiceState(service, body.resourceReference, onSuccess, onFailure);
}
Example use of java.util.function.BiConsumer in the photon-model project (VMware):
method performSnapshotOperation of class VSphereAdapterSnapshotService.
/**
 * Resolves vSphere credentials and then performs the snapshot operation requested in
 * {@code context.requestType} (CREATE, DELETE or REVERT) on a pooled connection.
 * The returned {@link DeferredResult} completes (or fails) via the per-operation
 * createSnapshot/deleteSnapshot/revertSnapshot helpers, or fails immediately for an
 * unsupported request type.
 */
private DeferredResult<SnapshotContext> performSnapshotOperation(SnapshotContext context) {
    DeferredResult<SnapshotContext> result = new DeferredResult<>();
    VSphereIOThreadPool pool = VSphereIOThreadPoolAllocator.getPool(this);

    // Credentials come either from the external token service (IAAS API mode) or from the
    // auth credentials document linked by the parent compute description.
    DeferredResult<AuthCredentialsService.AuthCredentialsServiceState> credentials;
    if (IAAS_API_ENABLED) {
        credentials = SessionUtil.retrieveExternalToken(this,
                context.operation.getAuthorizationContext());
    } else {
        URI authUri = createInventoryUri(this.getHost(),
                context.parentComputeDescription.description.authCredentialsLink);
        credentials = this.sendWithDeferredResult(Operation.createGet(authUri),
                AuthCredentialsService.AuthCredentialsServiceState.class);
    }

    switch (context.requestType) {
    case CREATE:
    case DELETE:
    case REVERT:
        // The three request types previously used three identical handlers differing only in
        // the inner snapshot call; use one shared handler and dispatch on the request type.
        BiConsumer<AuthCredentialsService.AuthCredentialsServiceState, Throwable> handler =
                (authCredentialsServiceState, throwable) -> {
                    if (throwable != null) {
                        result.fail(throwable);
                        return;
                    }
                    pool.submit(context.parentComputeDescription.adapterManagementReference,
                            authCredentialsServiceState, (connection, e) -> {
                                if (e != null) {
                                    result.fail(e);
                                    return;
                                }
                                switch (context.requestType) {
                                case CREATE:
                                    createSnapshot(connection, context, result);
                                    break;
                                case DELETE:
                                    deleteSnapshot(context, connection, result);
                                    break;
                                case REVERT:
                                    revertSnapshot(context, connection, result);
                                    break;
                                default:
                                    // Unreachable: the outer switch only admits the three types.
                                    break;
                                }
                            });
                };
        credentials.whenComplete(handler);
        break;
    default:
        result.fail(new IllegalStateException("Unsupported requestType " + context.requestType));
    }
    return result;
}
Example use of java.util.function.BiConsumer in the CDAP project (caskdata):
method createSubmitArguments of class AbstractSparkSubmitter.
/**
 * Builds the argument list used to invoke {@link SparkSubmit#main(String[])}.
 *
 * @param runtimeContext the {@link SparkRuntimeContext} for the spark program
 * @param configs set of Spark configurations, each emitted as a {@code --conf key=value} pair
 * @param resources list of resources to localize to Spark containers; archives and plain
 *                  files are passed separately via {@code --archives} and {@code --files}
 * @param jobFile the job file for Spark; a {@code .py} suffix selects PySpark handling
 * @return a list of arguments
 */
private List<String> createSubmitArguments(SparkRuntimeContext runtimeContext,
                                           Map<String, String> configs,
                                           List<LocalizeResource> resources, URI jobFile) {
    SparkSpecification spec = runtimeContext.getSparkSpecification();
    ImmutableList.Builder<String> args = ImmutableList.builder();

    addMaster(configs, args);
    args.add("--conf").add("spark.app.name=" + spec.getName());

    // Every entry of both config maps becomes a "--conf key=value" pair.
    BiConsumer<String, String> confAdder = (key, value) -> args.add("--conf").add(key + "=" + value);
    configs.forEach(confAdder);
    getSubmitConf().forEach(confAdder);

    // Partition resources into archives vs. plain files, comma-joining the paths as Spark expects.
    StringBuilder archives = new StringBuilder();
    StringBuilder files = new StringBuilder();
    for (LocalizeResource resource : resources) {
        StringBuilder target = ARCHIVE_FILTER.apply(resource) ? archives : files;
        if (target.length() > 0) {
            target.append(',');
        }
        target.append(RESOURCE_TO_PATH.apply(resource));
    }
    if (archives.length() > 0) {
        args.add("--archives").add(archives.toString());
    }
    if (files.length() > 0) {
        args.add("--files").add(files.toString());
    }

    boolean isPySpark = jobFile.getPath().endsWith(".py");
    if (isPySpark) {
        // For python, add extra py library files
        String pyFiles = configs.get("spark.submit.pyFiles");
        if (pyFiles != null) {
            args.add("--py-files").add(pyFiles);
        }
    } else {
        args.add("--class").add(SparkMainWrapper.class.getName());
    }

    // Local job files are passed as a bare path; everything else as a full URI.
    if ("file".equals(jobFile.getScheme())) {
        args.add(jobFile.getPath());
    } else {
        args.add(jobFile.toString());
    }

    if (!isPySpark) {
        // Add extra arguments for easily identifying the program from command line.
        // Arguments to user program is always coming from the runtime arguments.
        args.add("--cdap.spark.program=" + runtimeContext.getProgramRunId().toString());
        args.add("--cdap.user.main.class=" + spec.getMainClassName());
    }
    return args.build();
}
Example use of java.util.function.BiConsumer in the elasticsearch-indexing-proxy project (CodeLibs):
method updateWriterNode of class IndexingProxyService.
/**
 * Elects a new writer node at random from {@code nodeList} and records it in the proxy's
 * metadata document (optimistically, guarded by {@code version}). The outcome is delivered
 * through {@code consumer}: {@code (response, null)} on success, {@code (null, exception)}
 * on failure. A short random wait precedes either callback.
 */
private void updateWriterNode(final long version, final List<DiscoveryNode> nodeList,
        final BiConsumer<UpdateResponse, Exception> consumer) {
    if (logger.isDebugEnabled()) {
        logger.debug("No available write node.");
    }
    // Guard: with no candidates we cannot elect a writer. Report through the callback
    // instead of throwing IndexOutOfBoundsException from nodeList.get(0) below.
    if (nodeList.isEmpty()) {
        consumer.accept(null, new IllegalStateException("No node is available to become the writer."));
        return;
    }
    // NOTE(review): shuffle mutates the caller-supplied list in place — confirm callers
    // do not rely on its order.
    Collections.shuffle(nodeList);
    final DiscoveryNode nextNode = nodeList.get(0);

    final Map<String, Object> source = new HashMap<>();
    source.put(IndexingProxyPlugin.NODE_NAME, nextNode.getName());
    source.put(IndexingProxyPlugin.TIMESTAMP, new Date());
    client.prepareUpdate(IndexingProxyPlugin.INDEX_NAME, IndexingProxyPlugin.TYPE_NAME, FILE_ID)
            .setVersion(version) // optimistic concurrency: fails if the doc changed meanwhile
            .setDoc(source)
            .setRefreshPolicy(RefreshPolicy.WAIT_UNTIL)
            .execute(wrap(res -> {
                randomWait();
                consumer.accept(res, null);
            }, ex -> {
                if (logger.isDebugEnabled()) {
                    logger.debug("Failed to update file_id.", ex);
                }
                randomWait();
                consumer.accept(null, ex);
            }));
}
Aggregations