Use of com.google.cloud.dataproc.v1.WorkflowTemplate in project java-dataproc by googleapis: the listWorkflowTemplatesTest3 method of the WorkflowTemplateServiceClientTest class.
@Test
public void listWorkflowTemplatesTest3() throws Exception {
  WorkflowTemplate responsesElement = WorkflowTemplate.newBuilder().build();
  ListWorkflowTemplatesResponse expectedResponse =
      ListWorkflowTemplatesResponse.newBuilder()
          .setNextPageToken("")
          .addAllTemplates(Arrays.asList(responsesElement))
          .build();
  mockWorkflowTemplateService.addResponse(expectedResponse);

  String parent = "parent-995424086";

  ListWorkflowTemplatesPagedResponse pagedListResponse = client.listWorkflowTemplates(parent);

  List<WorkflowTemplate> resources = Lists.newArrayList(pagedListResponse.iterateAll());

  Assert.assertEquals(1, resources.size());
  Assert.assertEquals(expectedResponse.getTemplatesList().get(0), resources.get(0));

  List<AbstractMessage> actualRequests = mockWorkflowTemplateService.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ListWorkflowTemplatesRequest actualRequest = ((ListWorkflowTemplatesRequest) actualRequests.get(0));

  Assert.assertEquals(parent, actualRequest.getParent());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
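For orientation, a minimal sketch of how the paged call exercised by this test is used against the real service; the project and region in the parent path are placeholder values:

try (WorkflowTemplateServiceClient client = WorkflowTemplateServiceClient.create()) {
  // Parent is a region (or location) path; both segments below are placeholders.
  String parent = "projects/my-project/regions/us-central1";
  for (WorkflowTemplate template : client.listWorkflowTemplates(parent).iterateAll()) {
    System.out.println(template.getName());
  }
}

iterateAll() follows next_page_token across pages transparently, which is why the test only needs to stage a single one-element page in the mock service.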
Use of com.google.cloud.dataproc.v1.WorkflowTemplate in project java-dataproc by googleapis: the getWorkflowTemplateTest method of the ITSystemTest class.
@Test
public void getWorkflowTemplateTest() {
  WorkflowTemplate response = workflowClient.getWorkflowTemplate(WORKFLOW_TEMPLATE_NAME);
  assertEquals(ID, response.getId());
  assertEquals(WORKFLOW_TEMPLATE_NAME.toString(), response.getName());
  assertEquals(VERSION, response.getVersion());
}
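ID, VERSION, and WORKFLOW_TEMPLATE_NAME are fixtures created elsewhere in ITSystemTest. A hedged sketch of the same lookup outside the test harness, with placeholder resource segments:

try (WorkflowTemplateServiceClient client = WorkflowTemplateServiceClient.create()) {
  // Fully qualified template name; every segment is a placeholder.
  String name = "projects/my-project/regions/us-central1/workflowTemplates/my-template";
  WorkflowTemplate template = client.getWorkflowTemplate(name);
  System.out.println(template.getId() + " (version " + template.getVersion() + ")");
}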
Use of org.flyte.api.v1.WorkflowTemplate in project flytekit-java by flyteorg: the shouldPropagateCreateWorkflowToStub method of the FlyteAdminClientTest class.
@Test
public void shouldPropagateCreateWorkflowToStub() {
  String nodeId = "node";
  WorkflowIdentifier identifier =
      WorkflowIdentifier.builder()
          .domain(DOMAIN).project(PROJECT).name(WF_NAME).version(WF_VERSION)
          .build();
  TaskNode taskNode =
      TaskNode.builder()
          .referenceId(
              PartialTaskIdentifier.builder()
                  .domain(DOMAIN).project(PROJECT).name(TASK_NAME).version(TASK_VERSION)
                  .build())
          .build();
  Node node =
      Node.builder()
          .id(nodeId)
          .taskNode(taskNode)
          .inputs(
              ImmutableList.of(
                  Binding.builder()
                      .var_(VAR_NAME)
                      .binding(BindingData.ofScalar(Scalar.ofPrimitive(Primitive.ofStringValue(SCALAR))))
                      .build()))
          .upstreamNodeIds(emptyList())
          .build();
  TypedInterface interface_ =
      TypedInterface.builder().inputs(ImmutableMap.of()).outputs(ImmutableMap.of()).build();
  WorkflowTemplate template =
      WorkflowTemplate.builder()
          .nodes(ImmutableList.of(node))
          .metadata(WorkflowMetadata.builder().build())
          .interface_(interface_)
          .outputs(ImmutableList.of())
          .build();

  client.createWorkflow(identifier, template, ImmutableMap.of());

  assertThat(
      stubService.createWorkflowRequest,
      equalTo(
          WorkflowOuterClass.WorkflowCreateRequest.newBuilder()
              .setId(newIdentifier(ResourceType.WORKFLOW, WF_NAME, WF_VERSION))
              .setSpec(newWorkflowSpec(nodeId))
              .build()));
}
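newIdentifier and newWorkflowSpec are private fixtures defined elsewhere in FlyteAdminClientTest. A plausible sketch of newIdentifier, assuming it wraps the flyteidl Identifier proto (flyteidl.core.IdentifierOuterClass):

private static IdentifierOuterClass.Identifier newIdentifier(ResourceType type, String name, String version) {
  // DOMAIN and PROJECT are the same test constants used in the method above.
  return IdentifierOuterClass.Identifier.newBuilder()
      .setResourceType(type)
      .setDomain(DOMAIN)
      .setProject(PROJECT)
      .setName(name)
      .setVersion(version)
      .build();
}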
Use of org.flyte.api.v1.WorkflowTemplate in project flytekit-java by flyteorg: the execute method of the ExecuteDynamicWorkflow class.
private void execute() {
  Config config = Config.load();
  ExecutionConfig executionConfig = ExecutionConfig.load();
  Collection<ClassLoader> modules = ClassLoaders.forModuleDir(config.moduleDir()).values();
  Map<String, FileSystem> fileSystems = FileSystemLoader.loadFileSystems(modules);
  FileSystem outputFs = FileSystemLoader.getFileSystem(fileSystems, outputPrefix);
  ProtoWriter protoWriter = new ProtoWriter(outputPrefix, outputFs);

  try {
    FileSystem inputFs = FileSystemLoader.getFileSystem(fileSystems, inputs);
    ProtoReader protoReader = new ProtoReader(inputFs);
    TaskTemplate taskTemplate = protoReader.getTaskTemplate(taskTemplatePath);
    ClassLoader packageClassLoader = PackageLoader.load(fileSystems, taskTemplate);
    Map<String, String> env = getEnv();
    Map<WorkflowIdentifier, WorkflowTemplate> workflowTemplates =
        ClassLoaders.withClassLoader(
            packageClassLoader, () -> Registrars.loadAll(WorkflowTemplateRegistrar.class, env));
    Map<TaskIdentifier, RunnableTask> runnableTasks =
        ClassLoaders.withClassLoader(
            packageClassLoader, () -> Registrars.loadAll(RunnableTaskRegistrar.class, env));
    Map<TaskIdentifier, DynamicWorkflowTask> dynamicWorkflowTasks =
        ClassLoaders.withClassLoader(
            packageClassLoader, () -> Registrars.loadAll(DynamicWorkflowTaskRegistrar.class, env));

    // Before we run anything, switch the class loader; otherwise ServiceLoaders and
    // similar mechanisms wouldn't work, for instance FileSystemRegistrar in Apache Beam.

    // We don't take the whole "custom" field, only the jflyte part; to extract it we
    // round-trip it through serialization.
    Struct custom = JFlyteCustom.deserializeFromStruct(taskTemplate.custom()).serializeToStruct();

    // All tasks already have staged jars, so we can reuse the 'jflyte' custom from the
    // current task to get them.
    Map<TaskIdentifier, TaskTemplate> taskTemplates =
        mapValues(
            ProjectClosure.createTaskTemplates(executionConfig, runnableTasks, dynamicWorkflowTasks),
            template ->
                template.toBuilder()
                    .custom(ProjectClosure.merge(template.custom(), custom))
                    .build());

    DynamicJobSpec futures =
        withClassLoader(
            packageClassLoader,
            () -> {
              Map<String, Literal> input = protoReader.getInput(inputs);
              DynamicWorkflowTask task = getDynamicWorkflowTask(this.task);
              return task.run(input);
            });

    DynamicJobSpec rewrittenFutures =
        rewrite(executionConfig, futures, taskTemplates, workflowTemplates);

    if (rewrittenFutures.nodes().isEmpty()) {
      Map<String, Literal> outputs = getLiteralMap(rewrittenFutures.outputs());
      protoWriter.writeOutputs(outputs);
    } else {
      protoWriter.writeFutures(rewrittenFutures);
    }
  } catch (ContainerError e) {
    LOG.error("failed to run dynamic workflow", e);
    protoWriter.writeError(ProtoUtil.serializeContainerError(e));
  } catch (Throwable e) {
    LOG.error("failed to run dynamic workflow", e);
    protoWriter.writeError(ProtoUtil.serializeThrowable(e));
  }
}
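The mapValues helper used to build taskTemplates is not part of this excerpt; a minimal sketch under the assumption that it applies a function to every value of a map while preserving keys (assumes java.util.function.Function and java.util.stream.Collectors):

private static <K, V, U> Map<K, U> mapValues(Map<K, V> map, Function<V, U> fn) {
  return map.entrySet().stream()
      .collect(Collectors.toMap(Map.Entry::getKey, entry -> fn.apply(entry.getValue())));
}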
Use of org.flyte.api.v1.WorkflowTemplate in project flytekit-java by flyteorg: the call method of the ExecuteLocal class.
@Override
public Integer call() {
  Map<String, ClassLoader> modules = ExecuteLocalLoader.loadModules(packageDir);
  Map<String, String> env =
      ImmutableMap.of(
          "FLYTE_INTERNAL_DOMAIN", "development",
          "FLYTE_INTERNAL_VERSION", "test",
          "FLYTE_INTERNAL_PROJECT", "flytetester");
  Map<String, RunnableTask> runnableTasks = ExecuteLocalLoader.loadTasks(modules, env);
  // TODO support dynamic tasks
  Map<String, DynamicWorkflowTask> dynamicWorkflowTasks = emptyMap();
  Map<String, WorkflowTemplate> workflows = ExecuteLocalLoader.loadWorkflows(modules, env);
  WorkflowTemplate workflow =
      Preconditions.checkNotNull(
          workflows.get(workflowName), "workflow not found [%s]", workflowName);

  String synopsis = getCustomSynopsis();
  List<String> inputArgsList =
      inputArgs == null ? Collections.emptyList() : Arrays.asList(inputArgs);
  Map<String, Literal> inputs =
      getArgsParser().parseInputs(synopsis, workflow.interface_().inputs(), inputArgsList);

  try {
    // TODO use logging listener here
    ExecutionListener listener = NoopExecutionListener.create();
    Map<String, Literal> outputs =
        LocalEngine.compileAndExecute(workflow, runnableTasks, dynamicWorkflowTasks, inputs, listener);
    LOG.info("Outputs: " + StringUtil.serializeLiteralMap(outputs));
    return 0;
  } catch (Throwable e) {
    return handleException(e);
  }
}
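handleException is defined elsewhere in ExecuteLocal; a hypothetical sketch, assuming it only logs the failure and maps it to a non-zero exit code:

private Integer handleException(Throwable e) {
  LOG.error("failed to execute workflow locally", e); // hypothetical log message
  return 1; // non-zero exit code signals failure to the CLI runner
}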