Search in sources:

Example 11 with BasicArguments

Use of io.cdap.cdap.etl.common.BasicArguments in project cdap by cdapio.

The class SmartWorkflow, method updateTokenWithTriggeringProperties:

private void updateTokenWithTriggeringProperties(TriggeringScheduleInfo scheduleInfo, TriggeringPropertyMapping propertiesMapping, WorkflowToken token) {
    List<ProgramStatusTriggerInfo> programStatusTriggerInfos = new ArrayList<>();
    for (TriggerInfo info : scheduleInfo.getTriggerInfos()) {
        if (info instanceof ProgramStatusTriggerInfo) {
            programStatusTriggerInfos.add((ProgramStatusTriggerInfo) info);
        }
    }
    // If there is no ProgramStatusTriggerInfo, there is no need to override the existing runtime arguments
    if (programStatusTriggerInfos.isEmpty()) {
        return;
    }
    // Currently only expecting one trigger in a schedule
    ProgramStatusTriggerInfo triggerInfo = programStatusTriggerInfos.get(0);
    BasicArguments triggeringArguments = new BasicArguments(triggerInfo.getWorkflowToken(), triggerInfo.getRuntimeArguments());
    // Get the value of each triggering-pipeline argument specified in the propertiesMapping and write it to the workflow token
    List<ArgumentMapping> argumentMappings = propertiesMapping.getArguments();
    for (ArgumentMapping mapping : argumentMappings) {
        String sourceKey = mapping.getSource();
        if (sourceKey == null) {
            LOG.warn("The name of argument from the triggering pipeline cannot be null, " + "skip this argument mapping: '{}'.", mapping);
            continue;
        }
        String value = triggeringArguments.get(sourceKey);
        if (value == null) {
            LOG.warn("Runtime argument '{}' is not found in run '{}' of the triggering pipeline '{}' " + "in namespace '{}' ", sourceKey, triggerInfo.getRunId(), triggerInfo.getApplicationName(), triggerInfo.getNamespace());
            continue;
        }
        // Use the argument name in the triggering pipeline if target is not specified
        String targetKey = mapping.getTarget() == null ? sourceKey : mapping.getTarget();
        token.put(targetKey, value);
    }
    // Get the resolved plugin properties map from the triggering pipeline's workflow token in triggeringArguments
    Map<String, Map<String, String>> resolvedProperties = GSON.fromJson(triggeringArguments.get(RESOLVED_PLUGIN_PROPERTIES_MAP), STAGE_PROPERTIES_MAP);
    if (resolvedProperties == null) {
        // Guard against a missing token entry so the lookups below cannot throw an NPE
        return;
    }
    for (PluginPropertyMapping mapping : propertiesMapping.getPluginProperties()) {
        String stageName = mapping.getStageName();
        if (stageName == null) {
            LOG.warn("The name of the stage cannot be null in plugin property mapping, skip this mapping: '{}'.", mapping);
            continue;
        }
        Map<String, String> pluginProperties = resolvedProperties.get(stageName);
        if (pluginProperties == null) {
            LOG.warn("No plugin properties can be found with stage name '{}' in triggering pipeline '{}' " + "in namespace '{}' ", mapping.getStageName(), triggerInfo.getApplicationName(), triggerInfo.getNamespace());
            continue;
        }
        String sourceKey = mapping.getSource();
        if (sourceKey == null) {
            LOG.warn("The name of argument from the triggering pipeline cannot be null, " + "skip this argument mapping: '{}'.", mapping);
            continue;
        }
        String value = pluginProperties.get(sourceKey);
        if (value == null) {
            LOG.warn("No property with name '{}' can be found in plugin '{}' of the triggering pipeline '{}' " + "in namespace '{}' ", sourceKey, stageName, triggerInfo.getApplicationName(), triggerInfo.getNamespace());
            continue;
        }
        // Use the argument name in the triggering pipeline if target is not specified
        String targetKey = mapping.getTarget() == null ? sourceKey : mapping.getTarget();
        token.put(targetKey, value);
    }
}
Also used: ArgumentMapping(io.cdap.cdap.etl.proto.v2.ArgumentMapping) ProgramStatusTriggerInfo(io.cdap.cdap.api.schedule.ProgramStatusTriggerInfo) ArrayList(java.util.ArrayList) TriggerInfo(io.cdap.cdap.api.schedule.TriggerInfo) PluginPropertyMapping(io.cdap.cdap.etl.proto.v2.PluginPropertyMapping) BasicArguments(io.cdap.cdap.etl.common.BasicArguments) Map(java.util.Map) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap)
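
Both loops above share one convention worth calling out: when an ArgumentMapping or PluginPropertyMapping has no explicit target, the source key doubles as the token key. Below is a minimal standalone sketch of that fallback using plain maps and made-up key names, not the actual CDAP classes:

import java.util.HashMap;
import java.util.Map;

public class MappingFallbackSketch {
    public static void main(String[] args) {
        // Stand-in for the triggering pipeline's runtime arguments
        Map<String, String> triggeringArguments = new HashMap<>();
        triggeringArguments.put("input.path", "/data/run1");

        // Stand-ins for ArgumentMapping.getSource() / getTarget()
        String sourceKey = "input.path";
        String targetKey = null; // no target configured

        String value = triggeringArguments.get(sourceKey);
        if (value != null) {
            // Fall back to the source key when no target is specified,
            // exactly as the token.put(...) lines above do
            String effectiveTarget = targetKey == null ? sourceKey : targetKey;
            System.out.println(effectiveTarget + " -> " + value);
        }
    }
}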

Example 12 with BasicArguments

Use of io.cdap.cdap.etl.common.BasicArguments in project cdap by cdapio.

The class ConnectionHandler, method getConnector:

private Connector getConnector(ServicePluginConfigurer configurer, PluginInfo pluginInfo, String namespace, TrackedPluginSelector pluginSelector) throws IOException {
    Map<String, String> arguments = getContext().getPreferencesForNamespace(namespace, true);
    Map<String, MacroEvaluator> evaluators = ImmutableMap.of(SecureStoreMacroEvaluator.FUNCTION_NAME, new SecureStoreMacroEvaluator(namespace, getContext()), OAuthMacroEvaluator.FUNCTION_NAME, new OAuthMacroEvaluator(getContext()));
    MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(new BasicArguments(arguments), evaluators, Collections.singleton(OAuthMacroEvaluator.FUNCTION_NAME));
    MacroParserOptions options = MacroParserOptions.builder().setEscaping(false).setFunctionWhitelist(evaluators.keySet()).build();
    return ConnectionUtils.getConnector(configurer, pluginInfo, pluginSelector, macroEvaluator, options);
}
Also used: MacroParserOptions(io.cdap.cdap.api.macro.MacroParserOptions) SecureStoreMacroEvaluator(io.cdap.cdap.etl.common.SecureStoreMacroEvaluator) OAuthMacroEvaluator(io.cdap.cdap.etl.common.OAuthMacroEvaluator) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) MacroEvaluator(io.cdap.cdap.api.macro.MacroEvaluator) BasicArguments(io.cdap.cdap.etl.common.BasicArguments)
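
The function whitelist passed to MacroParserOptions is the interesting piece here: only macro functions whose names were registered as evaluators get invoked. Here is a conceptual, plain-Java sketch of that idea; the function names and the map-of-evaluators shape are illustrative assumptions, not the actual CDAP macro parser:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.UnaryOperator;

public class FunctionWhitelistSketch {
    public static void main(String[] args) {
        // Registered evaluators, keyed by macro function name
        Map<String, UnaryOperator<String>> evaluators = new HashMap<>();
        evaluators.put("secure", key -> "<secure value for " + key + ">");
        evaluators.put("oauth", provider -> "<token for " + provider + ">");

        // Whitelist derived from the evaluator keys, as in the snippet above
        Set<String> whitelist = new HashSet<>(evaluators.keySet());

        String function = "secure";
        String argument = "db.password";
        if (whitelist.contains(function)) {
            // Only whitelisted functions are evaluated; everything else is left untouched
            System.out.println(evaluators.get(function).apply(argument));
        }
    }
}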

Example 13 with BasicArguments

Use of io.cdap.cdap.etl.common.BasicArguments in project cdap by cdapio.

The class RemoteValidationTask, method run:

@Override
public void run(RunnableTaskContext context) throws Exception {
    SystemAppTaskContext systemAppContext = context.getRunnableTaskSystemAppContext();
    RemoteValidationRequest remoteValidationRequest = GSON.fromJson(context.getParam(), RemoteValidationRequest.class);
    String namespace = remoteValidationRequest.getNamespace();
    String originalRequest = remoteValidationRequest.getRequest();
    StageValidationRequest validationRequest;
    try {
        validationRequest = GSON.fromJson(originalRequest, StageValidationRequest.class);
        validationRequest.validate();
    } catch (JsonSyntaxException e) {
        throw new IllegalArgumentException(String.format("Unable to decode request body %s", originalRequest), e);
    } catch (IllegalArgumentException e) {
        throw new IllegalArgumentException("Invalid stage config", e);
    }
    Map<String, String> arguments = Collections.emptyMap();
    // Use namespace preferences as macro-resolution arguments only when the request enables this option.
    if (validationRequest.getResolveMacrosFromPreferences()) {
        try {
            arguments = systemAppContext.getPreferencesForNamespace(namespace, true);
        } catch (IllegalArgumentException iae) {
            // An IllegalArgumentException here means the namespace doesn't exist, so report it as not found.
            throw new IllegalArgumentException(String.format(NAMESPACE_DOES_NOT_EXIST, namespace), iae);
        }
    }
    Map<String, MacroEvaluator> evaluators = ImmutableMap.of(SecureStoreMacroEvaluator.FUNCTION_NAME, new SecureStoreMacroEvaluator(namespace, systemAppContext), OAuthMacroEvaluator.FUNCTION_NAME, new OAuthMacroEvaluator(systemAppContext), ConnectionMacroEvaluator.FUNCTION_NAME, new ConnectionMacroEvaluator(namespace, systemAppContext));
    MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(new BasicArguments(arguments), evaluators, DefaultMacroEvaluator.MAP_FUNCTIONS);
    MacroParserOptions macroParserOptions = MacroParserOptions.builder().skipInvalidMacros().setEscaping(false).setFunctionWhitelist(evaluators.keySet()).build();
    Function<Map<String, String>, Map<String, String>> macroFn = macroProperties -> systemAppContext.evaluateMacros(namespace, macroProperties, macroEvaluator, macroParserOptions);
    PluginConfigurer pluginConfigurer = systemAppContext.createPluginConfigurer(namespace);
    StageValidationResponse validationResponse = ValidationUtils.validate(namespace, validationRequest, pluginConfigurer, macroFn, systemAppContext);
    // If validation succeeded and only system artifacts were involved, there is no need to restart the task runner
    if (validationResponse.getFailures().isEmpty()) {
        StageSpec spec = validationResponse.getSpec();
        if (spec != null) {
            context.setTerminateOnComplete(!ArtifactScope.SYSTEM.equals(spec.getPlugin().getArtifact().getScope()));
        }
    }
    context.writeResult(GSON.toJson(validationResponse).getBytes(StandardCharsets.UTF_8));
}
Also used: StageValidationResponse(io.cdap.cdap.etl.proto.v2.validation.StageValidationResponse) OAuthMacroEvaluator(io.cdap.cdap.etl.common.OAuthMacroEvaluator) SchemaTypeAdapter(io.cdap.cdap.internal.io.SchemaTypeAdapter) LoggerFactory(org.slf4j.LoggerFactory) Function(java.util.function.Function) GsonBuilder(com.google.gson.GsonBuilder) Gson(com.google.gson.Gson) Map(java.util.Map) ConnectionMacroEvaluator(io.cdap.cdap.etl.common.ConnectionMacroEvaluator) MacroParserOptions(io.cdap.cdap.api.macro.MacroParserOptions) RunnableTaskContext(io.cdap.cdap.api.service.worker.RunnableTaskContext) PluginConfigurer(io.cdap.cdap.api.plugin.PluginConfigurer) StageValidationRequest(io.cdap.cdap.etl.proto.v2.validation.StageValidationRequest) SystemAppTaskContext(io.cdap.cdap.api.service.worker.SystemAppTaskContext) Logger(org.slf4j.Logger) ImmutableMap(com.google.common.collect.ImmutableMap) JsonSyntaxException(com.google.gson.JsonSyntaxException) Schema(io.cdap.cdap.api.data.schema.Schema) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) BasicArguments(io.cdap.cdap.etl.common.BasicArguments) RunnableTask(io.cdap.cdap.api.service.worker.RunnableTask) SecureStoreMacroEvaluator(io.cdap.cdap.etl.common.SecureStoreMacroEvaluator) StageSpec(io.cdap.cdap.etl.proto.v2.spec.StageSpec) Collections(java.util.Collections) ArtifactScope(io.cdap.cdap.api.artifact.ArtifactScope) MacroEvaluator(io.cdap.cdap.api.macro.MacroEvaluator) StandardCharsets(java.nio.charset.StandardCharsets)
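
One detail that is easy to miss above is the double-encoded request: the outer RemoteValidationRequest envelope carries the namespace plus the inner StageValidationRequest as a raw JSON string, which is decoded by a second fromJson call. A small Gson sketch of that pattern; the stand-in class and its field names are assumptions mirroring the getters used above:

import com.google.gson.Gson;

public class DoubleDecodeSketch {
    private static final Gson GSON = new Gson();

    // Stand-in for RemoteValidationRequest: a namespace plus the inner request kept as a string
    static class Envelope {
        String namespace;
        String request;
    }

    public static void main(String[] args) {
        String param = "{\"namespace\":\"default\","
            + "\"request\":\"{\\\"resolveMacrosFromPreferences\\\":true}\"}";
        Envelope outer = GSON.fromJson(param, Envelope.class);
        // The inner payload is only parsed after the envelope is unwrapped
        System.out.println(outer.namespace + " -> " + outer.request);
    }
}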

Example 14 with BasicArguments

Use of io.cdap.cdap.etl.common.BasicArguments in project cdap by caskdata.

The class SparkPipelineRunner, method runPipeline:

public void runPipeline(PhaseSpec phaseSpec, String sourcePluginType, JavaSparkExecutionContext sec, Map<String, Integer> stagePartitions, PluginContext pluginContext, Map<String, StageStatisticsCollector> collectors, Set<String> uncombinableSinks, boolean consolidateStages, boolean cacheFunctions) throws Exception {
    PipelinePhase pipelinePhase = phaseSpec.getPhase();
    BasicArguments arguments = new BasicArguments(sec);
    FunctionCache.Factory functionCacheFactory = FunctionCache.Factory.newInstance(cacheFunctions);
    MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(arguments, sec.getLogicalStartTime(), sec.getSecureStore(), sec.getServiceDiscoverer(), sec.getNamespace());
    Map<String, EmittedRecords> emittedRecords = new HashMap<>();
    // Should never happen; this check only removes a nullability warning
    if (pipelinePhase.getDag() == null) {
        throw new IllegalStateException("Pipeline phase has no connections.");
    }
    Set<String> uncombinableStages = new HashSet<>(uncombinableSinks);
    for (String uncombinableType : UNCOMBINABLE_PLUGIN_TYPES) {
        pipelinePhase.getStagesOfType(uncombinableType).stream().map(StageSpec::getName).forEach(uncombinableStages::add);
    }
    CombinerDag groupedDag = new CombinerDag(pipelinePhase.getDag(), uncombinableStages);
    Map<String, Set<String>> groups = consolidateStages ? groupedDag.groupNodes() : Collections.emptyMap();
    if (!groups.isEmpty()) {
        LOG.debug("Stage consolidation is on.");
        int groupNum = 1;
        for (Set<String> group : groups.values()) {
            LOG.debug("Group{}: {}", groupNum, group);
            groupNum++;
        }
    }
    Set<String> branchers = new HashSet<>();
    for (String stageName : groupedDag.getNodes()) {
        if (groupedDag.getNodeOutputs(stageName).size() > 1) {
            branchers.add(stageName);
        }
    }
    Set<String> shufflers = pipelinePhase.getStagesOfType(BatchAggregator.PLUGIN_TYPE).stream().map(StageSpec::getName).collect(Collectors.toSet());
    Collection<Runnable> sinkRunnables = new ArrayList<>();
    for (String stageName : groupedDag.getTopologicalOrder()) {
        if (groups.containsKey(stageName)) {
            sinkRunnables.add(handleGroup(sec, phaseSpec, groups.get(stageName), groupedDag.getNodeInputs(stageName), emittedRecords, collectors));
            continue;
        }
        StageSpec stageSpec = pipelinePhase.getStage(stageName);
        String pluginType = stageSpec.getPluginType();
        EmittedRecords.Builder emittedBuilder = EmittedRecords.builder();
        // We don't want an additional error filter for stages that can emit errors but
        // aren't connected to an ErrorTransform; similarly, we don't want an additional
        // alert filter when the stage isn't connected to an AlertPublisher
        boolean hasErrorOutput = false;
        boolean hasAlertOutput = false;
        Set<String> outputs = pipelinePhase.getStageOutputs(stageName);
        for (String output : outputs) {
            String outputPluginType = pipelinePhase.getStage(output).getPluginType();
            // noinspection ConstantConditions
            if (ErrorTransform.PLUGIN_TYPE.equals(outputPluginType)) {
                hasErrorOutput = true;
            } else if (AlertPublisher.PLUGIN_TYPE.equals(outputPluginType)) {
                hasAlertOutput = true;
            }
        }
        SparkCollection<Object> stageData = null;
        Map<String, SparkCollection<Object>> inputDataCollections = new HashMap<>();
        Set<String> stageInputs = pipelinePhase.getStageInputs(stageName);
        for (String inputStageName : stageInputs) {
            StageSpec inputStageSpec = pipelinePhase.getStage(inputStageName);
            if (inputStageSpec == null) {
                // A null spec means the input to this stage is in a separate phase, for example an action.
                continue;
            }
            String port = null;
            // Determine which output port of the input stage feeds this stage; connector stages don't use ports
            if (!Constants.Connector.PLUGIN_TYPE.equals(inputStageSpec.getPluginType()) && !Constants.Connector.PLUGIN_TYPE.equals(pluginType)) {
                port = inputStageSpec.getOutputPorts().get(stageName).getPort();
            }
            SparkCollection<Object> inputRecords = port == null ? emittedRecords.get(inputStageName).outputRecords : emittedRecords.get(inputStageName).outputPortRecords.get(port);
            inputDataCollections.put(inputStageName, inputRecords);
        }
        // initialize the stageRDD as the union of all input RDDs.
        if (!inputDataCollections.isEmpty()) {
            Iterator<SparkCollection<Object>> inputCollectionIter = inputDataCollections.values().iterator();
            stageData = inputCollectionIter.next();
            // don't union input records if we're joining or if we're processing errors
            while (!BatchJoiner.PLUGIN_TYPE.equals(pluginType) && !ErrorTransform.PLUGIN_TYPE.equals(pluginType) && inputCollectionIter.hasNext()) {
                stageData = stageData.union(inputCollectionIter.next());
            }
        }
        boolean isConnectorSource = Constants.Connector.PLUGIN_TYPE.equals(pluginType) && pipelinePhase.getSources().contains(stageName);
        boolean isConnectorSink = Constants.Connector.PLUGIN_TYPE.equals(pluginType) && pipelinePhase.getSinks().contains(stageName);
        StageStatisticsCollector collector = collectors.get(stageName) == null ? new NoopStageStatisticsCollector() : collectors.get(stageName);
        PluginFunctionContext pluginFunctionContext = new PluginFunctionContext(stageSpec, sec, collector);
        if (stageData == null) {
            // stageData is only null for sources, so the else-if branches below can assume it is non-null
            if (sourcePluginType.equals(pluginType) || isConnectorSource) {
                SparkCollection<RecordInfo<Object>> combinedData = getSource(stageSpec, functionCacheFactory, collector);
                emittedBuilder = addEmitted(emittedBuilder, pipelinePhase, stageSpec, combinedData, groupedDag, branchers, shufflers, hasErrorOutput, hasAlertOutput);
            } else {
                throw new IllegalStateException(String.format("Stage '%s' has no input and is not a source.", stageName));
            }
        } else if (BatchSink.PLUGIN_TYPE.equals(pluginType) || isConnectorSink) {
            sinkRunnables.add(stageData.createStoreTask(stageSpec, new BatchSinkFunction(pluginFunctionContext, functionCacheFactory.newCache())));
        } else if (SparkSink.PLUGIN_TYPE.equals(pluginType)) {
            SparkSink<Object> sparkSink = pluginContext.newPluginInstance(stageName, macroEvaluator);
            sinkRunnables.add(stageData.createStoreTask(stageSpec, sparkSink));
        } else if (AlertPublisher.PLUGIN_TYPE.equals(pluginType)) {
            // union all the alerts coming into this stage
            SparkCollection<Alert> inputAlerts = null;
            for (String inputStage : stageInputs) {
                SparkCollection<Alert> inputErrorsFromStage = emittedRecords.get(inputStage).alertRecords;
                if (inputErrorsFromStage == null) {
                    continue;
                }
                if (inputAlerts == null) {
                    inputAlerts = inputErrorsFromStage;
                } else {
                    inputAlerts = inputAlerts.union(inputErrorsFromStage);
                }
            }
            if (inputAlerts != null) {
                inputAlerts.publishAlerts(stageSpec, collector);
            }
        } else if (ErrorTransform.PLUGIN_TYPE.equals(pluginType)) {
            // union all the errors coming into this stage
            SparkCollection<ErrorRecord<Object>> inputErrors = null;
            for (String inputStage : stageInputs) {
                SparkCollection<ErrorRecord<Object>> inputErrorsFromStage = emittedRecords.get(inputStage).errorRecords;
                if (inputErrorsFromStage == null) {
                    continue;
                }
                if (inputErrors == null) {
                    inputErrors = inputErrorsFromStage;
                } else {
                    inputErrors = inputErrors.union(inputErrorsFromStage);
                }
            }
            if (inputErrors != null) {
                SparkCollection<RecordInfo<Object>> combinedData = inputErrors.flatMap(stageSpec, new ErrorTransformFunction<Object, Object>(pluginFunctionContext, functionCacheFactory.newCache()));
                emittedBuilder = addEmitted(emittedBuilder, pipelinePhase, stageSpec, combinedData, groupedDag, branchers, shufflers, hasErrorOutput, hasAlertOutput);
            }
        } else {
            Object plugin = pluginContext.newPluginInstance(stageName, macroEvaluator);
            Optional<EmittedRecords.Builder> declarativeBuilder = tryRelationalTransform(pipelinePhase, groupedDag, branchers, shufflers, stageName, stageSpec, emittedBuilder, hasErrorOutput, hasAlertOutput, stageData, inputDataCollections, plugin);
            if (declarativeBuilder.isPresent()) {
                emittedBuilder = declarativeBuilder.get();
            } else {
                emittedBuilder = transform(emittedBuilder, stagePartitions, pipelinePhase, functionCacheFactory, groupedDag, branchers, shufflers, stageName, stageSpec, pluginType, hasErrorOutput, hasAlertOutput, stageData, inputDataCollections, collector, pluginFunctionContext, plugin);
            }
        }
        emittedRecords.put(stageName, emittedBuilder.build());
    }
    boolean shouldWriteInParallel = Boolean.parseBoolean(sec.getRuntimeArguments().get("pipeline.spark.parallel.sinks.enabled"));
    if (!shouldWriteInParallel) {
        for (Runnable runnable : sinkRunnables) {
            runnable.run();
        }
        return;
    }
    Collection<Future> sinkFutures = new ArrayList<>(sinkRunnables.size());
    ExecutorService executorService = Executors.newFixedThreadPool(sinkRunnables.size(), new ThreadFactoryBuilder().setNameFormat("pipeline-sink-task").build());
    for (Runnable runnable : sinkRunnables) {
        sinkFutures.add(executorService.submit(runnable));
    }
    Throwable error = null;
    for (Future future : sinkFutures) {
        try {
            future.get();
        } catch (ExecutionException e) {
            error = e.getCause();
            break;
        } catch (InterruptedException e) {
            // Restore the interrupt flag before bailing out
            Thread.currentThread().interrupt();
            break;
        }
    }
    executorService.shutdownNow();
    if (error != null) {
        throw Throwables.propagate(error);
    }
}
Also used: DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) MacroEvaluator(io.cdap.cdap.api.macro.MacroEvaluator) ImmutableSet(com.google.common.collect.ImmutableSet) Set(java.util.Set) HashSet(java.util.HashSet) HashMap(java.util.HashMap) ThreadFactoryBuilder(com.google.common.util.concurrent.ThreadFactoryBuilder) ArrayList(java.util.ArrayList) PluginFunctionContext(io.cdap.cdap.etl.spark.function.PluginFunctionContext) StageSpec(io.cdap.cdap.etl.proto.v2.spec.StageSpec) BasicArguments(io.cdap.cdap.etl.common.BasicArguments) ExecutionException(java.util.concurrent.ExecutionException) FunctionCache(io.cdap.cdap.etl.spark.function.FunctionCache) NoopStageStatisticsCollector(io.cdap.cdap.etl.common.NoopStageStatisticsCollector) RecordInfo(io.cdap.cdap.etl.common.RecordInfo) CombinerDag(io.cdap.cdap.etl.planner.CombinerDag) BatchSinkFunction(io.cdap.cdap.etl.spark.function.BatchSinkFunction) StageStatisticsCollector(io.cdap.cdap.etl.common.StageStatisticsCollector) PipelinePhase(io.cdap.cdap.etl.common.PipelinePhase) ExecutorService(java.util.concurrent.ExecutorService) Future(java.util.concurrent.Future) ErrorRecord(io.cdap.cdap.etl.api.ErrorRecord)
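
The tail of runPipeline is a reusable pattern in its own right: sinks run sequentially by default, but with the runtime flag enabled they are submitted to a fixed-size pool and the first failure is re-thrown after shutdown. A self-contained sketch of just that pattern, with illustrative sink tasks; the error handling mirrors the loop above:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class ParallelSinksSketch {
    public static void main(String[] args) {
        List<Runnable> sinkRunnables = Arrays.asList(
            () -> System.out.println("writing sink A"),
            () -> System.out.println("writing sink B"));

        ExecutorService executor = Executors.newFixedThreadPool(sinkRunnables.size());
        List<Future<?>> futures = new ArrayList<>();
        for (Runnable runnable : sinkRunnables) {
            futures.add(executor.submit(runnable));
        }
        Throwable error = null;
        for (Future<?> future : futures) {
            try {
                future.get();
            } catch (ExecutionException e) {
                // Remember the first failure and stop waiting on the rest
                error = e.getCause();
                break;
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
        }
        // Cancel any still-running sinks before surfacing the failure
        executor.shutdownNow();
        if (error != null) {
            throw new RuntimeException(error);
        }
    }
}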

Example 15 with BasicArguments

Use of io.cdap.cdap.etl.common.BasicArguments in project cdap by caskdata.

The class ValidationHandler, method validateLocally:

private void validateLocally(HttpServiceRequest request, HttpServiceResponder responder, String namespace) throws IOException {
    StageValidationRequest validationRequest;
    try {
        validationRequest = GSON.fromJson(StandardCharsets.UTF_8.decode(request.getContent()).toString(), StageValidationRequest.class);
        validationRequest.validate();
    } catch (JsonSyntaxException e) {
        responder.sendError(HttpURLConnection.HTTP_BAD_REQUEST, "Unable to decode request body: " + e.getMessage());
        return;
    } catch (IllegalArgumentException e) {
        responder.sendError(HttpURLConnection.HTTP_BAD_REQUEST, "Invalid stage config: " + e.getMessage());
        return;
    }
    Map<String, String> arguments = Collections.emptyMap();
    // Use namespace preferences as macro-resolution arguments only when the request enables this option.
    if (validationRequest.getResolveMacrosFromPreferences()) {
        try {
            arguments = getContext().getPreferencesForNamespace(namespace, true);
        } catch (IllegalArgumentException iae) {
            // If this method throws IllegalArgumentException, it means the namespace doesn't exist.
            // If this is the case, we return a 404 error.
            responder.sendError(HttpURLConnection.HTTP_NOT_FOUND, String.format("Namespace '%s' does not exist", namespace));
            return;
        }
    }
    Map<String, MacroEvaluator> evaluators = ImmutableMap.of(SecureStoreMacroEvaluator.FUNCTION_NAME, new SecureStoreMacroEvaluator(namespace, getContext()), OAuthMacroEvaluator.FUNCTION_NAME, new OAuthMacroEvaluator(getContext()), ConnectionMacroEvaluator.FUNCTION_NAME, new ConnectionMacroEvaluator(namespace, getContext()));
    MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(new BasicArguments(arguments), evaluators, DefaultMacroEvaluator.MAP_FUNCTIONS);
    MacroParserOptions macroParserOptions = MacroParserOptions.builder().skipInvalidMacros().setEscaping(false).setFunctionWhitelist(evaluators.keySet()).build();
    Function<Map<String, String>, Map<String, String>> macroFn = macroProperties -> getContext().evaluateMacros(namespace, macroProperties, macroEvaluator, macroParserOptions);
    String validationResponse = GSON.toJson(ValidationUtils.validate(namespace, validationRequest, getContext().createServicePluginConfigurer(namespace), macroFn, getContext()));
    responder.sendString(validationResponse);
}
Also used: BatchSource(io.cdap.cdap.etl.api.batch.BatchSource) HttpURLConnection(java.net.HttpURLConnection) JsonObject(com.google.gson.JsonObject) RemoteExecutionException(io.cdap.cdap.api.service.worker.RemoteExecutionException) PathParam(javax.ws.rs.PathParam) TypeToken(com.google.gson.reflect.TypeToken) OAuthMacroEvaluator(io.cdap.cdap.etl.common.OAuthMacroEvaluator) SchemaTypeAdapter(io.cdap.cdap.internal.io.SchemaTypeAdapter) GET(javax.ws.rs.GET) AccessException(io.cdap.cdap.api.security.AccessException) LoggerFactory(org.slf4j.LoggerFactory) Path(javax.ws.rs.Path) Bytes(io.cdap.cdap.api.common.Bytes) Function(java.util.function.Function) GsonBuilder(com.google.gson.GsonBuilder) RunnableTaskRequest(io.cdap.cdap.api.service.worker.RunnableTaskRequest) Gson(com.google.gson.Gson) ArtifactSummary(io.cdap.cdap.api.artifact.ArtifactSummary) PluginSpec(io.cdap.cdap.etl.proto.v2.spec.PluginSpec) Map(java.util.Map) ConnectionMacroEvaluator(io.cdap.cdap.etl.common.ConnectionMacroEvaluator) MacroParserOptions(io.cdap.cdap.api.macro.MacroParserOptions) RemoteTaskException(io.cdap.cdap.api.service.worker.RemoteTaskException) StageValidationRequest(io.cdap.cdap.etl.proto.v2.validation.StageValidationRequest) PipelineSpec(io.cdap.cdap.etl.proto.v2.spec.PipelineSpec) Logger(org.slf4j.Logger) POST(javax.ws.rs.POST) ImmutableMap(com.google.common.collect.ImmutableMap) JsonSyntaxException(com.google.gson.JsonSyntaxException) IOException(java.io.IOException) HttpServiceResponder(io.cdap.cdap.api.service.http.HttpServiceResponder) Schema(io.cdap.cdap.api.data.schema.Schema) StandardCharsets(java.nio.charset.StandardCharsets) DefaultMacroEvaluator(io.cdap.cdap.etl.common.DefaultMacroEvaluator) AbstractSystemHttpServiceHandler(io.cdap.cdap.api.service.http.AbstractSystemHttpServiceHandler) BasicArguments(io.cdap.cdap.etl.common.BasicArguments) Type(java.lang.reflect.Type) ArtifactVersion(io.cdap.cdap.api.artifact.ArtifactVersion) HttpServiceRequest(io.cdap.cdap.api.service.http.HttpServiceRequest) BatchPipelineSpec(io.cdap.cdap.etl.batch.BatchPipelineSpec) SecureStoreMacroEvaluator(io.cdap.cdap.etl.common.SecureStoreMacroEvaluator) StageSpec(io.cdap.cdap.etl.proto.v2.spec.StageSpec) AppRequest(io.cdap.cdap.proto.artifact.AppRequest) Collections(java.util.Collections) ArtifactId(io.cdap.cdap.api.artifact.ArtifactId) ArtifactScope(io.cdap.cdap.api.artifact.ArtifactScope) MacroEvaluator(io.cdap.cdap.api.macro.MacroEvaluator)
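
validateLocally starts by decoding the HTTP body from a ByteBuffer before handing it to Gson; that UTF-8 decode step is worth seeing in isolation. A minimal sketch where the payload is a made-up example:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class BodyDecodeSketch {
    public static void main(String[] args) {
        // HttpServiceRequest.getContent() hands back a ByteBuffer
        ByteBuffer content = ByteBuffer.wrap(
            "{\"resolveMacrosFromPreferences\":true}".getBytes(StandardCharsets.UTF_8));
        // Same decode call as the first line of validateLocally
        String body = StandardCharsets.UTF_8.decode(content).toString();
        System.out.println(body); // ready for GSON.fromJson(body, StageValidationRequest.class)
    }
}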

Aggregations

BasicArguments (io.cdap.cdap.etl.common.BasicArguments): 28
MacroEvaluator (io.cdap.cdap.api.macro.MacroEvaluator): 26
DefaultMacroEvaluator (io.cdap.cdap.etl.common.DefaultMacroEvaluator): 26
StageSpec (io.cdap.cdap.etl.proto.v2.spec.StageSpec): 12
PluginContext (io.cdap.cdap.api.plugin.PluginContext): 10
PipelineRuntime (io.cdap.cdap.etl.common.PipelineRuntime): 10
SparkPipelinePluginContext (io.cdap.cdap.etl.spark.plugin.SparkPipelinePluginContext): 10
HashMap (java.util.HashMap): 10
Map (java.util.Map): 10
MacroParserOptions (io.cdap.cdap.api.macro.MacroParserOptions): 8
BatchPhaseSpec (io.cdap.cdap.etl.batch.BatchPhaseSpec): 8
OAuthMacroEvaluator (io.cdap.cdap.etl.common.OAuthMacroEvaluator): 8
SecureStoreMacroEvaluator (io.cdap.cdap.etl.common.SecureStoreMacroEvaluator): 8
SparkPipelineRuntime (io.cdap.cdap.etl.spark.SparkPipelineRuntime): 8
PipelinePluginContext (io.cdap.cdap.etl.common.plugin.PipelinePluginContext): 6
TxRunnable (io.cdap.cdap.api.TxRunnable): 5
DatasetContext (io.cdap.cdap.api.data.DatasetContext): 5
ArrayList (java.util.ArrayList): 5
ImmutableMap (com.google.common.collect.ImmutableMap): 3
Gson (com.google.gson.Gson): 3