Search in sources :

Example 1 with SerializableMetadataProvider

Use of org.apache.hop.core.metadata.SerializableMetadataProvider in project hop by apache.

From the class RemotePipelineEngine, method sendToHopServer:

/**
 * Send the pipeline for execution to a remote Hop server.
 *
 * @param pipelineMeta the pipeline metadata
 * @param executionConfiguration the pipeline execution configuration
 * @param metadataProvider the metadata provider whose content is sent along with the pipeline
 * @throws HopException if any error occurs while dispatching to the Hop server
 */
private void sendToHopServer(PipelineMeta pipelineMeta, PipelineExecutionConfiguration executionConfiguration, IHopMetadataProvider metadataProvider) throws HopException {
    if (hopServer == null) {
        throw new HopException("No remote server specified");
    }
    if (Utils.isEmpty(pipelineMeta.getName())) {
        throw new HopException("The pipeline needs a name to uniquely identify it by on the remote server.");
    }
    // Inject certain internal variables to make it more intuitive.
    // 
    Map<String, String> vars = new HashMap<>();
    // 
    for (String var : getVariableNames()) {
        if (isVariablePassedToRemoteServer(var)) {
            vars.put(var, getVariable(var));
        }
    }
    executionConfiguration.getVariablesMap().putAll(vars);
    hopServer.getLogChannel().setLogLevel(executionConfiguration.getLogLevel());
    try {
        if (executionConfiguration.isPassingExport()) {
            // First export the pipeline...
            // 
            FileObject tempFile = HopVfs.createTempFile("pipelineExport", HopVfs.Suffix.ZIP);
            // The executionConfiguration should not include external references here because all
            // the resources should be retrieved from the exported zip file.
            // TODO: Serialize metadata objects to JSON and include it in the zip file
            //
            PipelineExecutionConfiguration clonedConfiguration = (PipelineExecutionConfiguration) executionConfiguration.clone();
            TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(tempFile.getName().toString(), pipelineMeta, this, metadataProvider, clonedConfiguration, CONFIGURATION_IN_EXPORT_FILENAME, remotePipelineRunConfiguration.getNamedResourcesSourceFolder(), remotePipelineRunConfiguration.getNamedResourcesTargetFolder(), executionConfiguration.getVariablesMap());
            // Send the zip file over to the hop server...
            // 
            String result = hopServer.sendExport(this, topLevelResource.getArchiveName(), RegisterPackageServlet.TYPE_PIPELINE, topLevelResource.getBaseResourceName());
            WebResult webResult = WebResult.fromXmlString(result);
            if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
                String message = cleanupMessage(webResult.getMessage());
                throw new HopException("There was an error passing the exported pipeline to the remote server: " + Const.CR + message);
            }
            containerId = webResult.getId();
        } else {
            // Now send it off to the remote server...
            // Include the JSON of the whole content of the current metadata
            // 
            SerializableMetadataProvider serializableMetadataProvider = new SerializableMetadataProvider(metadataProvider);
            String xml = new PipelineConfiguration(pipelineMeta, executionConfiguration, serializableMetadataProvider).getXml(this);
            String reply = hopServer.sendXml(this, xml, RegisterPipelineServlet.CONTEXT_PATH + "/?xml=Y");
            WebResult webResult = WebResult.fromXmlString(reply);
            if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
                String message = cleanupMessage(webResult.getMessage());
                throw new HopException("There was an error posting the pipeline on the remote server: " + Const.CR + message);
            }
            containerId = webResult.getId();
        }
        // Prepare the pipeline
        // 
        String reply = hopServer.execService(this, PrepareExecutionPipelineServlet.CONTEXT_PATH + "/?name=" + URLEncoder.encode(pipelineMeta.getName(), "UTF-8") + "&xml=Y&id=" + containerId);
        WebResult webResult = WebResult.fromXmlString(reply);
        if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
            String message = cleanupMessage(webResult.getMessage());
            throw new HopException("There was an error preparing the pipeline for execution on the remote server: " + Const.CR + message);
        }
        // Get the status right after preparation.
        // 
        getPipelineStatus();
    } catch (HopException ke) {
        throw ke;
    } catch (Exception e) {
        throw new HopException(e);
    }
}
Also used : TopLevelResource(org.apache.hop.resource.TopLevelResource) SerializableMetadataProvider(org.apache.hop.core.metadata.SerializableMetadataProvider) HopException(org.apache.hop.core.exception.HopException) FileObject(org.apache.commons.vfs2.FileObject)
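
The non-export branch above is the core SerializableMetadataProvider pattern: the full metadata set is wrapped, serialized and shipped inside the pipeline configuration XML. A minimal sketch of just that step, assuming pipelineMeta, executionConfiguration, metadataProvider and an IVariables instance called variables are in scope (as they are inside sendToHopServer) and that the surrounding method declares throws HopException:

// Wrap the current metadata so it can travel with the pipeline definition.
SerializableMetadataProvider serializable = new SerializableMetadataProvider(metadataProvider);
// The provider can also be flattened to a single JSON document, for example to
// write a metadata.json entry into an export zip or simply to inspect it.
String metadataJson = serializable.toJson();
// Bundle the pipeline, the execution configuration and the metadata into one XML
// payload, the same payload that is posted to RegisterPipelineServlet above.
String xml = new PipelineConfiguration(pipelineMeta, executionConfiguration, serializable).getXml(variables);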

Example 2 with SerializableMetadataProvider

Use of org.apache.hop.core.metadata.SerializableMetadataProvider in project hop by apache.

From the class AddExportServlet, method doGet:

@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    if (isJettyMode() && !request.getRequestURI().startsWith(CONTEXT_PATH)) {
        return;
    }
    if (log.isDebug()) {
        logDebug("Addition of export requested");
    }
    PrintWriter out = response.getWriter();
    // read from the client
    InputStream in = request.getInputStream();
    if (log.isDetailed()) {
        logDetailed("Encoding: " + request.getCharacterEncoding());
    }
    boolean isWorkflow = TYPE_WORKFLOW.equalsIgnoreCase(request.getParameter(PARAMETER_TYPE));
    // the resource to load
    String load = request.getParameter(PARAMETER_LOAD);
    response.setContentType("text/xml");
    out.print(XmlHandler.getXmlHeader());
    response.setStatus(HttpServletResponse.SC_OK);
    OutputStream outputStream = null;
    try {
        FileObject tempFile = HopVfs.createTempFile("export", ".zip", System.getProperty("java.io.tmpdir"));
        outputStream = HopVfs.getOutputStream(tempFile, false);
        // Pass the input directly to a temporary file
        // 
        int c;
        while ((c = in.read()) != -1) {
            outputStream.write(c);
        }
        outputStream.flush();
        outputStream.close();
        // don't close it twice
        outputStream = null;
        String archiveUrl = tempFile.getName().toString();
        String fileUrl = null;
        String serverObjectId = null;
        SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH, LoggingObjectType.HOP_SERVER, null);
        // If a resource to load was specified, inflate the metadata shipped with the archive
        // and register the workflow or pipeline it refers to.
        //
        if (!Utils.isEmpty(load)) {
            String metaStoreJson = RegisterPackageServlet.getMetaStoreJsonFromZIP("zip:" + archiveUrl + "!metadata.json");
            SerializableMetadataProvider metadataProvider = new SerializableMetadataProvider(metaStoreJson);
            fileUrl = "zip:" + archiveUrl + "!" + load;
            if (isWorkflow) {
                // Open the workflow from inside the ZIP archive
                // 
                HopVfs.getFileObject(fileUrl);
                WorkflowMeta workflowMeta = new WorkflowMeta(fileUrl);
                // Also read the execution configuration information
                // 
                String configUrl = "zip:" + archiveUrl + "!" + Workflow.CONFIGURATION_IN_EXPORT_FILENAME;
                Document configDoc = XmlHandler.loadXmlFile(configUrl);
                WorkflowExecutionConfiguration workflowExecutionConfiguration = new WorkflowExecutionConfiguration(XmlHandler.getSubNode(configDoc, WorkflowExecutionConfiguration.XML_TAG));
                serverObjectId = UUID.randomUUID().toString();
                servletLoggingObject.setContainerObjectId(serverObjectId);
                servletLoggingObject.setLogLevel(workflowExecutionConfiguration.getLogLevel());
                String runConfigurationName = workflowExecutionConfiguration.getRunConfiguration();
                // Inflate the metadata and simply store it into the workflow metadata
                // 
                workflowMeta.setMetadataProvider(metadataProvider);
                final IWorkflowEngine<WorkflowMeta> workflow = WorkflowEngineFactory.createWorkflowEngine(variables, runConfigurationName, metadataProvider, workflowMeta, servletLoggingObject);
                // store it all in the map...
                // 
                getWorkflowMap().addWorkflow(workflow.getWorkflowName(), serverObjectId, workflow, new WorkflowConfiguration(workflowMeta, workflowExecutionConfiguration, metadataProvider));
                // Apply the execution configuration...
                // 
                log.setLogLevel(workflowExecutionConfiguration.getLogLevel());
            } else {
                // Read the execution configuration information
                // 
                String configUrl = "zip:" + archiveUrl + "!" + Pipeline.CONFIGURATION_IN_EXPORT_FILENAME;
                Document configDoc = XmlHandler.loadXmlFile(configUrl);
                PipelineExecutionConfiguration executionConfiguration = new PipelineExecutionConfiguration(XmlHandler.getSubNode(configDoc, PipelineExecutionConfiguration.XML_TAG));
                // Open the pipeline from inside the ZIP archive
                // 
                PipelineMeta pipelineMeta = new PipelineMeta(fileUrl, metadataProvider, true, Variables.getADefaultVariableSpace());
                serverObjectId = UUID.randomUUID().toString();
                servletLoggingObject.setContainerObjectId(serverObjectId);
                servletLoggingObject.setLogLevel(executionConfiguration.getLogLevel());
                String runConfigurationName = executionConfiguration.getRunConfiguration();
                IPipelineEngine<PipelineMeta> pipeline = PipelineEngineFactory.createPipelineEngine(variables, runConfigurationName, metadataProvider, pipelineMeta);
                pipeline.setParent(servletLoggingObject);
                // store it all in the map...
                // 
                getPipelineMap().addPipeline(pipeline.getPipelineMeta().getName(), serverObjectId, pipeline, new PipelineConfiguration(pipelineMeta, executionConfiguration, metadataProvider));
            }
        } else {
            fileUrl = archiveUrl;
        }
        out.println(new WebResult(WebResult.STRING_OK, fileUrl, serverObjectId));
    } catch (Exception ex) {
        out.println(new WebResult(WebResult.STRING_ERROR, Const.getStackTracker(ex)));
    } finally {
        if (outputStream != null) {
            outputStream.close();
        }
    }
}
Also used : InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) PipelineConfiguration(org.apache.hop.pipeline.PipelineConfiguration) SimpleLoggingObject(org.apache.hop.core.logging.SimpleLoggingObject) Document(org.w3c.dom.Document) WorkflowExecutionConfiguration(org.apache.hop.workflow.WorkflowExecutionConfiguration) ServletException(javax.servlet.ServletException) IOException(java.io.IOException) WorkflowMeta(org.apache.hop.workflow.WorkflowMeta) PipelineMeta(org.apache.hop.pipeline.PipelineMeta) WorkflowConfiguration(org.apache.hop.workflow.WorkflowConfiguration) SerializableMetadataProvider(org.apache.hop.core.metadata.SerializableMetadataProvider) FileObject(org.apache.commons.vfs2.FileObject) PipelineExecutionConfiguration(org.apache.hop.pipeline.PipelineExecutionConfiguration) PrintWriter(java.io.PrintWriter)
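
The inverse of Example 1's export path is the small inflation step in the middle of doGet(): the metadata.json entry is pulled out of the uploaded zip and turned back into a provider. Isolated as a sketch, assuming archiveUrl points at the uploaded archive and the checked exceptions are handled by a surrounding try block, as in doGet():

// Read the metadata JSON entry that the export path places inside the zip...
String metaStoreJson = RegisterPackageServlet.getMetaStoreJsonFromZIP("zip:" + archiveUrl + "!metadata.json");
// ...and inflate it back into a full metadata provider.
SerializableMetadataProvider metadataProvider = new SerializableMetadataProvider(metaStoreJson);
// From here on it behaves like any other IHopMetadataProvider, e.g. when loading
// the pipeline: new PipelineMeta(fileUrl, metadataProvider, true, variables).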

Example 3 with SerializableMetadataProvider

Use of org.apache.hop.core.metadata.SerializableMetadataProvider in project hop by apache.

From the class ManageProjectsOptionPlugin, method exportMetadataToJson:

private void exportMetadataToJson(ILogChannel log, ProjectsConfig config, IVariables variables, IHasHopMetadataProvider hasHopMetadataProvider) throws HopException {
    if (StringUtils.isEmpty(projectName)) {
        throw new HopException("Please specify the name of the project for which you want to export the metadata");
    }
    ProjectConfig projectConfig = config.findProjectConfig(projectName);
    if (projectConfig == null) {
        throw new HopException("Project '" + projectName + "' couldn't be found in the Hop configuration");
    }
    Project project = projectConfig.loadProject(Variables.getADefaultVariableSpace());
    ProjectsUtil.enableProject(log, projectName, project, variables, new ArrayList<>(), null, hasHopMetadataProvider);
    log.logBasic("Enabled project " + projectName);
    String realFilename = variables.resolve(metadataJsonFilename);
    log.logBasic("Exporting project metadata to a single file: " + realFilename);
    // This is the metadata to export
    // 
    SerializableMetadataProvider metadataProvider = new SerializableMetadataProvider(hasHopMetadataProvider.getMetadataProvider());
    String jsonString = metadataProvider.toJson();
    try {
        try (OutputStream outputStream = HopVfs.getOutputStream(realFilename, false)) {
            outputStream.write(jsonString.getBytes(StandardCharsets.UTF_8));
        }
        log.logBasic("Metadata was exported successfully.");
    } catch (Exception e) {
        throw new HopException("There was an error exporting metadata to file: " + realFilename, e);
    }
}
Also used : SerializableMetadataProvider(org.apache.hop.core.metadata.SerializableMetadataProvider) HopException(org.apache.hop.core.exception.HopException) OutputStream(java.io.OutputStream)
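
The exported file can be read back with the String constructor of SerializableMetadataProvider. A sketch of that round trip, assuming the realFilename written above, that HopVfs.getInputStream is available for the same VFS location, and that the surrounding method handles the checked exceptions:

// Read the exported JSON back from the same (VFS) location...
String json;
try (InputStream inputStream = HopVfs.getInputStream(realFilename)) {
    json = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8);
}
// ...and inflate it into a metadata provider again.
SerializableMetadataProvider restored = new SerializableMetadataProvider(json);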

Example 4 with SerializableMetadataProvider

Use of org.apache.hop.core.metadata.SerializableMetadataProvider in project hop by apache.

From the class MainBeam, method main:

public static void main(String[] args) {
    try {
        System.out.println("Argument 1 : Pipeline filename (.hpl)   : " + args[0]);
        System.out.println("Argument 2 : Metadata filename (.json)  : " + args[1]);
        System.out.println("Argument 3 : Pipeline run configuration : " + args[2]);
        System.out.println(">>>>>> Initializing Hop...");
        HopEnvironment.init();
        // Read the pipeline XML and metadata JSON (optionally from Hadoop FS)
        // 
        String pipelineMetaXml = readFileIntoString(args[0], "UTF-8");
        String metadataJson = readFileIntoString(args[1], "UTF-8");
        String runConfigName = args[2];
        // Inflate the metadata:
        // 
        SerializableMetadataProvider metadataProvider = new SerializableMetadataProvider(metadataJson);
        // Load the pipeline run configuration from this metadata provider:
        // 
        IHopMetadataSerializer<PipelineRunConfiguration> serializer = metadataProvider.getSerializer(PipelineRunConfiguration.class);
        if (!serializer.exists(runConfigName)) {
            throw new HopException("The specified pipeline run configuration '" + runConfigName + "' doesn't exist");
        }
        System.out.println(">>>>>> Loading pipeline metadata");
        PipelineMeta pipelineMeta = new PipelineMeta(XmlHandler.loadXmlString(pipelineMetaXml, PipelineMeta.XML_TAG), metadataProvider);
        System.out.println(">>>>>> Building Apache Beam Pipeline...");
        PluginRegistry registry = PluginRegistry.getInstance();
        IPlugin beamInputPlugin = registry.getPlugin(TransformPluginType.class, BeamConst.STRING_BEAM_INPUT_PLUGIN_ID);
        if (beamInputPlugin != null) {
            System.out.println(">>>>>> Found Beam Input transform plugin class loader");
        } else {
            throw new HopException("ERROR: Unable to find Beam Input transform plugin. Is it in the fat jar? ");
        }
        IVariables variables = Variables.getADefaultVariableSpace();
        // Execute it...
        // 
        IPipelineEngine<PipelineMeta> pipelineEngine = PipelineEngineFactory.createPipelineEngine(variables, runConfigName, metadataProvider, pipelineMeta);
        System.out.println(">>>>>> Pipeline executing starting...");
        pipelineEngine.execute();
        pipelineEngine.waitUntilFinished();
        System.out.println(">>>>>> Execution finished...");
        System.exit(0);
    } catch (Exception e) {
        System.err.println("Error running Beam pipeline: " + e.getMessage());
        e.printStackTrace();
        System.exit(1);
    }
}
Also used : SerializableMetadataProvider(org.apache.hop.core.metadata.SerializableMetadataProvider) HopException(org.apache.hop.core.exception.HopException) PipelineRunConfiguration(org.apache.hop.pipeline.config.PipelineRunConfiguration) IVariables(org.apache.hop.core.variables.IVariables) PluginRegistry(org.apache.hop.core.plugins.PluginRegistry) IOException(java.io.IOException) PipelineMeta(org.apache.hop.pipeline.PipelineMeta) IPlugin(org.apache.hop.core.plugins.IPlugin)
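
Beyond checking that a single run configuration exists, the inflated provider hands out a typed serializer for every metadata class. A sketch that enumerates and loads all shipped run configurations; it assumes the listObjectNames() and load() methods on IHopMetadataSerializer and a surrounding method that declares throws HopException:

// Ask the provider for a serializer dedicated to PipelineRunConfiguration objects.
IHopMetadataSerializer<PipelineRunConfiguration> serializer =
    metadataProvider.getSerializer(PipelineRunConfiguration.class);
// Enumerate and load every run configuration that was shipped in the JSON.
for (String name : serializer.listObjectNames()) {
    PipelineRunConfiguration runConfiguration = serializer.load(name);
    System.out.println("Available run configuration: " + runConfiguration.getName());
}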

Example 5 with SerializableMetadataProvider

Use of org.apache.hop.core.metadata.SerializableMetadataProvider in project hop by apache.

From the class HopBeamGuiPlugin, method menuToolsExportMetadata:

@GuiMenuElement(root = HopGui.ID_MAIN_MENU, id = ID_MAIN_MENU_TOOLS_EXPORT_METADATA, label = "i18n::BeamGuiPlugin.Menu.ExportMetadata.Text", parentId = HopGui.ID_MAIN_MENU_TOOLS_PARENT_ID, separator = true)
public void menuToolsExportMetadata() {
    HopGui hopGui = HopGui.getInstance();
    final Shell shell = hopGui.getShell();
    MessageBox box = new MessageBox(shell, SWT.OK | SWT.CANCEL | SWT.ICON_INFORMATION);
    box.setText(BaseMessages.getString(PKG, "BeamGuiPlugin.ExportMetadata.Dialog.Header"));
    box.setMessage(BaseMessages.getString(PKG, "BeamGuiPlugin.ExportMetadata.Dialog.Message"));
    int answer = box.open();
    if ((answer & SWT.CANCEL) != 0) {
        return;
    }
    // Ask for the JSON filename to export the metadata to
    //
    String filename = BaseDialog.presentFileDialog(true, shell, new String[] { "*.json", "*.*" }, new String[] { BaseMessages.getString(PKG, "BeamGuiPlugin.FileTypes.Json.Label"), BaseMessages.getString(PKG, "BeamGuiPlugin.FileTypes.All.Label") }, true);
    if (filename == null) {
        return;
    }
    try {
        // Save HopGui metadata to JSON...
        // 
        SerializableMetadataProvider metadataProvider = new SerializableMetadataProvider(hopGui.getMetadataProvider());
        String jsonString = metadataProvider.toJson();
        String realFilename = hopGui.getVariables().resolve(filename);
        try (OutputStream outputStream = HopVfs.getOutputStream(realFilename, false)) {
            outputStream.write(jsonString.getBytes(StandardCharsets.UTF_8));
        }
    } catch (Exception e) {
        new ErrorDialog(shell, "Error", "Error saving metadata to JSON file : " + filename, e);
    }
}
Also used : Shell(org.eclipse.swt.widgets.Shell) SerializableMetadataProvider(org.apache.hop.core.metadata.SerializableMetadataProvider) OutputStream(java.io.OutputStream) ErrorDialog(org.apache.hop.ui.core.dialog.ErrorDialog) InvocationTargetException(java.lang.reflect.InvocationTargetException) HopGui(org.apache.hop.ui.hopgui.HopGui) MessageBox(org.eclipse.swt.widgets.MessageBox) GuiMenuElement(org.apache.hop.core.gui.plugin.menu.GuiMenuElement)
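
Examples 3 and 5 share the same export step: wrap the live provider, call toJson() and write the bytes through HopVfs. A hypothetical helper (not part of Hop) that distills that shared step; only calls already shown in the examples above are used:

// Hypothetical helper, not part of the Hop API: export any metadata provider to a JSON file.
public static void exportMetadataJson(IHopMetadataProvider provider, String filename) throws HopException {
    try {
        String json = new SerializableMetadataProvider(provider).toJson();
        try (OutputStream outputStream = HopVfs.getOutputStream(filename, false)) {
            outputStream.write(json.getBytes(StandardCharsets.UTF_8));
        }
    } catch (Exception e) {
        throw new HopException("There was an error writing metadata JSON to " + filename, e);
    }
}

The JSON written this way is also the format MainBeam in Example 4 expects as its second argument.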

Aggregations

SerializableMetadataProvider (org.apache.hop.core.metadata.SerializableMetadataProvider) 11
HopException (org.apache.hop.core.exception.HopException) 7
OutputStream (java.io.OutputStream) 6
IOException (java.io.IOException) 5
FileObject (org.apache.commons.vfs2.FileObject) 4
PipelineMeta (org.apache.hop.pipeline.PipelineMeta) 4
SimpleLoggingObject (org.apache.hop.core.logging.SimpleLoggingObject) 3
IVariables (org.apache.hop.core.variables.IVariables) 3
PipelineConfiguration (org.apache.hop.pipeline.PipelineConfiguration) 3
PipelineExecutionConfiguration (org.apache.hop.pipeline.PipelineExecutionConfiguration) 3
WorkflowConfiguration (org.apache.hop.workflow.WorkflowConfiguration) 3
WorkflowExecutionConfiguration (org.apache.hop.workflow.WorkflowExecutionConfiguration) 3
WorkflowMeta (org.apache.hop.workflow.WorkflowMeta) 3
InputStream (java.io.InputStream) 2
ServletException (javax.servlet.ServletException) 2
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 1
PrintWriter (java.io.PrintWriter) 1
InvocationTargetException (java.lang.reflect.InvocationTargetException) 1
ArrayList (java.util.ArrayList) 1
HashMap (java.util.HashMap) 1