Search in sources:

Example 1 with DataProperty

use of org.pentaho.platform.api.repository2.unified.data.node.DataProperty in project pentaho-kettle by pentaho.

In the class AttributesMapUtil, the method loadAttributesMap:

/**
 * Rebuilds the attribute-group map of the given {@code attributesInterface}
 * from the {@code NODE_ATTRIBUTE_GROUPS} child of {@code dataNode}.
 * Each child node becomes one group; each of its properties becomes one
 * key/value attribute. Entries with a null key or null value are skipped.
 */
public static final void loadAttributesMap(DataNode dataNode, AttributesInterface attributesInterface) throws KettleException {
    Map<String, Map<String, String>> attributesMap = new HashMap<String, Map<String, String>>();
    attributesInterface.setAttributesMap(attributesMap);
    DataNode groupsNode = dataNode.getNode(NODE_ATTRIBUTE_GROUPS);
    if (groupsNode == null) {
        // No attribute groups stored: leave the (empty) map in place.
        return;
    }
    for (DataNode groupNode : groupsNode.getNodes()) {
        HashMap<String, String> groupAttributes = new HashMap<String, String>();
        attributesMap.put(groupNode.getName(), groupAttributes);
        for (DataProperty property : groupNode.getProperties()) {
            String attributeKey = property.getName();
            String attributeValue = property.getString();
            // Only keep fully-populated entries.
            if (attributeKey != null && attributeValue != null) {
                groupAttributes.put(attributeKey, attributeValue);
            }
        }
    }
}
Also used : HashMap(java.util.HashMap) DataNode(org.pentaho.platform.api.repository2.unified.data.node.DataNode) DataProperty(org.pentaho.platform.api.repository2.unified.data.node.DataProperty) Map(java.util.Map) HashMap(java.util.HashMap)

Example 2 with DataProperty

use of org.pentaho.platform.api.repository2.unified.data.node.DataProperty in project pentaho-kettle by pentaho.

In the class TransDelegate, the method dataNodeToElement:

/**
 * Re-constitutes a {@code TransMeta} from its repository {@code DataNode}
 * representation: private databases, steps (including partitioning,
 * clustering and error handling), notes, hops, parameters and, last,
 * the transformation details.
 *
 * @param rootNode the repository node holding the serialized transformation
 * @param element the target element; must be a {@code TransMeta}
 * @throws KettleException if the stored note/hop counts do not match what
 *           was actually read, or a step/schema cannot be loaded
 */
public void dataNodeToElement(final DataNode rootNode, final RepositoryElementInterface element) throws KettleException {
    TransMeta transMeta = (TransMeta) element;
    Set<String> privateDatabases = null;
    // read the private databases
    DataNode privateDbsNode = rootNode.getNode(NODE_TRANS_PRIVATE_DATABASES);
    // BACKLOG-6635: names may be stored either as a delimited property or as child nodes
    if (privateDbsNode != null) {
        privateDatabases = new HashSet<String>();
        if (privateDbsNode.hasProperty(PROP_TRANS_PRIVATE_DATABASE_NAMES)) {
            // Newer format: one delimited string property holding all names.
            for (String privateDatabaseName : getString(privateDbsNode, PROP_TRANS_PRIVATE_DATABASE_NAMES).split(TRANS_PRIVATE_DATABASE_DELIMITER)) {
                if (!privateDatabaseName.isEmpty()) {
                    privateDatabases.add(privateDatabaseName);
                }
            }
        } else {
            // Older format: one child node per private database.
            for (DataNode privateDatabase : privateDbsNode.getNodes()) {
                privateDatabases.add(privateDatabase.getName());
            }
        }
    }
    transMeta.setPrivateDatabases(privateDatabases);
    // read the steps...
    // 
    DataNode stepsNode = rootNode.getNode(NODE_STEPS);
    for (DataNode stepNode : stepsNode.getNodes()) {
        StepMeta stepMeta = new StepMeta(new StringObjectId(stepNode.getId().toString()));
        // for tracing, retain hierarchy
        stepMeta.setParentTransMeta(transMeta);
        // Read the basics
        // 
        stepMeta.setName(getString(stepNode, PROP_NAME));
        if (stepNode.hasProperty(PROP_DESCRIPTION)) {
            stepMeta.setDescription(getString(stepNode, PROP_DESCRIPTION));
        }
        stepMeta.setDistributes(stepNode.getProperty(PROP_STEP_DISTRIBUTE).getBoolean());
        // Row distribution is optional; a missing property yields a null code.
        DataProperty rowDistributionProperty = stepNode.getProperty(PROP_STEP_ROW_DISTRIBUTION);
        String rowDistributionCode = rowDistributionProperty == null ? null : rowDistributionProperty.getString();
        RowDistributionInterface rowDistribution = PluginRegistry.getInstance().loadClass(RowDistributionPluginType.class, rowDistributionCode, RowDistributionInterface.class);
        stepMeta.setRowDistribution(rowDistribution);
        stepMeta.setDraw(stepNode.getProperty(PROP_STEP_GUI_DRAW).getBoolean());
        int copies = (int) stepNode.getProperty(PROP_STEP_COPIES).getLong();
        String copiesString = stepNode.getProperty(PROP_STEP_COPIES_STRING) != null ? stepNode.getProperty(PROP_STEP_COPIES_STRING).getString() : StringUtils.EMPTY;
        if (!Utils.isEmpty(copiesString)) {
            stepMeta.setCopiesString(copiesString);
        } else {
            // for backward compatibility
            stepMeta.setCopies(copies);
        }
        int x = (int) stepNode.getProperty(PROP_STEP_GUI_LOCATION_X).getLong();
        int y = (int) stepNode.getProperty(PROP_STEP_GUI_LOCATION_Y).getLong();
        stepMeta.setLocation(x, y);
        // Load the group attributes map
        // 
        AttributesMapUtil.loadAttributesMap(stepNode, stepMeta);
        String stepType = getString(stepNode, PROP_STEP_TYPE);
        // Create a new StepMetaInterface object...
        // 
        PluginRegistry registry = PluginRegistry.getInstance();
        PluginInterface stepPlugin = registry.findPluginWithId(StepPluginType.class, stepType);
        StepMetaInterface stepMetaInterface = null;
        if (stepPlugin != null) {
            stepMetaInterface = (StepMetaInterface) registry.loadClass(stepPlugin);
            // revert to the default in case we loaded an alternate version
            stepType = stepPlugin.getIds()[0];
        } else {
            // Unknown plugin: keep a MissingTrans placeholder so the transformation still loads.
            stepMeta.setStepMetaInterface((StepMetaInterface) new MissingTrans(stepMeta.getName(), stepType));
            transMeta.addMissingTrans((MissingTrans) stepMeta.getStepMetaInterface());
        }
        stepMeta.setStepID(stepType);
        // Read the metadata from the repository too...
        // 
        RepositoryProxy proxy = new RepositoryProxy(stepNode.getNode(NODE_STEP_CUSTOM));
        if (!stepMeta.isMissing()) {
            readRepCompatibleStepMeta(stepMetaInterface, proxy, null, transMeta.getDatabases());
            stepMetaInterface.readRep(proxy, transMeta.getMetaStore(), null, transMeta.getDatabases());
            stepMeta.setStepMetaInterface(stepMetaInterface);
        }
        // Get the partitioning as well...
        StepPartitioningMeta stepPartitioningMeta = new StepPartitioningMeta();
        if (stepNode.hasProperty(PROP_PARTITIONING_SCHEMA)) {
            String partSchemaId = stepNode.getProperty(PROP_PARTITIONING_SCHEMA).getRef().getId().toString();
            String schemaName = repo.loadPartitionSchema(new StringObjectId(partSchemaId), null).getName();
            stepPartitioningMeta.setPartitionSchemaName(schemaName);
            String methodCode = getString(stepNode, PROP_PARTITIONING_METHOD);
            stepPartitioningMeta.setMethod(StepPartitioningMeta.getMethod(methodCode));
            if (stepPartitioningMeta.getPartitioner() != null) {
                proxy = new RepositoryProxy(stepNode.getNode(NODE_PARTITIONER_CUSTOM));
                stepPartitioningMeta.getPartitioner().loadRep(proxy, null);
            }
            stepPartitioningMeta.hasChanged(true);
        }
        stepMeta.setStepPartitioningMeta(stepPartitioningMeta);
        stepMeta.getStepPartitioningMeta().setPartitionSchemaAfterLoading(transMeta.getPartitionSchemas());
        // Get the cluster schema name
        String clusterSchemaName = getString(stepNode, PROP_CLUSTER_SCHEMA);
        stepMeta.setClusterSchemaName(clusterSchemaName);
        if (clusterSchemaName != null && transMeta.getClusterSchemas() != null) {
            // Get the cluster schema from the given name
            for (ClusterSchema clusterSchema : transMeta.getClusterSchemas()) {
                if (clusterSchema.getName().equals(clusterSchemaName)) {
                    stepMeta.setClusterSchema(clusterSchema);
                    break;
                }
            }
        }
        transMeta.addStep(stepMeta);
    }
    // Second pass over the steps: error handling can only be wired up once
    // all steps exist, because it references source and target steps by name.
    for (DataNode stepNode : stepsNode.getNodes()) {
        ObjectId stepObjectId = new StringObjectId(stepNode.getId().toString());
        StepMeta stepMeta = StepMeta.findStep(transMeta.getSteps(), stepObjectId);
        // Read the step error handling metadata, if present.
        if (stepNode.hasProperty(PROP_STEP_ERROR_HANDLING_SOURCE_STEP)) {
            StepErrorMeta meta = new StepErrorMeta(transMeta, stepMeta);
            meta.setTargetStep(StepMeta.findStep(transMeta.getSteps(), stepNode.getProperty(PROP_STEP_ERROR_HANDLING_TARGET_STEP).getString()));
            meta.setEnabled(stepNode.getProperty(PROP_STEP_ERROR_HANDLING_IS_ENABLED).getBoolean());
            meta.setNrErrorsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_NR_VALUENAME));
            meta.setErrorDescriptionsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_DESCRIPTIONS_VALUENAME));
            meta.setErrorFieldsValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_FIELDS_VALUENAME));
            meta.setErrorCodesValuename(getString(stepNode, PROP_STEP_ERROR_HANDLING_CODES_VALUENAME));
            meta.setMaxErrors(getString(stepNode, PROP_STEP_ERROR_HANDLING_MAX_ERRORS));
            meta.setMaxPercentErrors(getString(stepNode, PROP_STEP_ERROR_HANDLING_MAX_PCT_ERRORS));
            meta.setMinPercentRows(getString(stepNode, PROP_STEP_ERROR_HANDLING_MIN_PCT_ROWS));
            // a bit of a trick, I know.
            meta.getSourceStep().setStepErrorMeta(meta);
        }
    }
    // Let each step resolve its info/target step references now that the full step list exists.
    for (int i = 0; i < transMeta.nrSteps(); i++) {
        StepMeta stepMeta = transMeta.getStep(i);
        StepMetaInterface sii = stepMeta.getStepMetaInterface();
        if (sii != null) {
            sii.searchInfoAndTargetSteps(transMeta.getSteps());
        }
    }
    // Read the notes...
    // 
    DataNode notesNode = rootNode.getNode(NODE_NOTES);
    int nrNotes = (int) notesNode.getProperty(PROP_NR_NOTES).getLong();
    for (DataNode noteNode : notesNode.getNodes()) {
        String xml = getString(noteNode, PROP_XML);
        transMeta.addNote(new NotePadMeta(XMLHandler.getSubNode(XMLHandler.loadXMLString(xml), NotePadMeta.XML_TAG)));
    }
    // Sanity check: the stored count must match what we actually read.
    if (transMeta.nrNotes() != nrNotes) {
        throw new KettleException("The number of notes read [" + transMeta.nrNotes() + "] was not the number we expected [" + nrNotes + "]");
    }
    // Read the hops...
    // 
    DataNode hopsNode = rootNode.getNode(NODE_HOPS);
    int nrHops = (int) hopsNode.getProperty(PROP_NR_HOPS).getLong();
    for (DataNode hopNode : hopsNode.getNodes()) {
        String stepFromName = getString(hopNode, TRANS_HOP_FROM);
        String stepToName = getString(hopNode, TRANS_HOP_TO);
        boolean enabled = true;
        if (hopNode.hasProperty(TRANS_HOP_ENABLED)) {
            enabled = hopNode.getProperty(TRANS_HOP_ENABLED).getBoolean();
        }
        StepMeta stepFrom = StepMeta.findStep(transMeta.getSteps(), stepFromName);
        StepMeta stepTo = StepMeta.findStep(transMeta.getSteps(), stepToName);
        // Only add the hop if both endpoints were resolved.
        if (stepFrom != null && stepTo != null) {
            transMeta.addTransHop(new TransHopMeta(stepFrom, stepTo, enabled));
        }
    }
    if (transMeta.nrTransHops() != nrHops) {
        throw new KettleException("The number of hops read [" + transMeta.nrTransHops() + "] was not the number we expected [" + nrHops + "]");
    }
    // Load the details at the end, to make sure we reference the databases correctly, etc.
    // 
    loadTransformationDetails(rootNode, transMeta);
    loadDependencies(rootNode, transMeta);
    transMeta.eraseParameters();
    // Parameters are stored as numbered child nodes (TRANS_PARAM_PREFIX + index).
    DataNode paramsNode = rootNode.getNode(NODE_PARAMETERS);
    int count = (int) paramsNode.getProperty(PROP_NR_PARAMETERS).getLong();
    for (int idx = 0; idx < count; idx++) {
        DataNode paramNode = paramsNode.getNode(TRANS_PARAM_PREFIX + idx);
        String key = getString(paramNode, PARAM_KEY);
        String def = getString(paramNode, PARAM_DEFAULT);
        String desc = getString(paramNode, PARAM_DESC);
        transMeta.addParameterDefinition(key, def, desc);
    }
    transMeta.activateParameters();
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) StringObjectId(org.pentaho.di.repository.StringObjectId) ObjectId(org.pentaho.di.repository.ObjectId) PluginInterface(org.pentaho.di.core.plugins.PluginInterface) TransMeta(org.pentaho.di.trans.TransMeta) StepMetaInterface(org.pentaho.di.trans.step.StepMetaInterface) StepErrorMeta(org.pentaho.di.trans.step.StepErrorMeta) DataProperty(org.pentaho.platform.api.repository2.unified.data.node.DataProperty) StepPartitioningMeta(org.pentaho.di.trans.step.StepPartitioningMeta) StepMeta(org.pentaho.di.trans.step.StepMeta) StringObjectId(org.pentaho.di.repository.StringObjectId) DataNode(org.pentaho.platform.api.repository2.unified.data.node.DataNode) PluginRegistry(org.pentaho.di.core.plugins.PluginRegistry) RowDistributionInterface(org.pentaho.di.trans.step.RowDistributionInterface) MissingTrans(org.pentaho.di.trans.steps.missing.MissingTrans) NotePadMeta(org.pentaho.di.core.NotePadMeta) TransHopMeta(org.pentaho.di.trans.TransHopMeta) ClusterSchema(org.pentaho.di.cluster.ClusterSchema)

Example 3 with DataProperty

use of org.pentaho.platform.api.repository2.unified.data.node.DataProperty in project pentaho-kettle by pentaho.

In the class PurRepositoryAttribute, the method getAttributeDatabaseMeta:

/**
 * Looks up the database referenced by the given attribute code.
 * The stored property value is interpreted as a database object id.
 *
 * @return the matching DatabaseMeta, or null if the property is absent or empty
 */
public DatabaseMeta getAttributeDatabaseMeta(String code) {
    DataProperty property = dataNode.getProperty(code);
    if (property != null && !Utils.isEmpty(property.getString())) {
        ObjectId databaseId = new StringObjectId(property.getString());
        return DatabaseMeta.findDatabase(databases, databaseId);
    }
    return null;
}
Also used : ObjectId(org.pentaho.di.repository.ObjectId) StringObjectId(org.pentaho.di.repository.StringObjectId) DataProperty(org.pentaho.platform.api.repository2.unified.data.node.DataProperty) StringObjectId(org.pentaho.di.repository.StringObjectId)

Example 4 with DataProperty

use of org.pentaho.platform.api.repository2.unified.data.node.DataProperty in project pentaho-kettle by pentaho.

In the class PurRepositoryStressTest, the method setUpTest:

@Before
public void setUpTest() throws Exception {
    // Builds one shared mock environment for all stress-test methods:
    // a PurRepository wired to a mocked IUnifiedRepository, with stubbed
    // files/folders for each scenario exercised below.
    // -- Common
    this.purRepository = new PurRepository();
    this.mockRepo = mock(IUnifiedRepository.class);
    this.result = mock(RepositoryConnectResult.class);
    when(result.getUnifiedRepository()).thenReturn(mockRepo);
    this.connector = mock(IRepositoryConnector.class);
    when(connector.connect(anyString(), anyString())).thenReturn(result);
    this.user = mock(IUser.class);
    when(result.getUser()).thenReturn(user);
    this.mockMeta = mock(PurRepositoryMeta.class);
    purRepository.init(mockMeta);
    purRepository.setPurRepositoryConnector(connector);
    this.mockRootFolder = mock(RepositoryFile.class);
    when(mockRootFolder.getId()).thenReturn("/");
    when(mockRootFolder.getPath()).thenReturn("/");
    when(mockRepo.getFile("/")).thenReturn(mockRootFolder);
    // -- testLocksSaveRepositoryDirectory
    RepositoryFile repFile = mock(RepositoryFile.class);
    doReturn("id2").when(repFile).getId();
    doReturn(repFile).when(mockRepo).createFolder(any(Serializable.class), any(RepositoryFile.class), anyString());
    // -- testLocksIsUserHomeDirectory
    doReturn(null).when(mockRepo).getFile(anyString());
    RepositoryFile folder1 = mock(RepositoryFile.class);
    when(folder1.getPath()).thenReturn("/folder1/folder2/");
    when(folder1.getName()).thenReturn("folder2");
    when(mockRepo.getFileById(any(Serializable.class))).thenReturn(folder1);
    // -- testLocksDeleteRepositoryDirectory
    doReturn("user1").when(user).getLogin();
    mockStatic(ClientRepositoryPaths.class);
    when(ClientRepositoryPaths.getUserHomeFolderPath(any())).thenReturn("/");
    purRepository.connect("TEST_USER", "TEST_PASSWORD");
    // -- testLocksGetDirectoryNames
    List<RepositoryFile> children = new ArrayList<RepositoryFile>();
    when(mockRepo.getChildren(any(Serializable.class))).thenReturn(children);
    // -- testLocksGetObjectId
    when(ClientRepositoryPaths.getEtcFolderPath()).thenReturn("/test");
    String bdPath = "/test/pdi/databases";
    when(mockRepo.getFile(bdPath)).thenReturn(mockRootFolder);
    // -- testLockDeleteDatabaseMeta
    String bdMetaFile = "/test/pdi/databases/dbName.kdb";
    RepositoryFile folderBdMeta = mock(RepositoryFile.class);
    when(folderBdMeta.getPath()).thenReturn("/test/pdi/databases/dbName.kdb");
    when(folderBdMeta.getId()).thenReturn("db");
    when(folderBdMeta.getName()).thenReturn("dbName.kdb");
    when(mockRepo.getFile(bdMetaFile)).thenReturn(folderBdMeta);
    // Fix: each shared-objects folder stubs its OWN getId(). The previous
    // version re-stubbed folderBdMeta.getId() three times (copy-paste),
    // leaving these mocks with a null id and folderBdMeta with "cschemas".
    String partitionSchemas = "/test/pdi/partitionSchemas";
    RepositoryFile folderPartitionSchemas = mock(RepositoryFile.class);
    when(folderPartitionSchemas.getPath()).thenReturn("/test/pdi/partitionSchemas");
    when(folderPartitionSchemas.getId()).thenReturn("pschemas");
    when(folderPartitionSchemas.getName()).thenReturn("partitionSchemas");
    when(mockRepo.getFile(partitionSchemas)).thenReturn(folderPartitionSchemas);
    String slaveServers = "/test/pdi/slaveServers";
    RepositoryFile folderSlaveServers = mock(RepositoryFile.class);
    when(folderSlaveServers.getPath()).thenReturn("/test/pdi/slaveServers");
    when(folderSlaveServers.getId()).thenReturn("sservers");
    when(folderSlaveServers.getName()).thenReturn("slaveServers");
    when(mockRepo.getFile(slaveServers)).thenReturn(folderSlaveServers);
    String clusterSchemas = "/test/pdi/clusterSchemas";
    RepositoryFile folderClusterSchemas = mock(RepositoryFile.class);
    when(folderClusterSchemas.getPath()).thenReturn("/test/pdi/clusterSchemas");
    when(folderClusterSchemas.getId()).thenReturn("cschemas");
    when(folderClusterSchemas.getName()).thenReturn("clusterSchemas");
    when(mockRepo.getFile(clusterSchemas)).thenReturn(folderClusterSchemas);
    List<RepositoryFile> childrenRes = new ArrayList<RepositoryFile>();
    when(mockRepo.getChildren(any(Serializable.class), anyString())).thenReturn(childrenRes);
    // -- testLockLoadClusterSchema
    RepositoryFile folderLoad = mock(RepositoryFile.class);
    when(folderLoad.getTitle()).thenReturn("titleFolderLoad");
    when(folderLoad.getId()).thenReturn("idFolderLoad");
    when(folderLoad.getPath()).thenReturn("/folder1/");
    when(mockRepo.getFileAtVersion(anyString(), eq("v1"))).thenReturn(folderLoad);
    DataProperty dataNodeProp = mock(DataProperty.class);
    when(dataNodeProp.getBoolean()).thenReturn(false);
    when(dataNodeProp.getLong()).thenReturn(0L);
    DataNode dataNodeRes = mock(DataNode.class);
    when(dataNodeRes.getProperty(anyString())).thenReturn(dataNodeProp);
    DataNode stepsNode = mock(DataNode.class);
    when(stepsNode.getNodes()).thenReturn(new ArrayList<DataNode>());
    when(stepsNode.getProperty(anyString())).thenReturn(dataNodeProp);
    DataNode dataNode = mock(DataNode.class);
    when(dataNode.getProperty(anyString())).thenReturn(dataNodeProp);
    when(dataNode.getNode(anyString())).thenReturn(dataNodeRes);
    when(dataNode.getNode(eq("transPrivateDatabases"))).thenReturn(null);
    when(dataNode.getNode(eq("steps"))).thenReturn(stepsNode);
    when(dataNode.getNode(eq("notes"))).thenReturn(stepsNode);
    when(dataNode.getNode(eq("hops"))).thenReturn(stepsNode);
    when(dataNode.getNode(eq("jobPrivateDatabases"))).thenReturn(null);
    when(dataNode.getNode(eq("entries"))).thenReturn(stepsNode);
    NodeRepositoryFileData modeRepoFileData = mock(NodeRepositoryFileData.class);
    when(modeRepoFileData.getNode()).thenReturn(dataNode);
    when(mockRepo.getDataAtVersionForRead(anyString(), eq("v1"), eq(NodeRepositoryFileData.class))).thenReturn(modeRepoFileData);
    when(mockRepo.getDataAtVersionForRead(anyString(), eq("v3"), eq(NodeRepositoryFileData.class))).thenReturn(modeRepoFileData);
    VersionSummary vSummary = mock(VersionSummary.class);
    when(vSummary.getId()).thenReturn(mock(Serializable.class));
    when(vSummary.getAuthor()).thenReturn("author");
    when(vSummary.getDate()).thenReturn(mock(Date.class));
    when(vSummary.getMessage()).thenReturn("message");
    when(mockRepo.getVersionSummary(anyString(), anyString())).thenReturn(vSummary);
    // -- testLockLoadSlaveServer
    mockStatic(Encr.class);
    when(Encr.decryptPasswordOptionallyEncrypted(anyString())).thenReturn("pass");
    // -- testLockLoadTransformation
    String transfFile = "/folder1/folder2/transName.ktr";
    RepositoryFile transfFileMeta = mock(RepositoryFile.class);
    when(transfFileMeta.getPath()).thenReturn("/folder1/folder2/transName.ktr");
    when(transfFileMeta.getId()).thenReturn("transName");
    when(transfFileMeta.getName()).thenReturn("transName.ktr");
    when(mockRepo.getFile(transfFile)).thenReturn(transfFileMeta);
    mockStatic(AttributesMapUtil.class);
    // -- testLockLoadJob
    String jobFile = "/folder1/folder2/jobName.kjb";
    RepositoryFile jobFileMeta = mock(RepositoryFile.class);
    when(jobFileMeta.getPath()).thenReturn("/folder1/folder2/jobName.kjb");
    when(jobFileMeta.getId()).thenReturn("jobName");
    when(jobFileMeta.getName()).thenReturn("jobName.kjb");
    when(mockRepo.getFile(jobFile)).thenReturn(jobFileMeta);
    // -- testLockSaveClusterSchema
    RepositoryFile clusterSchemaFile = mock(RepositoryFile.class);
    when(clusterSchemaFile.getId()).thenReturn("clusterSchemaFile");
    when(mockRepo.updateFile(any(RepositoryFile.class), any(IRepositoryFileData.class), anyString())).thenReturn(clusterSchemaFile);
    // -- testLockGetObjectInformation
    when(mockRepo.getFileById("idnull")).thenReturn(null);
    // --- testLockLoadJob2 ---
    RepositoryFile folderLoad2 = mock(RepositoryFile.class);
    when(folderLoad2.getTitle()).thenReturn("titleFolderLoad");
    when(folderLoad2.getId()).thenReturn("idFolderLoad");
    when(folderLoad2.getPath()).thenReturn("/");
    when(mockRepo.getFileAtVersion(anyString(), eq("v3"))).thenReturn(folderLoad2);
    mockStatic(DBCache.class);
    purRepository.loadAndCacheSharedObjects(true);
}
Also used : IRepositoryFileData(org.pentaho.platform.api.repository2.unified.IRepositoryFileData) Serializable(java.io.Serializable) ArrayList(java.util.ArrayList) DataProperty(org.pentaho.platform.api.repository2.unified.data.node.DataProperty) Matchers.anyString(org.mockito.Matchers.anyString) Date(java.util.Date) DataNode(org.pentaho.platform.api.repository2.unified.data.node.DataNode) NodeRepositoryFileData(org.pentaho.platform.api.repository2.unified.data.node.NodeRepositoryFileData) IUser(org.pentaho.di.repository.IUser) VersionSummary(org.pentaho.platform.api.repository2.unified.VersionSummary) RepositoryFile(org.pentaho.platform.api.repository2.unified.RepositoryFile) IUnifiedRepository(org.pentaho.platform.api.repository2.unified.IUnifiedRepository) Before(org.junit.Before)

Example 5 with DataProperty

use of org.pentaho.platform.api.repository2.unified.data.node.DataProperty in project pentaho-kettle by pentaho.

In the class PurRepositoryMetaStore, the method dataNodeToAttribute:

/**
 * Recursively mirrors a repository DataNode tree onto an IMetaStoreAttribute
 * tree: each typed property becomes a child attribute, and each child node
 * becomes a nested attribute subtree.
 */
protected void dataNodeToAttribute(DataNode dataNode, IMetaStoreAttribute attribute) throws MetaStoreException {
    for (DataProperty property : dataNode.getProperties()) {
        Object propertyValue;
        switch(property.getType()) {
            case STRING:
                propertyValue = property.getString();
                break;
            case LONG:
                propertyValue = property.getLong();
                break;
            case DOUBLE:
                propertyValue = property.getDouble();
                break;
            case DATE:
                propertyValue = property.getDate();
                break;
            default:
                // Property types we cannot represent are skipped entirely.
                continue;
        }
        // Backwards Compatibility: a property named after the node itself
        // also supplies the node's own value.
        if (property.getName().equals(dataNode.getName())) {
            attribute.setValue(propertyValue);
        }
        attribute.addChild(newAttribute(property.getName(), propertyValue));
    }
    // Recurse into child nodes, building one nested attribute per node.
    for (DataNode childNode : dataNode.getNodes()) {
        IMetaStoreAttribute childAttribute = newAttribute(childNode.getName(), null);
        dataNodeToAttribute(childNode, childAttribute);
        attribute.addChild(childAttribute);
    }
}
Also used : DataNode(org.pentaho.platform.api.repository2.unified.data.node.DataNode) IMetaStoreAttribute(org.pentaho.metastore.api.IMetaStoreAttribute) DataProperty(org.pentaho.platform.api.repository2.unified.data.node.DataProperty)

Aggregations

DataProperty (org.pentaho.platform.api.repository2.unified.data.node.DataProperty)25 DataNode (org.pentaho.platform.api.repository2.unified.data.node.DataNode)15 NodeRepositoryFileData (org.pentaho.platform.api.repository2.unified.data.node.NodeRepositoryFileData)5 Date (java.util.Date)4 Test (org.junit.Test)4 RepositoryFile (org.pentaho.platform.api.repository2.unified.RepositoryFile)4 ArrayList (java.util.ArrayList)3 Matchers.anyString (org.mockito.Matchers.anyString)3 IUnifiedRepository (org.pentaho.platform.api.repository2.unified.IUnifiedRepository)3 Serializable (java.io.Serializable)2 HashMap (java.util.HashMap)2 Map (java.util.Map)2 ObjectId (org.pentaho.di.repository.ObjectId)2 StringObjectId (org.pentaho.di.repository.StringObjectId)2 ITenant (org.pentaho.platform.api.mt.ITenant)2 DataNodeRef (org.pentaho.platform.api.repository2.unified.data.node.DataNodeRef)2 Calendar (java.util.Calendar)1 Locale (java.util.Locale)1 Properties (java.util.Properties)1 TimeZone (java.util.TimeZone)1