
Example 11 with EHadoopParamName

Use of org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName in project tbd-studio-se by Talend.

In the class NoSQLRepositoryContextHandler, the method setPropertiesForContextMode:

/*
     * (non-Javadoc)
     *
     * @see
     * org.talend.repository.ui.utils.IRepositoryContextHandler#setPropertiesForContextMode(org.talend.core.model.properties
     * .ContextItem, java.util.Map)
     */
@Override
public void setPropertiesForContextMode(String prefixName, Connection connection, Set<IConnParamName> paramSet) {
    if (connection == null) {
        return;
    }
    if (connection instanceof NoSQLConnection) {
        String noSqlVariableName = null;
        NoSQLConnection noSqlConn = (NoSQLConnection) connection;
        String originalVariableName = prefixName + ConnectionContextHelper.LINE;
        for (IConnParamName param : paramSet) {
            if (param instanceof EHadoopParamName) {
                EHadoopParamName noSqlParam = (EHadoopParamName) param;
                originalVariableName = prefixName + ConnectionContextHelper.LINE;
                if (noSqlParam == EHadoopParamName.ReplicaSets) {
                    noSqlVariableName = originalVariableName;
                } else {
                    noSqlVariableName = originalVariableName + noSqlParam;
                }
                matchContextForAttribues(noSqlConn, noSqlParam, noSqlVariableName);
            }
        }
    }
}
Also used : EHadoopParamName(org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName) IConnParamName(org.talend.metadata.managment.ui.model.IConnParamName) NoSQLConnection(org.talend.repository.model.nosql.NoSQLConnection)
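
The snippet above derives every context variable name from the connection prefix plus the parameter enum, with ReplicaSets as the one exception that keeps the bare prefix. A minimal, self-contained sketch of that naming rule follows; the Param enum and the underscore separator (assumed here to be the value of ConnectionContextHelper.LINE) are illustrative stand-ins, not the actual Talend types.

// Illustrative sketch only; the enum values and the "_" separator are assumptions.
public class NoSqlContextNamingSketch {

    // Hypothetical subset of EHadoopParamName, for illustration.
    enum Param { ReplicaSets, Server, Port, Database }

    static String variableNameFor(String prefixName, Param param) {
        // The handler above first builds prefixName + ConnectionContextHelper.LINE...
        String originalVariableName = prefixName + "_";
        // ...then appends the enum name, except for ReplicaSets, which keeps the bare prefix.
        return param == Param.ReplicaSets ? originalVariableName : originalVariableName + param;
    }

    public static void main(String[] args) {
        System.out.println(variableNameFor("MyMongoConn", Param.Server));      // MyMongoConn_Server
        System.out.println(variableNameFor("MyMongoConn", Param.ReplicaSets)); // MyMongoConn_
    }
}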

Example 12 with EHadoopParamName

Use of org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName in project tbd-studio-se by Talend.

In the class HdfsContextHandler, the method createContextParameters:

@Override
public List<IContextParameter> createContextParameters(String prefixName, Connection connection, Set<IConnParamName> paramSet) {
    List<IContextParameter> varList = new ArrayList<IContextParameter>();
    if (connection instanceof HDFSConnection) {
        HDFSConnection conn = (HDFSConnection) connection;
        String paramPrefix = prefixName + ConnectionContextHelper.LINE;
        String paramName = null;
        for (IConnParamName param : paramSet) {
            if (param instanceof EHadoopParamName) {
                EHadoopParamName hdfsParam = (EHadoopParamName) param;
                paramName = paramPrefix + hdfsParam;
                switch(hdfsParam) {
                    case HdfsUser:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getUserName());
                        break;
                    case HdfsRowSeparator:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getRowSeparator());
                        break;
                    case HdfsFileSeparator:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getFieldSeparator());
                        break;
                    case HdfsRowHeader:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getHeaderValue(), JavaTypesManager.INTEGER);
                        break;
                    default:
                }
            }
        }
        createHadoopPropertiesContextVariable(prefixName, varList, conn.getHadoopProperties());
    }
    return varList;
}
Also used : ArrayList(java.util.ArrayList) EHadoopParamName(org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName) HDFSConnection(org.talend.repository.model.hdfs.HDFSConnection) IConnParamName(org.talend.metadata.managment.ui.model.IConnParamName) IContextParameter(org.talend.core.model.process.IContextParameter)
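
The handler walks the selected parameter set, dispatches on the enum, and registers one context variable per field, passing an explicit Java type only when the default String type does not fit (the header count becomes an INTEGER). The sketch below mirrors that shape in plain Java; the Var class, the type labels, and the "_" separator are hypothetical stand-ins, not the Talend IContextParameter API.

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch only; Var and the type labels are assumptions.
class HdfsContextParamSketch {

    static class Var {
        final String name, value, javaType;
        Var(String name, String value, String javaType) {
            this.name = name; this.value = value; this.javaType = javaType;
        }
        @Override public String toString() { return name + "=" + value + " (" + javaType + ")"; }
    }

    static List<Var> createContextParameters(String prefixName, String user, String rowSep,
                                              String fieldSep, String header) {
        String paramPrefix = prefixName + "_"; // assumed value of ConnectionContextHelper.LINE
        List<Var> varList = new ArrayList<>();
        varList.add(new Var(paramPrefix + "HdfsUser", user, "String"));
        varList.add(new Var(paramPrefix + "HdfsRowSeparator", rowSep, "String"));
        varList.add(new Var(paramPrefix + "HdfsFileSeparator", fieldSep, "String"));
        varList.add(new Var(paramPrefix + "HdfsRowHeader", header, "Integer")); // explicit type, as above
        return varList;
    }

    public static void main(String[] args) {
        createContextParameters("MyHdfs", "hdfs", "\\n", ";", "1").forEach(System.out::println);
    }
}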

Example 13 with EHadoopParamName

Use of org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName in project tbd-studio-se by Talend.

In the class HdfsContextHandler, the method setPropertiesForExistContextMode:

@Override
public void setPropertiesForExistContextMode(Connection connection, Set<IConnParamName> paramSet, Map<ContextItem, List<ConectionAdaptContextVariableModel>> adaptMap) {
    if (connection == null) {
        return;
    }
    if (connection instanceof HDFSConnection) {
        HDFSConnection hcatalogConn = (HDFSConnection) connection;
        ContextItem currentContext = null;
        for (IConnParamName param : paramSet) {
            if (param instanceof EHadoopParamName) {
                String hdfsVariableName = null;
                EHadoopParamName hdfsConnectionParam = (EHadoopParamName) param;
                if (adaptMap != null && adaptMap.size() > 0) {
                    for (Map.Entry<ContextItem, List<ConectionAdaptContextVariableModel>> entry : adaptMap.entrySet()) {
                        currentContext = entry.getKey();
                        List<ConectionAdaptContextVariableModel> modelList = entry.getValue();
                        for (ConectionAdaptContextVariableModel model : modelList) {
                            if (model.getValue().equals(hdfsConnectionParam.name())) {
                                hdfsVariableName = model.getName();
                                break;
                            }
                        }
                    }
                }
                if (hdfsVariableName != null) {
                    hdfsVariableName = getCorrectVariableName(currentContext, hdfsVariableName, hdfsConnectionParam);
                    matchContextForAttribues(hcatalogConn, hdfsConnectionParam, hdfsVariableName);
                }
            }
        }
        matchAdditionProperties(hcatalogConn, adaptMap);
    }
}
Also used : ContextItem(org.talend.core.model.properties.ContextItem) EHadoopParamName(org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName) HDFSConnection(org.talend.repository.model.hdfs.HDFSConnection) IConnParamName(org.talend.metadata.managment.ui.model.IConnParamName) ArrayList(java.util.ArrayList) List(java.util.List) ConectionAdaptContextVariableModel(org.talend.core.ui.context.model.table.ConectionAdaptContextVariableModel) Map(java.util.Map)
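
Here the adapt map carries the variables a user chose to reuse from an existing context: each ConectionAdaptContextVariableModel pairs an existing context variable name with the connection parameter it covers, and the handler picks the model whose value equals the parameter's name(). A minimal sketch of that lookup follows, with a hypothetical Model class standing in for the Talend type.

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

// Illustrative sketch only; Model is a stand-in for ConectionAdaptContextVariableModel.
class AdaptLookupSketch {

    static class Model {
        final String name;   // existing context variable, e.g. "hdfs_user"
        final String value;  // connection parameter it adapts, e.g. "HdfsUser"
        Model(String name, String value) { this.name = name; this.value = value; }
    }

    static Optional<String> variableFor(String paramName, List<Model> models) {
        return models.stream()
                .filter(m -> m.value.equals(paramName)) // same test as model.getValue().equals(param.name())
                .map(m -> m.name)
                .findFirst();
    }

    public static void main(String[] args) {
        List<Model> models = Arrays.asList(new Model("hdfs_user", "HdfsUser"));
        System.out.println(variableFor("HdfsUser", models).orElse("<no match>")); // hdfs_user
    }
}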

Example 14 with EHadoopParamName

Use of org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName in project tbd-studio-se by Talend.

In the class HCatalogContextHandler, the method createContextParameters:

@Override
public List<IContextParameter> createContextParameters(String prefixName, Connection connection, Set<IConnParamName> paramSet) {
    List<IContextParameter> varList = new ArrayList<IContextParameter>();
    if (connection instanceof HCatalogConnection) {
        HCatalogConnection conn = (HCatalogConnection) connection;
        String paramPrefix = prefixName + ConnectionContextHelper.LINE;
        String paramName = null;
        for (IConnParamName param : paramSet) {
            if (param instanceof EHadoopParamName) {
                EHadoopParamName hcatalogParam = (EHadoopParamName) param;
                paramName = paramPrefix + hcatalogParam;
                switch(hcatalogParam) {
                    case HCatalogHostName:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getHostName());
                        break;
                    case HCatalogPort:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getPort());
                        break;
                    case HCatalogUser:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getUserName());
                        break;
                    case HCatalogPassword:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getPassword(), JavaTypesManager.PASSWORD);
                        break;
                    case HCatalogKerPrin:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getKrbPrincipal());
                        break;
                    case HCatalogRealm:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getKrbRealm());
                        break;
                    case HCatalogDatabase:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getDatabase());
                        break;
                    case HcataLogRowSeparator:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getRowSeparator());
                        break;
                    case HcatalogFileSeparator:
                        ConnectionContextHelper.createParameters(varList, paramName, conn.getFieldSeparator());
                        break;
                    default:
                }
            }
        }
        createHadoopPropertiesContextVariable(prefixName, varList, conn.getHadoopProperties());
    }
    return varList;
}
Also used : HCatalogConnection(org.talend.repository.model.hcatalog.HCatalogConnection) ArrayList(java.util.ArrayList) EHadoopParamName(org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName) IConnParamName(org.talend.metadata.managment.ui.model.IConnParamName) IContextParameter(org.talend.core.model.process.IContextParameter)
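
This handler follows the same pattern as the HDFS one, but it also shows the typed overloads: most fields use the default String type, while the password is registered with JavaTypesManager.PASSWORD so the Studio can mask it. The sketch below illustrates that default-versus-explicit-type split; the helper overloads, the Var record, and the type names are assumptions, not the actual ConnectionContextHelper signatures.

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch only; the overloads and type names are assumptions.
class TypedParamSketch {

    record Var(String name, String value, String javaType) {}

    static void createParameter(List<Var> varList, String name, String value) {
        createParameter(varList, name, value, "String"); // default type when none is given
    }

    static void createParameter(List<Var> varList, String name, String value, String javaType) {
        varList.add(new Var(name, value, javaType));
    }

    public static void main(String[] args) {
        List<Var> vars = new ArrayList<>();
        createParameter(vars, "MyHCat_HCatalogHostName", "webhcat.example.com");
        createParameter(vars, "MyHCat_HCatalogPassword", "secret", "Password"); // masked by the Studio
        vars.forEach(System.out::println);
    }
}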

Example 15 with EHadoopParamName

Use of org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName in project tbd-studio-se by Talend.

In the class HadoopClusterContextHandler, the method matchContextForAttribues:

@Override
protected void matchContextForAttribues(Connection conn, IConnParamName paramName, String hadoopVariableName) {
    HadoopClusterConnection hadoopConn = (HadoopClusterConnection) conn;
    EHadoopParamName hadoopParam = (EHadoopParamName) paramName;
    switch(hadoopParam) {
        case NameNodeUri:
            hadoopConn.setNameNodeURI(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case JobTrackerUri:
            hadoopConn.setJobTrackerURI(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ResourceManager:
            hadoopConn.setJobTrackerURI(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ResourceManagerScheduler:
            hadoopConn.setRmScheduler(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case JobHistory:
            hadoopConn.setJobHistory(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case StagingDirectory:
            hadoopConn.setStagingDirectory(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case NameNodePrin:
            hadoopConn.setPrincipal(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case JTOrRMPrin:
            hadoopConn.setJtOrRmPrincipal(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case JobHistroyPrin:
            hadoopConn.setJobHistoryPrincipal(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case User:
            hadoopConn.setUserName(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case Group:
            hadoopConn.setGroup(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case Principal:
            hadoopConn.setKeytabPrincipal(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyTab:
            hadoopConn.setKeytab(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebHostName:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_HOSTNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebPort:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_PORT, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebUser:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_USERNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebJobResFolder:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_WEB_HCAT_JOB_RESULT_FOLDER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case HDIUser:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_HDI_USERNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case HDIPassword:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_HDI_PASSWORD, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzureHost:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_HOSTNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzureContainer:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_CONTAINER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzuresUser:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_USERNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzurePassword:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_PASSWORD, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KeyAzureDeployBlob:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_AZURE_DEPLOY_BLOB, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseHostName:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_HOST, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseAuthToken:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_AUTH_TOKEN, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseSparkPools:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_SPARK_POOLS, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseFsHostName:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_HOSTNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseFsContainer:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_CONTAINER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseFsUserName:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_USERNAME, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseFsPassword:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_FS_PASSWORD, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseDeployBlob:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SYNAPSE_DEPLOY_BLOB, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseDriverMemory:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DRIVER_MEMORY, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseDriverCores:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DRIVER_CORES, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SynapseExecutorMemory:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_EXECUTOR_MEMORY, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ClouderaNavigatorUsername:
            hadoopConn.setClouderaNaviUserName(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ClouderaNavigatorPassword:
            hadoopConn.setClouderaNaviPassword(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ClouderaNavigatorUrl:
            hadoopConn.setClouderaNaviUrl(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case ClouderaNavigatorMetadataUrl:
            hadoopConn.setClouderaNaviMetadataUrl(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTPassword:
            hadoopConn.setMaprTPassword(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTCluster:
            hadoopConn.setMaprTCluster(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTDuration:
            hadoopConn.setMaprTDuration(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTHomeDir:
            hadoopConn.setMaprTHomeDir(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case maprTHadoopLogin:
            hadoopConn.setMaprTHadoopLogin(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case GoogleProjectId:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_PROJECT_ID, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case GoogleClusterId:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_CLUSTER_ID, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case GoogleRegion:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_REGION, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case GoogleJarsBucket:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_GOOGLE_JARS_BUCKET, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case PathToGoogleCredentials:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_PATH_TO_GOOGLE_CREDENTIALS, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksEndpoint:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_ENDPOINT, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksCloudProvider:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLOUD_PROVIDER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DatabricksRunMode:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_RUN_MODE, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksClusterId:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_CLUSTER_ID, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksToken:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_TOKEN, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case DataBricksDBFSDepFolder:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_DATABRICKS_DBFS_DEP_FOLDER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebHDFSSSLTrustStorePath:
            hadoopConn.setWebHDFSSSLTrustStorePath(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case WebHDFSSSLTrustStorePassword:
            hadoopConn.setWebHDFSSSLTrustStorePassword(ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case setHadoopConf:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SET_HADOOP_CONF, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case hadoopConfSpecificJar:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_HADOOP_CONF_SPECIFIC_JAR, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case UseKnox:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_USE_KNOX, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case SparkMode:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_SPARK_MODE, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KnoxUrl:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_KNOX_URL, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KnoxUsername:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_KNOX_USER, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KnoxPassword:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_KNOX_PASSWORD, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        case KnoxDirectory:
            hadoopConn.getParameters().put(ConnParameterKeys.CONN_PARA_KEY_KNOX_DIRECTORY, ContextParameterUtils.getNewScriptCode(hadoopVariableName, LANGUAGE));
            break;
        default:
    }
}
Also used : EHadoopParamName(org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName) HadoopClusterConnection(org.talend.repository.model.hadoopcluster.HadoopClusterConnection)
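
Each case above overwrites a field of the HadoopClusterConnection (or an entry in its parameter map) with generated script code that references the context variable, so the concrete value is resolved only at run time. The sketch below shows the assumed shape of that script code for the Java context language; the real code delegates to ContextParameterUtils.getNewScriptCode(variableName, LANGUAGE).

// Illustrative sketch only; the "context." prefix is an assumption about the generated code.
class ScriptCodeSketch {

    static String newScriptCode(String variableName) {
        return "context." + variableName; // assumed form of the reference stored on the connection
    }

    public static void main(String[] args) {
        // e.g. the NameNodeUri stored on the cluster connection becomes a context reference:
        System.out.println(newScriptCode("MyCluster_NameNodeUri")); // context.MyCluster_NameNodeUri
    }
}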

Aggregations

EHadoopParamName (org.talend.metadata.managment.ui.utils.ExtendedNodeConnectionContextUtils.EHadoopParamName): 16
IConnParamName (org.talend.metadata.managment.ui.model.IConnParamName): 12
ArrayList (java.util.ArrayList): 8
Map (java.util.Map): 8
List (java.util.List): 4
IContextParameter (org.talend.core.model.process.IContextParameter): 4
ContextItem (org.talend.core.model.properties.ContextItem): 4
ConectionAdaptContextVariableModel (org.talend.core.ui.context.model.table.ConectionAdaptContextVariableModel): 4
HadoopClusterConnection (org.talend.repository.model.hadoopcluster.HadoopClusterConnection): 4
HCatalogConnection (org.talend.repository.model.hcatalog.HCatalogConnection): 4
HDFSConnection (org.talend.repository.model.hdfs.HDFSConnection): 4
NoSQLConnection (org.talend.repository.model.nosql.NoSQLConnection): 4
JSONArray (org.talend.utils.json.JSONArray): 3
JSONException (org.talend.utils.json.JSONException): 3
JSONObject (org.talend.utils.json.JSONObject): 3
JavaType (org.talend.core.model.metadata.types.JavaType): 1