Use of org.talend.designer.hdfsbrowse.model.HDFSFile in project tbd-studio-se by Talend:
the extractColumns method of the ExtractHDFSSchemaManager class.
public List<MetadataColumn> extractColumns(HDFSConnection connection, ClassLoader classLoader, IHDFSNode node) throws Exception {
    List<MetadataColumn> columns = new ArrayList<MetadataColumn>();
    if (connection == null || node == null || node.getType() != EHadoopFileTypes.FILE) {
        return columns;
    }
    HDFSFile file = (HDFSFile) node;
    HDFSConnectionBean connectionBean = HDFSModelUtil.convert2HDFSConnectionBean(connection);
    Object filePath = getHDFSFilePath(connectionBean, classLoader, file.getPath());
    IExtractSchemaService<HDFSConnection> service = ExtractHDFSMetaServiceFactory.getService(connectionBean, classLoader, filePath);
    return service.extractColumns(connection, node);
}
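As a rough usage sketch, a caller that already holds an HDFSConnection, a ClassLoader for the Hadoop distribution and a browsed IHDFSNode could delegate to the manager above. This is only an illustration: the getInstance() accessor, the import paths for HDFSConnection and MetadataColumn, and the omission of the manager's own import (its package is not shown in the excerpt) are assumptions, not verified against the project.

import java.util.List;

import org.talend.core.model.metadata.builder.connection.MetadataColumn;
import org.talend.designer.hdfsbrowse.model.IHDFSNode;
import org.talend.repository.model.hdfs.HDFSConnection;

public class GuessSchemaSketch {

    // Hypothetical caller: connection, classLoader and fileNode are assumed to be
    // supplied elsewhere, e.g. by the HDFS metadata wizard / HDFS browser.
    public List<MetadataColumn> guessSchema(HDFSConnection connection, ClassLoader classLoader, IHDFSNode fileNode)
            throws Exception {
        // getInstance() is an assumed accessor for the manager shown above.
        // Per the method above, a null connection or a non-FILE node yields an empty list.
        return ExtractHDFSSchemaManager.getInstance().extractColumns(connection, classLoader, fileNode);
    }
}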
Use of org.talend.designer.hdfsbrowse.model.HDFSFile in project tbd-studio-se by Talend:
the extractColumns method of the ExtractSequenceFileSchemaService class.
/*
* (non-Javadoc)
*
* @see
* org.talend.repository.hadoopcluster.service.IExtractSchemaService#extractColumns(org.talend.repository.model.
* hadoopcluster.HadoopSubConnection, org.talend.designer.hdfsbrowse.model.IHDFSNode)
*/
@Override
public List<MetadataColumn> extractColumns(HDFSConnection connection, IHDFSNode node) throws Exception {
    if (connection == null || node == null || node.getType() != EHadoopFileTypes.FILE) {
        return Collections.EMPTY_LIST;
    }
    HDFSFile file = (HDFSFile) node;
    file.setFileType(EHDFSFileTypes.SEQUENCE);
    return extractColumns(connection, file.getPath());
}
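A hypothetical sketch of invoking this service directly, rather than through ExtractHDFSMetaServiceFactory as the manager above does. The extractColumns(HDFSConnection, IHDFSNode) signature is the one shown above; the no-argument constructor, the import paths, and the omission of the service's own import (its package is not shown in the excerpt) are assumptions for illustration.

import java.util.List;

import org.talend.core.model.metadata.builder.connection.MetadataColumn;
import org.talend.designer.hdfsbrowse.model.IHDFSNode;
import org.talend.repository.model.hdfs.HDFSConnection;

public class SequenceFileSchemaSketch {

    // Hypothetical direct use of the sequence-file service; in practice the manager
    // obtains the instance from ExtractHDFSMetaServiceFactory.getService(...).
    public List<MetadataColumn> readSequenceFileSchema(HDFSConnection connection, IHDFSNode fileNode)
            throws Exception {
        // The no-arg constructor is assumed here for illustration.
        ExtractSequenceFileSchemaService service = new ExtractSequenceFileSchemaService();
        // As in the method above, a null connection or non-FILE node yields an empty list.
        return service.extractColumns(connection, fileNode);
    }
}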