Use of org.talend.designer.hdfsbrowse.model.HDFSFile in project tbd-studio-se by Talend.
From the class ExtractParquetFileSchemaService, method extractColumns:
@Override
public List<MetadataColumn> extractColumns(HDFSConnection connection, IHDFSNode node) throws Exception {
    if (connection == null || node == null || node.getType() != EHadoopFileTypes.FILE) {
        return Collections.emptyList();
    }
    HDFSFile file = (HDFSFile) node;
    file.setFileType(EHDFSFileTypes.PARQUET);
    return extractColumns(connection, file.getPath());
}
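A minimal caller sketch for the extractor above. It assumes ExtractParquetFileSchemaService has a public no-arg constructor and that the HDFSConnection is already configured; the helper name columnsForParquetNode is hypothetical:

    // Hypothetical wiring: delegate a browsed FILE node to the Parquet extractor.
    public List<MetadataColumn> columnsForParquetNode(HDFSConnection connection, IHDFSNode node) throws Exception {
        // Guard mirrors the service's own check: only plain files carry a schema.
        if (node == null || node.getType() != EHadoopFileTypes.FILE) {
            return Collections.emptyList();
        }
        return new ExtractParquetFileSchemaService().extractColumns(connection, node);
    }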
From the class HadoopOperationManager, method loadHDFSFolderChildren:
public void loadHDFSFolderChildren(HDFSConnectionBean connection, Object fileSystem, ClassLoader classLoader, HDFSPath parent, String path) throws HadoopServerException {
    if (connection == null || fileSystem == null || classLoader == null || parent == null || path == null) {
        return;
    }
    ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        // Resolve Hadoop classes against the dynamically provisioned classloader.
        Thread.currentThread().setContextClassLoader(classLoader);
        Object pathObj = ReflectionUtils.newInstance("org.apache.hadoop.fs.Path", classLoader, new Object[] { path });
        Object[] statusList = (Object[]) ReflectionUtils.invokeMethod(fileSystem, "listStatus", new Object[] { pathObj });
        if (statusList == null) {
            return;
        }
        for (Object status : statusList) {
            HDFSPath content = null;
            Object statusPath = ReflectionUtils.invokeMethod(status, "getPath", new Object[0]);
            if (statusPath == null) {
                continue;
            }
            String pathName = (String) ReflectionUtils.invokeMethod(statusPath, "getName", new Object[0]);
            if (StringUtils.isBlank(pathName)) {
                continue;
            }
            String absolutePath = ((URI) ReflectionUtils.invokeMethod(statusPath, "toUri", new Object[0])).toString();
            if (StringUtils.isBlank(absolutePath)) {
                continue;
            }
            String relativePath = URI.create(absolutePath).getPath();
            // Directories become HDFSFolder nodes; files become HDFSFile nodes carrying a metadata table.
            if ((Boolean) ReflectionUtils.invokeMethod(status, "isDir", new Object[0])) {
                content = new HDFSFolder(parent);
            } else {
                content = new HDFSFile(parent);
                content.setTable(createTable(trimFileExtention(pathName)));
            }
            content.setPath(relativePath);
            content.setValue(pathName);
            parent.addChild(content);
        }
    } catch (Exception e) {
        throw new HadoopServerException(e);
    } finally {
        // Always restore the caller's context classloader.
        Thread.currentThread().setContextClassLoader(oldClassLoader);
    }
}
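The method's core idiom is worth isolating: install the Hadoop-specific classloader as the thread's context classloader for the duration of the reflective calls, and restore the previous one in finally. A stand-alone sketch of that idiom in plain JDK reflection; getHadoopClassLoader and the path literal are placeholders, not project APIs:

    // Save, swap, and always restore the context classloader around reflective Hadoop calls.
    void withHadoopClassLoader(ClassLoader hadoopLoader) throws Exception {
        ClassLoader previous = Thread.currentThread().getContextClassLoader();
        try {
            Thread.currentThread().setContextClassLoader(hadoopLoader);
            Class<?> pathClass = Class.forName("org.apache.hadoop.fs.Path", true, hadoopLoader);
            Object pathObj = pathClass.getConstructor(String.class).newInstance("/user/demo");
            // ... reflective FileSystem calls, as in loadHDFSFolderChildren above ...
        } finally {
            Thread.currentThread().setContextClassLoader(previous);
        }
    }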
From the class FileSelectorTreeViewerProvider, method getColumnText:
public String getColumnText(Object element, int columnIndex) {
    IHDFSNode content = (IHDFSNode) element;
    EHadoopFileTypes type = content.getType();
    switch (columnIndex) {
    case 0:
        return StringUtils.trimToEmpty(content.getValue());
    case 1:
        return StringUtils.trimToEmpty(type.getValue());
    case 2:
        if (content instanceof HDFSFile) {
            HDFSFile file = (HDFSFile) content;
            return StringUtils.trimToEmpty(file.getSize());
        } else {
            return EMPTY_STRING;
        }
    default:
        return EMPTY_STRING;
    }
}
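getColumnText follows the JFace ITableLabelProvider contract: column 0 shows the node name, column 1 the EHadoopFileTypes label, and column 2 the size for HDFSFile entries only. A hedged sketch of how such a provider is typically attached to a three-column tree; the column titles and composite wiring are assumptions, not taken from the project:

    // Assumed JFace wiring for the label provider above.
    void createFileSelector(Composite parent) {
        TreeViewer viewer = new TreeViewer(parent, SWT.FULL_SELECTION);
        Tree tree = viewer.getTree();
        tree.setHeaderVisible(true);
        new TreeColumn(tree, SWT.LEFT).setText("Name"); // column 0: node value
        new TreeColumn(tree, SWT.LEFT).setText("Type"); // column 1: EHadoopFileTypes value
        new TreeColumn(tree, SWT.LEFT).setText("Size"); // column 2: HDFSFile size, empty for folders
        viewer.setLabelProvider(new FileSelectorTreeViewerProvider());
    }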
From the class ExtractTextFileSchemaService, method extractColumns:
@Override
public List<MetadataColumn> extractColumns(HDFSConnection connection, IHDFSNode node) throws Exception {
    List<MetadataColumn> columns = new ArrayList<MetadataColumn>();
    if (connection == null || node == null || node.getType() != EHadoopFileTypes.FILE) {
        return columns;
    }
    HDFSFile file = (HDFSFile) node;
    file.setFileType(EHDFSFileTypes.TEXT);
    InputStream inputStream = HadoopOperationManager.getInstance().getFileContent(HDFSModelUtil.convert2HDFSConnectionBean(connection), classLoader, file.getPath());
    return extractColumns(connection, inputStream, file.getTable().getName());
}
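Note that the stream returned by getFileContent is handed straight to the overloaded extractColumns and is never closed in the snippet. If that overload does not take ownership of the stream (the snippet does not show it), a defensive caller-side variant would look like this; it is an assumption, not the project's code:

    InputStream inputStream = HadoopOperationManager.getInstance().getFileContent(
            HDFSModelUtil.convert2HDFSConnectionBean(connection), classLoader, file.getPath());
    try {
        return extractColumns(connection, inputStream, file.getTable().getName());
    } finally {
        inputStream.close(); // assumption: the overload does not close the stream itself
    }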
From the class ExtractAVROFileSchemaService, method extractColumns:
@Override
public List<MetadataColumn> extractColumns(HDFSConnection connection, IHDFSNode node) throws Exception {
    List<MetadataColumn> columns = new ArrayList<MetadataColumn>();
    if (connection == null || node == null || node.getType() != EHadoopFileTypes.FILE) {
        return columns;
    }
    HDFSFile file = (HDFSFile) node;
    file.setFileType(EHDFSFileTypes.AVRO);
    InputStream inputStream = HadoopOperationManager.getInstance().getFileContent(HDFSModelUtil.convert2HDFSConnectionBean(connection), classLoader, file.getPath());
    return extractColumns(connection, inputStream, file.getTable().getName(), file.getValue());
}
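The three extractors share one shape: validate the node, stamp the EHDFSFileTypes value, and delegate to a format-specific overload. A hypothetical dispatch helper built on that symmetry; the no-arg constructors and the helper itself are illustrative assumptions, though the enum constants PARQUET, TEXT, and AVRO all appear in the snippets above:

    // Hypothetical: pick the schema extractor by file type.
    public List<MetadataColumn> extract(HDFSConnection connection, IHDFSNode node, EHDFSFileTypes type) throws Exception {
        switch (type) {
        case PARQUET:
            return new ExtractParquetFileSchemaService().extractColumns(connection, node);
        case TEXT:
            return new ExtractTextFileSchemaService().extractColumns(connection, node);
        case AVRO:
            return new ExtractAVROFileSchemaService().extractColumns(connection, node);
        default:
            return Collections.emptyList();
        }
    }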