Use of org.talend.designer.hdfsbrowse.model.HDFSConnectionBean in project tbd-studio-se by Talend: class CreateHDFSSchemaAction, method checkHDFSConnection.
/**
 * Verifies that a DFS handle can be obtained for the given HDFS connection,
 * showing a cancellable progress dialog while the attempt runs.
 *
 * @param connection the HDFS connection to validate; converted to an
 *            {@link HDFSConnectionBean} before the check
 * @return {@code true} when a DFS instance was obtained, {@code false} on
 *         failure (an error dialog with details is shown to the user)
 */
protected boolean checkHDFSConnection(final HDFSConnection connection) {
    // Single-element array so the anonymous runnable can write the outcome.
    final boolean[] result = new boolean[] { true };
    IRunnableWithProgress runnableWithProgress = new IRunnableWithProgress() {

        @Override
        public void run(final IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
            monitor.beginTask(Messages.getString("CreateHDFSSchemaAction.checkConnection"), IProgressMonitor.UNKNOWN); //$NON-NLS-1$
            final Object[] dfs = new Object[1];
            Display display = PlatformUI.getWorkbench().getDisplay();
            try {
                // Run the (potentially slow) connection attempt on this worker
                // thread. The previous code wrapped it in Display.syncExec, which
                // executed it on the UI thread and froze the workbench for the
                // whole attempt, defeating the purpose of the forked progress dialog.
                HDFSConnectionBean connectionBean = HDFSModelUtil.convert2HDFSConnectionBean(connection);
                dfs[0] = HadoopOperationManager.getInstance().getDFS(connectionBean);
            } catch (Exception e) {
                ExceptionHandler.process(e);
            } finally {
                monitor.done();
            }
            if (dfs[0] == null) {
                result[0] = false;
                // Dialogs must be opened on the SWT UI thread.
                display.syncExec(new Runnable() {

                    @Override
                    public void run() {
                        String mainMsg = Messages.getString("CreateHDFSSchemaAction.connectionFailure.mainMsg"); //$NON-NLS-1$
                        String detailMsg = Messages.getString("CreateHDFSSchemaAction.connectionFailure.detailMsg"); //$NON-NLS-1$
                        new ErrorDialogWidthDetailArea(PlatformUI.getWorkbench().getDisplay().getActiveShell(), Activator.PLUGIN_ID, mainMsg, detailMsg);
                    }
                });
            }
        }
    };
    ProgressMonitorDialog dialog = new ProgressMonitorDialog(PlatformUI.getWorkbench().getDisplay().getActiveShell());
    try {
        // fork=true: run on a worker thread; cancelable=true: user may abort.
        dialog.run(true, true, runnableWithProgress);
    } catch (Exception e) {
        result[0] = false;
        ExceptionHandler.process(e);
    }
    return result[0];
}
Use of org.talend.designer.hdfsbrowse.model.HDFSConnectionBean in project tbd-studio-se by Talend: class HDFSSchemaForm, method pressRetreiveSchemaButton.
/**
 * Handler for the "Retrieve Schema" button: validates the connection,
 * optionally confirms overwriting an existing schema, re-extracts the columns
 * from HDFS, and reloads them into the metadata table editor.
 */
private void pressRetreiveSchemaButton() {
    ConnectionStatus connectionStatus = checkConnection(false);
    if (connectionStatus == null) {
        return;
    }
    if (!connectionStatus.getResult()) {
        tableSettingsInfoLabel.setText(connectionStatus.getMessageException());
    } else {
        boolean doit = true;
        // Ask for confirmation before discarding an already-populated schema.
        if (tableEditorView.getMetadataEditor().getBeanCount() > 0) {
            doit = MessageDialog.openConfirm(getShell(),
                    Messages.getString("HDFSSchemaForm.title.confirmChange"), //$NON-NLS-1$
                    Messages.getString("HDFSSchemaForm.msg.changeSchema")); //$NON-NLS-1$
        }
        if (doit) {
            List<MetadataColumn> metadataColumns;
            HDFSConnectionBean connectionBean = getConnectionBean();
            try {
                ClassLoader classLoader = HadoopServerUtil.getClassLoader(connectionBean);
                // reconnect the HDFS
                HadoopOperationManager.getInstance().getDFS(connectionBean, classLoader);
                metadataColumns = ExtractHDFSSchemaManager.getInstance().extractColumns(getConnection(), classLoader, metadataTable);
            } catch (Exception e) {
                ExceptionMessageDialog.openError(getShell(),
                        Messages.getString("HDFSSchemaForm.checkSchema.errorDialog.title"), e.getMessage(), e); //$NON-NLS-1$
                ExceptionHandler.process(e);
                return;
            }
            tableEditorView.getMetadataEditor().removeAll();
            List<MetadataColumn> metadataColumnsValid = new ArrayList<MetadataColumn>();
            // Typed for-each replaces the raw Iterator + unchecked cast of the original.
            for (MetadataColumn metadataColumn : metadataColumns) {
                // Date columns without a pattern get a quoted default pattern.
                if (metadataColumn.getTalendType().equals(JavaTypesManager.DATE.getId())
                        || metadataColumn.getTalendType().equals(PerlTypesManager.DATE)) {
                    if ("".equals(metadataColumn.getPattern())) { //$NON-NLS-1$
                        metadataColumn.setPattern(TalendQuoteUtils.addQuotes("dd-MM-yyyy")); //$NON-NLS-1$
                    }
                }
                // Ensure the label is unique within the editor before adding it.
                String columnLabel = metadataColumn.getLabel();
                metadataColumn.setLabel(tableEditorView.getMetadataEditor().getNextGeneratedColumnName(columnLabel));
                metadataColumnsValid.add(metadataColumn);
            }
            tableEditorView.getMetadataEditor().addAll(metadataColumnsValid);
        }
    }
    updateRetreiveSchemaButton();
    changeTableNavigatorStatus(checkFieldsValue());
}
Use of org.talend.designer.hdfsbrowse.model.HDFSConnectionBean in project tbd-studio-se by Talend: class ExtractParquetFileSchemaService, method extractColumns.
/**
 * Reads the Parquet schema of the file at {@code filePath} on HDFS and maps
 * each Parquet field to a {@link MetadataColumn} (label, nullability, and a
 * matched Talend type).
 * <p>
 * All Hadoop/Parquet classes are accessed reflectively through the
 * distribution-specific {@code classLoader}, which is also installed as the
 * thread context class loader for the duration of the read.
 *
 * @param connection the HDFS connection to read from
 * @param filePath path of the Parquet file on HDFS
 * @return the extracted columns (possibly empty)
 * @throws Exception if the reflective Hadoop/Parquet calls fail
 */
private List<MetadataColumn> extractColumns(HDFSConnection connection, String filePath) throws Exception {
    List<MetadataColumn> columns = new ArrayList<MetadataColumn>();
    HDFSConnectionBean connectionBean = HDFSModelUtil.convert2HDFSConnectionBean(connection);
    // NOTE(review): the DFS handle is unused here — presumably obtaining it
    // establishes/validates the connection as a side effect; confirm before removing.
    Object fs = HadoopServerUtil.getDFS(connectionBean, classLoader);
    Object conf = HadoopServerUtil.getConfiguration(connectionBean, classLoader);
    Object pathObj = ReflectionUtils.newInstance("org.apache.hadoop.fs.Path", classLoader, new Object[] { filePath }); //$NON-NLS-1$
    ClassLoader oldClassLoaderLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(classLoader);
        Object fileReader = ReflectionUtils.invokeStaticMethod("org.apache.parquet.hadoop.ParquetFileReader", classLoader, "open", new Object[] { conf, pathObj });
        try {
            Object fileMetadata = ReflectionUtils.invokeMethod(fileReader, "getFileMetaData", new Object[] {});
            Object schema = ReflectionUtils.invokeMethod(fileMetadata, "getSchema", new Object[] {});
            List fields = (List) ReflectionUtils.invokeMethod(schema, "getFields", new Object[] {});
            Class RepetitionEnum = Class.forName("org.apache.parquet.schema.Type$Repetition", true, classLoader);
            for (Object field : fields) {
                String fieldName = (String) ReflectionUtils.invokeMethod(field, "getName", new Object[] {});
                Object repetition = ReflectionUtils.invokeMethod(field, "getRepetition", new Object[] {});
                // Only REQUIRED fields are non-nullable; OPTIONAL/REPEATED stay nullable.
                boolean isNullable = true;
                if (Enum.valueOf(RepetitionEnum, "REQUIRED") == repetition) {
                    isNullable = false;
                }
                MetadataColumn metadataColumn = ConnectionFactory.eINSTANCE.createMetadataColumn();
                metadataColumn.setLabel(fieldName);
                metadataColumn.setNullable(isNullable);
                handleFieldMatchedTalendType(field, metadataColumn);
                columns.add(metadataColumn);
            }
        } finally {
            // ParquetFileReader holds an open input stream; the original never
            // closed it, leaking a file handle per extraction.
            ReflectionUtils.invokeMethod(fileReader, "close", new Object[] {});
        }
    } finally {
        Thread.currentThread().setContextClassLoader(oldClassLoaderLoader);
    }
    return columns;
}
Use of org.talend.designer.hdfsbrowse.model.HDFSConnectionBean in project tbd-studio-se by Talend: class ExtractHDFSSchemaManager, method extractColumns.
/**
 * Extracts the schema columns for the HDFS file referenced by the given
 * metadata table, delegating the format-specific parsing to the service
 * selected by {@link ExtractHDFSMetaServiceFactory}.
 *
 * @param connection the HDFS connection; when {@code null}, no columns are returned
 * @param classLoader distribution-specific class loader used for Hadoop access
 * @param metadataTable table whose {@code HDFS_PATH} additional property names the file
 * @return the extracted columns, or an empty list when inputs are missing
 * @throws Exception if path resolution or extraction fails
 */
public List<MetadataColumn> extractColumns(HDFSConnection connection, ClassLoader classLoader, MetadataTable metadataTable) throws Exception {
    List<MetadataColumn> noColumns = new ArrayList<MetadataColumn>();
    // Without a connection or a table there is nothing to extract.
    if (connection == null || metadataTable == null) {
        return noColumns;
    }
    EMap<String, String> tableProps = metadataTable.getAdditionalProperties();
    String hdfsPath = tableProps.get(HDFSConstants.HDFS_PATH);
    if (StringUtils.isEmpty(hdfsPath)) {
        return noColumns;
    }
    HDFSConnectionBean bean = HDFSModelUtil.convert2HDFSConnectionBean(connection);
    Object resolvedPath = getHDFSFilePath(bean, classLoader, hdfsPath);
    // The factory picks the right extractor (e.g. Parquet, text) for the file.
    IExtractSchemaService<HDFSConnection> extractor = ExtractHDFSMetaServiceFactory.getService(bean, classLoader, resolvedPath);
    return extractor.extractColumns(connection, metadataTable);
}
Use of org.talend.designer.hdfsbrowse.model.HDFSConnectionBean in project tbd-studio-se by Talend: class FileSelectorTreeViewerProvider, method getElements.
/**
 * Supplies the root elements of the file-selector tree: a single synthetic
 * root folder when the input is an {@link HDFSConnectionBean}, otherwise
 * (or on failure) no elements.
 *
 * @param inputElement the viewer input, expected to be an HDFSConnectionBean
 * @return a one-element array holding the root folder, or {@code EMPTY_CONTENT}
 */
public Object[] getElements(Object inputElement) {
    // Any other input type yields an empty tree.
    if (!(inputElement instanceof HDFSConnectionBean)) {
        return EMPTY_CONTENT;
    }
    try {
        HDFSPath rootFolder = new HDFSFolder(this, (HDFSConnectionBean) inputElement);
        rootFolder.setValue(ROOT_PATH);
        rootFolder.setPath(ROOT_PATH);
        return new Object[] { rootFolder };
    } catch (Exception e) {
        // Log and fall through to an empty result rather than breaking the viewer.
        log.error(e);
        return EMPTY_CONTENT;
    }
}
Aggregations