Use of org.apache.atlas.hook.AtlasHookException in project atlas by apache.
The class SqoopHook, method publish:
@Override
public void publish(SqoopJobDataPublisher.Data data) throws AtlasHookException {
    try {
        Configuration atlasProperties = ApplicationProperties.get();
        String clusterName = atlasProperties.getString(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);

        AtlasEntity entDbStore   = toSqoopDBStoreEntity(data);
        AtlasEntity entHiveDb    = toHiveDatabaseEntity(clusterName, data.getHiveDB());
        AtlasEntity entHiveTable = data.getHiveTable() != null ? toHiveTableEntity(entHiveDb, data.getHiveTable()) : null;
        AtlasEntity entProcess   = toSqoopProcessEntity(entDbStore, entHiveDb, entHiveTable, data, clusterName);

        AtlasEntitiesWithExtInfo entities = new AtlasEntitiesWithExtInfo(entProcess);

        entities.addReferredEntity(entDbStore);
        entities.addReferredEntity(entHiveDb);

        if (entHiveTable != null) {
            entities.addReferredEntity(entHiveTable);
        }

        HookNotification message = new EntityCreateRequestV2(AtlasHook.getUser(), entities);

        AtlasHook.notifyEntities(Collections.singletonList(message), atlasProperties.getInt(HOOK_NUM_RETRIES, 3));
    } catch (Exception e) {
        LOG.error("SqoopHook.publish() failed", e);

        throw new AtlasHookException("SqoopHook.publish() failed.", e);
    }
}
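Because publish() declares the checked AtlasHookException, every caller has to handle it explicitly. Below is a minimal sketch of such a caller that treats lineage publication as best-effort; the class name SafeSqoopPublisher is illustrative, not part of Atlas or Sqoop, and the two "assumed package" imports are assumptions.

import org.apache.atlas.hook.AtlasHookException;
import org.apache.atlas.sqoop.hook.SqoopHook;  // assumed package
import org.apache.sqoop.SqoopJobDataPublisher; // assumed package
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical caller: publishes lineage but never fails the surrounding Sqoop job.
public class SafeSqoopPublisher {
    private static final Logger LOG = LoggerFactory.getLogger(SafeSqoopPublisher.class);

    public void publishSafely(SqoopHook hook, SqoopJobDataPublisher.Data data) {
        try {
            hook.publish(data);
        } catch (AtlasHookException e) {
            // Treat lineage publication as best-effort: log and move on.
            LOG.warn("Failed to publish Sqoop lineage to Atlas", e);
        }
    }
}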
Use of org.apache.atlas.hook.AtlasHookException in project incubator-atlas by apache.
The class HiveHook, method createOrUpdateEntities:
private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event,
        Entity entity, boolean skipTempTables, Table existTable) throws AtlasHookException {
    try {
        Database db = null;
        Table table = null;
        Partition partition = null;
        LinkedHashMap<Type, Referenceable> result = new LinkedHashMap<>();
        List<Referenceable> entities = new ArrayList<>();

        switch (entity.getType()) {
            case DATABASE:
                db = entity.getDatabase();
                break;
            case TABLE:
                table = entity.getTable();
                db = dgiBridge.hiveClient.getDatabase(table.getDbName());
                break;
            case PARTITION:
                partition = entity.getPartition();
                table = partition.getTable();
                db = dgiBridge.hiveClient.getDatabase(table.getDbName());
                break;
            default:
                LOG.info("{}: entity-type not handled by Atlas hook. Ignored", entity.getType());
        }

        if (db != null) {
            db = dgiBridge.hiveClient.getDatabase(db.getName());
        }

        if (db != null) {
            Referenceable dbEntity = dgiBridge.createDBInstance(db);
            entities.add(dbEntity);
            result.put(Type.DATABASE, dbEntity);

            Referenceable tableEntity = null;
            if (table != null) {
                if (existTable != null) {
                    table = existTable;
                } else {
                    table = dgiBridge.hiveClient.getTable(table.getDbName(), table.getTableName());
                }
                // Even when temp tables are skipped, external tables are still registered,
                // since their HDFS path is needed for temp-table lineage.
                if (skipTempTables && table.isTemporary() && !TableType.EXTERNAL_TABLE.equals(table.getTableType())) {
                    LOG.debug("Skipping temporary table registration {} since it is not an external table {} ", table.getTableName(), table.getTableType().name());
                } else {
                    tableEntity = dgiBridge.createTableInstance(dbEntity, table);
                    entities.add(tableEntity);
                    result.put(Type.TABLE, tableEntity);
                }
            }

            event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
        }
        return result;
    } catch (Exception e) {
        throw new AtlasHookException("HiveHook.createOrUpdateEntities() failed.", e);
    }
}
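The LinkedHashMap return value preserves insertion order, so the DATABASE entry always precedes the TABLE entry, and the TABLE entry is absent when a temporary, non-external table was skipped. A sketch of a caller inside the hook, where the helper name registerOutput is illustrative:

// Hypothetical helper inside HiveHook: register the entities for one output
// and hand back the table Referenceable, if one was created.
private Referenceable registerOutput(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity output)
        throws AtlasHookException {
    LinkedHashMap<Type, Referenceable> created = createOrUpdateEntities(dgiBridge, event, output, true, null);

    // DATABASE is present whenever the database could be resolved; TABLE is
    // null when a temporary, non-external table was skipped.
    return created.get(Type.TABLE);
}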
Use of org.apache.atlas.hook.AtlasHookException in project incubator-atlas by apache.
The class SqoopHook, method publish:
@Override
public void publish(SqoopJobDataPublisher.Data data) throws AtlasHookException {
    try {
        Configuration atlasProperties = ApplicationProperties.get();
        String clusterName = atlasProperties.getString(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);

        Referenceable dbStoreRef   = createDBStoreInstance(data);
        Referenceable dbRef        = createHiveDatabaseInstance(clusterName, data.getHiveDB());
        Referenceable hiveTableRef = createHiveTableInstance(clusterName, dbRef, data.getHiveTable(), data.getHiveDB());
        Referenceable procRef      = createSqoopProcessInstance(dbStoreRef, hiveTableRef, data, clusterName);

        int maxRetries = atlasProperties.getInt(HOOK_NUM_RETRIES, 3);
        HookNotification.HookNotificationMessage message =
                new HookNotification.EntityCreateRequest(AtlasHook.getUser(), dbStoreRef, dbRef, hiveTableRef, procRef);

        AtlasHook.notifyEntities(Arrays.asList(message), maxRetries);
    } catch (Exception e) {
        throw new AtlasHookException("SqoopHook.publish() failed.", e);
    }
}
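Note the contrast with the atlas version above: there the hook builds AtlasEntity objects and an EntityCreateRequestV2 with referred entities, while this incubator-atlas version passes Referenceable instances directly to a V1 EntityCreateRequest. Both versions resolve the same two settings from the Atlas configuration; a standalone sketch of that lookup, where the literal property keys and defaults are assumptions based on common Atlas hook configuration:

import org.apache.atlas.ApplicationProperties;
import org.apache.commons.configuration.Configuration;

public class HookConfigExample {
    public static void main(String[] args) throws Exception {
        Configuration atlasProperties = ApplicationProperties.get();

        // Assumed keys: ATLAS_CLUSTER_NAME -> "atlas.cluster.name" (default "primary"),
        // HOOK_NUM_RETRIES -> "atlas.hook.sqoop.numRetries" (default 3).
        String clusterName = atlasProperties.getString("atlas.cluster.name", "primary");
        int maxRetries = atlasProperties.getInt("atlas.hook.sqoop.numRetries", 3);

        System.out.println("cluster=" + clusterName + ", retries=" + maxRetries);
    }
}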
Use of org.apache.atlas.hook.AtlasHookException in project incubator-atlas by apache.
The class HiveMetaStoreBridge, method main:
public static void main(String[] args) throws AtlasHookException {
    try {
        Configuration atlasConf = ApplicationProperties.get();
        String[] atlasEndpoint = atlasConf.getStringArray(ATLAS_ENDPOINT);

        if (atlasEndpoint == null || atlasEndpoint.length == 0) {
            atlasEndpoint = new String[] { DEFAULT_DGI_URL };
        }

        AtlasClient atlasClient;
        if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) {
            String[] basicAuthUsernamePassword = AuthenticationUtil.getBasicAuthenticationInput();
            atlasClient = new AtlasClient(atlasEndpoint, basicAuthUsernamePassword);
        } else {
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            atlasClient = new AtlasClient(ugi, ugi.getShortUserName(), atlasEndpoint);
        }

        Options options = new Options();
        // The flag must be registered before parsing; with an empty Options,
        // "-failOnError" would be rejected as an unrecognized option.
        options.addOption("failOnError", false, "Stop the import on the first error");

        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);

        boolean failOnError = cmd.hasOption("failOnError");

        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(atlasConf, new HiveConf(), atlasClient);
        hiveMetaStoreBridge.importHiveMetadata(failOnError);
    } catch (Exception e) {
        throw new AtlasHookException("HiveMetaStoreBridge.main() failed.", e);
    }
}
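The authentication branch reads naturally as a helper: basic auth when Kerberos is disabled, otherwise the current UGI. A sketch extracting just that logic from main(), where the method name buildClient is illustrative:

// Hypothetical extraction of the client-construction logic shown above.
static AtlasClient buildClient(String[] atlasEndpoint) throws Exception {
    if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) {
        // Non-Kerberos deployments prompt for username/password.
        String[] basicAuthUsernamePassword = AuthenticationUtil.getBasicAuthenticationInput();
        return new AtlasClient(atlasEndpoint, basicAuthUsernamePassword);
    }
    // Kerberos deployments authenticate as the currently logged-in user.
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    return new AtlasClient(ugi, ugi.getShortUserName(), atlasEndpoint);
}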
Use of org.apache.atlas.hook.AtlasHookException in project incubator-atlas by apache.
The class HiveMetaStoreBridge, method createOrUpdateTableInstance:
private Referenceable createOrUpdateTableInstance(Referenceable dbReference, Referenceable tableReference,
        final Table hiveTable) throws AtlasHookException {
    LOG.info("Importing objects from {}.{}", hiveTable.getDbName(), hiveTable.getTableName());

    if (tableReference == null) {
        tableReference = new Referenceable(HiveDataTypes.HIVE_TABLE.getName());
    }

    String tableQualifiedName = getTableQualifiedName(clusterName, hiveTable);
    tableReference.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableQualifiedName);
    tableReference.set(AtlasClient.NAME, hiveTable.getTableName().toLowerCase());
    tableReference.set(AtlasClient.OWNER, hiveTable.getOwner());

    Date createDate = new Date();
    if (hiveTable.getTTable() != null) {
        try {
            createDate = getTableCreatedTime(hiveTable);
            LOG.debug("Setting create time to {} ", createDate);
            tableReference.set(CREATE_TIME, createDate);
        } catch (Exception ne) {
            LOG.error("Error while setting createTime for the table {} ", hiveTable.getCompleteName(), ne);
        }
    }

    Date lastAccessTime = createDate;
    if (hiveTable.getLastAccessTime() > 0) {
        lastAccessTime = new Date(hiveTable.getLastAccessTime() * MILLIS_CONVERT_FACTOR);
    }
    tableReference.set(LAST_ACCESS_TIME, lastAccessTime);
    tableReference.set("retention", hiveTable.getRetention());
    tableReference.set(COMMENT, hiveTable.getParameters().get(COMMENT));

    // add reference to the database
    tableReference.set(DB, dbReference);

    // add reference to the StorageDescriptor
    Referenceable sdReferenceable = fillStorageDesc(hiveTable.getSd(), tableQualifiedName,
            getStorageDescQFName(tableQualifiedName), tableReference.getId());
    tableReference.set(STORAGE_DESC, sdReferenceable);

    tableReference.set(PARAMETERS, hiveTable.getParameters());

    if (hiveTable.getViewOriginalText() != null) {
        tableReference.set("viewOriginalText", hiveTable.getViewOriginalText());
    }
    if (hiveTable.getViewExpandedText() != null) {
        tableReference.set("viewExpandedText", hiveTable.getViewExpandedText());
    }

    tableReference.set(TABLE_TYPE_ATTR, hiveTable.getTableType().name());
    tableReference.set("temporary", hiveTable.isTemporary());

    // add reference to the partition keys
    List<Referenceable> partKeys = getColumns(hiveTable.getPartitionKeys(), tableReference);
    tableReference.set("partitionKeys", partKeys);

    tableReference.set(COLUMNS, getColumns(hiveTable.getCols(), tableReference));

    return tableReference;
}
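Two conversions in this method are easy to miss: Hive reports lastAccessTime in seconds, so MILLIS_CONVERT_FACTOR (presumably 1000) scales it to the milliseconds java.util.Date expects, and getTableQualifiedName() produces the name that uniquely identifies the table in Atlas. A sketch of the likely scheme, where the exact format is an assumption based on the Atlas Hive model:

// Illustrative only: mimics what getTableQualifiedName() is expected to return,
// e.g. "sales.orders@primary" for table "orders" in db "sales" on cluster "primary".
static String tableQualifiedName(String clusterName, String dbName, String tableName) {
    return String.format("%s.%s@%s", dbName.toLowerCase(), tableName.toLowerCase(), clusterName);
}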