Usage example of org.apache.hadoop.hive.metastore.api.CreationMetadata in the Apache Hive project, taken from the class TestMetaStoreMaterializationsCacheCleaner, method testCleanerScenario1.
@Test
public void testCleanerScenario1() throws Exception {
// Scenario 1: same sequence of events as scenario 2, but the invalidation
// cache implementation is set to DISABLE. The cache is therefore expected
// to record nothing: every cleanup removes 0 entries and the returned
// invalidation info map is always empty.
Configuration conf = new Configuration();
conf.set("metastore.materializations.invalidation.impl", "DISABLE");
// create mock handler
final IHMSHandler handler = mock(IHMSHandler.class);
// initialize invalidation cache (set conf to disable)
MaterializationsInvalidationCache.get().init(conf, handler);
// This is a dummy test, invalidation cache is not supposed to
// record any information.
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, 1, 1);
int id = 2;
// NOTE(review): the BasicTxnInfo locals below (txn2, txn3, txn9, txn10) are
// never read afterwards; they look like leftovers from an earlier version.
BasicTxnInfo txn2 = createTxnInfo(DB_NAME, TBL_NAME_1, id);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, id, id);
// Create tbl2 (nothing to do)
id = 3;
BasicTxnInfo txn3 = createTxnInfo(DB_NAME, TBL_NAME_1, id);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, id, id);
// Cleanup at timestamp 0: cache is disabled, nothing was recorded, so
// nothing can be removed.
long removed = MaterializationsInvalidationCache.get().cleanup(0L);
Assert.assertEquals(0L, removed);
// Create mv1. The mocked Table is never handed to the cache API; only the
// CreationMetadata built below is.
Table mv1 = mock(Table.class);
when(mv1.getDbName()).thenReturn(DB_NAME);
when(mv1.getTableName()).thenReturn(MV_NAME_1);
CreationMetadata mockCM1 = new CreationMetadata(DB_NAME, MV_NAME_1, ImmutableSet.of(DB_NAME + "." + TBL_NAME_1, DB_NAME + "." + TBL_NAME_2));
// Valid txn list string (highWatermark=3; minOpenTxn=Long.MAX_VALUE)
mockCM1.setValidTxnList("3:" + Long.MAX_VALUE + "::");
when(mv1.getCreationMetadata()).thenReturn(mockCM1);
MaterializationsInvalidationCache.get().createMaterializedView(mockCM1.getDbName(), mockCM1.getTblName(), mockCM1.getTablesUsed(), mockCM1.getValidTxnList());
Map<String, Materialization> invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1));
// Disabled cache: no materialization information is tracked at all.
Assert.assertTrue(invalidationInfos.isEmpty());
id = 10;
BasicTxnInfo txn10 = createTxnInfo(DB_NAME, TBL_NAME_2, id);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, id, id);
id = 9;
BasicTxnInfo txn9 = createTxnInfo(DB_NAME, TBL_NAME_1, id);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, id, id);
// Cleanup at timestamp 8: in DEFAULT mode this would remove txn1-txn3
// (see scenario 2), but with the cache disabled nothing is removed.
removed = MaterializationsInvalidationCache.get().cleanup(8L);
Assert.assertEquals(0L, removed);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1));
Assert.assertTrue(invalidationInfos.isEmpty());
// Create mv2
Table mv2 = mock(Table.class);
when(mv2.getDbName()).thenReturn(DB_NAME);
when(mv2.getTableName()).thenReturn(MV_NAME_2);
CreationMetadata mockCM2 = new CreationMetadata(DB_NAME, MV_NAME_2, ImmutableSet.of(DB_NAME + "." + TBL_NAME_1, DB_NAME + "." + TBL_NAME_2));
// Valid txn list string (highWatermark=10; minOpenTxn=Long.MAX_VALUE)
mockCM2.setValidTxnList("10:" + Long.MAX_VALUE + "::");
when(mv2.getCreationMetadata()).thenReturn(mockCM2);
MaterializationsInvalidationCache.get().createMaterializedView(mockCM2.getDbName(), mockCM2.getTblName(), mockCM2.getTablesUsed(), mockCM2.getValidTxnList());
// NOTE(review): duplicate stubbing of getCreationMetadata(); redundant.
when(mv2.getCreationMetadata()).thenReturn(mockCM2);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1, MV_NAME_2));
Assert.assertTrue(invalidationInfos.isEmpty());
// Create tbl3 and issue further modifications, some out of txn-id order.
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_3, 11, 11);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_3, 18, 18);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, 14, 14);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, 17, 17);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, 16, 16);
// Cleanup at timestamp 16: disabled cache, still nothing to remove.
removed = MaterializationsInvalidationCache.get().cleanup(16L);
Assert.assertEquals(0L, removed);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1, MV_NAME_2));
Assert.assertTrue(invalidationInfos.isEmpty());
// Late notifications with txn ids lower than ones already seen.
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, 12, 12);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, 15, 15);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, 7, 7);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1, MV_NAME_2));
Assert.assertTrue(invalidationInfos.isEmpty());
// Cleanup at timestamp 20: disabled cache, nothing removed.
removed = MaterializationsInvalidationCache.get().cleanup(20L);
Assert.assertEquals(0L, removed);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1, MV_NAME_2));
Assert.assertTrue(invalidationInfos.isEmpty());
// Final cleanup at timestamp 24: disabled cache, nothing removed.
removed = MaterializationsInvalidationCache.get().cleanup(24L);
Assert.assertEquals(0L, removed);
}
Usage example of org.apache.hadoop.hive.metastore.api.CreationMetadata in the Apache Hive project, taken from the class TestMetaStoreMaterializationsCacheCleaner, method testCleanerScenario2.
@Test
public void testCleanerScenario2() throws Exception {
// Scenario 2: same sequence of events as scenario 1, but with the
// invalidation cache enabled (DEFAULT impl), so table modifications are
// recorded and cleanup actually prunes transaction entries.
Configuration conf = new Configuration();
conf.set("metastore.materializations.invalidation.impl", "DEFAULT");
// create mock handler
final IHMSHandler handler = mock(IHMSHandler.class);
// initialize invalidation cache (set conf to default)
MaterializationsInvalidationCache.get().init(conf, handler);
// Scenario consists of the following steps:
// Create tbl1
// (t = 1) Insert row in tbl1
// (t = 2) Insert row in tbl1
// Create tbl2
// (t = 3) Insert row in tbl2
// Cleanup (current = 4, duration = 4) -> Does nothing
// Create mv1
// (t = 10) Insert row in tbl2
// (t = 9) Insert row in tbl1 (out of order)
// Cleanup (current = 12, duration = 4) -> Removes txn1, txn2, txn3
// Create mv2
// Create tbl3
// (t = 11) Insert row in tbl3
// (t = 18) Insert row in tbl3
// (t = 14) Insert row in tbl1
// (t = 17) Insert row in tbl1
// (t = 16) Insert row in tbl2
// Cleanup (current = 20, duration = 4) -> Removes txn10, txn11
// (t = 12) Insert row in tbl1
// (t = 15) Insert row in tbl2
// (t = 7) Insert row in tbl2
// Cleanup (current = 24, duration = 4) -> Removes txn9, txn14, txn15, txn16, txn17, txn18
// Create tbl1 (nothing to do)
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, 1, 1);
int id = 2;
// NOTE(review): the BasicTxnInfo locals (txn2, txn3, txn9, txn10) are never
// read afterwards; they look like leftovers from an earlier test version.
BasicTxnInfo txn2 = createTxnInfo(DB_NAME, TBL_NAME_1, id);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, id, id);
// Create tbl2 (nothing to do)
id = 3;
// NOTE(review): txn3 is built for TBL_NAME_1 while the notification below
// targets TBL_NAME_2 — inconsistent, but harmless since txn3 is unused.
BasicTxnInfo txn3 = createTxnInfo(DB_NAME, TBL_NAME_1, id);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, id, id);
// Cleanup (current = 4, duration = 4) -> Does nothing
long removed = MaterializationsInvalidationCache.get().cleanup(0L);
Assert.assertEquals(0L, removed);
// Create mv1. The mocked Table is never handed to the cache API; only the
// CreationMetadata built below is.
Table mv1 = mock(Table.class);
when(mv1.getDbName()).thenReturn(DB_NAME);
when(mv1.getTableName()).thenReturn(MV_NAME_1);
CreationMetadata mockCM1 = new CreationMetadata(DB_NAME, MV_NAME_1, ImmutableSet.of(DB_NAME + "." + TBL_NAME_1, DB_NAME + "." + TBL_NAME_2));
// Valid txn list string (highWatermark=3; minOpenTxn=Long.MAX_VALUE)
mockCM1.setValidTxnList("3:" + Long.MAX_VALUE + "::");
when(mv1.getCreationMetadata()).thenReturn(mockCM1);
MaterializationsInvalidationCache.get().createMaterializedView(mockCM1.getDbName(), mockCM1.getTblName(), mockCM1.getTablesUsed(), mockCM1.getValidTxnList());
Map<String, Materialization> invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1));
// No source table modified past mv1's snapshot yet -> invalidation time 0.
Assert.assertEquals(0L, invalidationInfos.get(MV_NAME_1).getInvalidationTime());
id = 10;
BasicTxnInfo txn10 = createTxnInfo(DB_NAME, TBL_NAME_2, id);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, id, id);
id = 9;
BasicTxnInfo txn9 = createTxnInfo(DB_NAME, TBL_NAME_1, id);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, id, id);
// Cleanup (current = 12, duration = 4) -> Removes txn1, txn2, txn3
removed = MaterializationsInvalidationCache.get().cleanup(8L);
Assert.assertEquals(3L, removed);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1));
// txn9 (tbl1) is the earliest modification past mv1's snapshot (hwm=3).
Assert.assertEquals(9L, invalidationInfos.get(MV_NAME_1).getInvalidationTime());
// Create mv2
Table mv2 = mock(Table.class);
when(mv2.getDbName()).thenReturn(DB_NAME);
when(mv2.getTableName()).thenReturn(MV_NAME_2);
CreationMetadata mockCM2 = new CreationMetadata(DB_NAME, MV_NAME_2, ImmutableSet.of(DB_NAME + "." + TBL_NAME_1, DB_NAME + "." + TBL_NAME_2));
// Valid txn list string (highWatermark=10; minOpenTxn=Long.MAX_VALUE)
mockCM2.setValidTxnList("10:" + Long.MAX_VALUE + "::");
when(mv2.getCreationMetadata()).thenReturn(mockCM2);
MaterializationsInvalidationCache.get().createMaterializedView(mockCM2.getDbName(), mockCM2.getTblName(), mockCM2.getTablesUsed(), mockCM2.getValidTxnList());
// NOTE(review): duplicate stubbing of getCreationMetadata(); redundant.
when(mv2.getCreationMetadata()).thenReturn(mockCM2);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1, MV_NAME_2));
Assert.assertEquals(9L, invalidationInfos.get(MV_NAME_1).getInvalidationTime());
// mv2's snapshot (hwm=10) already covers txn9 and txn10 -> still valid.
Assert.assertEquals(0L, invalidationInfos.get(MV_NAME_2).getInvalidationTime());
// Create tbl3 (not referenced by either mv, so its txns do not invalidate them)
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_3, 11, 11);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_3, 18, 18);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, 14, 14);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, 17, 17);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, 16, 16);
// Cleanup (current = 20, duration = 4) -> Removes txn10, txn11
removed = MaterializationsInvalidationCache.get().cleanup(16L);
Assert.assertEquals(2L, removed);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1, MV_NAME_2));
Assert.assertEquals(9L, invalidationInfos.get(MV_NAME_1).getInvalidationTime());
// txn14 (tbl1) is now the earliest modification past mv2's snapshot.
Assert.assertEquals(14L, invalidationInfos.get(MV_NAME_2).getInvalidationTime());
// Late notifications with txn ids lower than ones already recorded.
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_1, 12, 12);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, 15, 15);
MaterializationsInvalidationCache.get().notifyTableModification(DB_NAME, TBL_NAME_2, 7, 7);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1, MV_NAME_2));
// txn7 (tbl2) predates txn9, so mv1's invalidation time moves back to 7.
Assert.assertEquals(7L, invalidationInfos.get(MV_NAME_1).getInvalidationTime());
// txn12 (tbl1) predates txn14, so mv2's invalidation time moves back to 12.
Assert.assertEquals(12L, invalidationInfos.get(MV_NAME_2).getInvalidationTime());
// Cleanup (current = 24, duration = 4) -> Removes txn9, txn14, txn15, txn16, txn17, txn18
removed = MaterializationsInvalidationCache.get().cleanup(20L);
Assert.assertEquals(6L, removed);
invalidationInfos = MaterializationsInvalidationCache.get().getMaterializationInvalidationInfo(DB_NAME, ImmutableList.of(MV_NAME_1, MV_NAME_2));
// Invalidation times are unchanged after cleanup: txn7 and txn12 are still
// reported, i.e. the cleanup did not drop them.
Assert.assertEquals(7L, invalidationInfos.get(MV_NAME_1).getInvalidationTime());
Assert.assertEquals(12L, invalidationInfos.get(MV_NAME_2).getInvalidationTime());
// Final cleanup (current = 28, duration = 4) -> nothing eligible remains.
removed = MaterializationsInvalidationCache.get().cleanup(24L);
Assert.assertEquals(0L, removed);
}
Usage example of org.apache.hadoop.hive.metastore.api.CreationMetadata in the Apache Hive project, taken from the class DDLTask, method createView.
/**
 * Create a new view, or replace an existing one when replace semantics apply
 * (explicit CREATE OR REPLACE, or replication replace-mode).
 *
 * @param db
 *          The database in question.
 * @param crtView
 *          Descriptor of the view we're creating.
 * @return Returns 0 when execution succeeds and above 0 if it fails.
 * @throws HiveException
 *           Throws this exception if an unexpected error occurs.
 */
private int createView(Hive db, CreateViewDesc crtView) throws HiveException {
// 'false' -> returns null instead of failing when the view does not exist
// yet; the null case branches to the create path below.
Table oldview = db.getTable(crtView.getViewName(), false);
if (oldview != null) {
// Check whether we are replicating
if (crtView.getReplicationSpec().isInReplicationScope()) {
// if this is a replication spec, then replace-mode semantics might apply.
if (crtView.getReplicationSpec().allowEventReplacementInto(oldview.getParameters())) {
// we replace existing view.
crtView.setReplace(true);
} else {
// No replacement: the existing view state is newer than this replicated
// update, so the event is skipped without error.
LOG.debug("DDLTask: Create View is skipped as view {} is newer than update",
crtView.getViewName());
return 0;
}
}
if (!crtView.isReplace()) {
// View already exists and this is not CREATE OR REPLACE -> error out.
throw new HiveException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(crtView.getViewName()));
}
// It should not be a materialized view on the replace path.
// NOTE(review): 'assert' is only enforced when the JVM runs with -ea.
assert !crtView.isMaterialized();
// Replace the existing view definition in place on the old Table object.
// NOTE(review): presumably crtView.getSchema() already excludes partition
// columns (they are applied separately via setPartCols below) — confirm.
oldview.setViewOriginalText(crtView.getViewOriginalText());
oldview.setViewExpandedText(crtView.getViewExpandedText());
oldview.setFields(crtView.getSchema());
if (crtView.getComment() != null) {
oldview.setProperty("comment", crtView.getComment());
}
if (crtView.getTblProps() != null) {
// Merge (not replace) new table properties over the existing ones.
oldview.getTTable().getParameters().putAll(crtView.getTblProps());
}
oldview.setPartCols(crtView.getPartCols());
if (crtView.getInputFormat() != null) {
oldview.setInputFormatClass(crtView.getInputFormat());
}
if (crtView.getOutputFormat() != null) {
oldview.setOutputFormatClass(crtView.getOutputFormat());
}
// Validate the assembled definition before persisting the alteration.
oldview.checkValidity(null);
db.alterTable(crtView.getViewName(), oldview, null);
addIfAbsentByName(new WriteEntity(oldview, WriteEntity.WriteType.DDL_NO_LOCK));
} else {
// We create new view
Table tbl = crtView.toTable(conf);
// We set the signature for the view if it is a materialized view
if (tbl.isMaterializedView()) {
CreationMetadata cm = new CreationMetadata(tbl.getDbName(), tbl.getTableName(), ImmutableSet.copyOf(crtView.getTablesUsed()));
// Snapshot the current valid-txn list so later queries can decide whether
// the materialized view has gone stale.
cm.setValidTxnList(conf.get(ValidTxnList.VALID_TXNS_KEY));
tbl.getTTable().setCreationMetadata(cm);
}
db.createTable(tbl, crtView.getIfNotExists());
addIfAbsentByName(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
// set lineage info
DataContainer dc = new DataContainer(tbl.getTTable());
queryState.getLineageState().setLineage(new Path(crtView.getViewName()), dc, tbl.getCols());
}
return 0;
}
Usage example of org.apache.hadoop.hive.metastore.api.CreationMetadata in the Apache Hive project, taken from the class MaterializedViewTask, method execute.
@Override
public int execute(DriverContext driverContext) {
  // Nothing to do while EXPLAIN ANALYZE is collecting runtime statistics.
  if (driverContext.getCtx().getExplainAnalyze() == AnalyzeState.RUNNING) {
    return 0;
  }
  try {
    if (getWork().isRetrieveAndInclude()) {
      // Load the materialized view from the metastore and register it in the
      // in-memory registry so it becomes available for query rewriting.
      Hive db = Hive.get(conf);
      Table mvTable = db.getTable(getWork().getViewName());
      HiveMaterializedViewsRegistry.get().createMaterializedView(db.getConf(), mvTable);
    } else if (getWork().isDisableRewrite()) {
      // Rewriting disabled: drop the view from the registry. The name is
      // expected to be qualified as "<db>.<table>".
      String[] names = getWork().getViewName().split("\\.");
      HiveMaterializedViewsRegistry.get().dropMaterializedView(names[0], names[1]);
    } else if (getWork().isUpdateCreationMetadata()) {
      // Refresh the creation signature: keep the same source-table set but
      // snapshot the current valid-txn list for staleness checks.
      Hive db = Hive.get(conf);
      Table mvTable = db.getTable(getWork().getViewName());
      CreationMetadata cm = new CreationMetadata(mvTable.getDbName(), mvTable.getTableName(),
          ImmutableSet.copyOf(mvTable.getCreationMetadata().getTablesUsed()));
      cm.setValidTxnList(conf.get(ValidTxnList.VALID_TXNS_KEY));
      db.updateCreationMetadata(mvTable.getDbName(), mvTable.getTableName(), cm);
    }
  } catch (HiveException e) {
    // Best-effort update: do not fail the task, but log at WARN (was DEBUG)
    // so a stale registry / creation metadata is visible in default logs.
    LOG.warn("Exception during materialized view cache update", e);
  }
  return 0;
}
Usage example of org.apache.hadoop.hive.metastore.api.CreationMetadata in the Apache Hive project, taken from the class TestGetTableMeta, method createTable.
private Table createTable(String dbName, String tableName, TableType type) throws Exception {
  // Assemble a minimal two-column table of the requested type.
  Table table = new TableBuilder()
      .setDbName(dbName)
      .setTableName(tableName)
      .addCol("id", "int")
      .addCol("name", "string")
      .setType(type.name())
      .build();
  switch (type) {
    case MATERIALIZED_VIEW:
      // Materialized views must carry creation metadata; no source tables here.
      table.setCreationMetadata(new CreationMetadata(dbName, tableName, ImmutableSet.of()));
      break;
    case EXTERNAL_TABLE:
      // External tables are flagged through the table parameter map.
      table.getParameters().put("EXTERNAL", "true");
      break;
    default:
      // Other table types need no extra decoration.
      break;
  }
  return table;
}
End of aggregated CreationMetadata usage examples.