use of co.cask.cdap.data2.metadata.store.MetadataStore in project cdap by caskdata.
the class FileStreamAdmin method updateProperties.
private StreamProperties updateProperties(StreamId streamId, StreamProperties properties) throws Exception {
  StreamConfig config = getConfig(streamId);
  StreamConfig.Builder builder = StreamConfig.builder(config);
  if (properties.getTTL() != null) {
    builder.setTTL(properties.getTTL());
  }
  if (properties.getFormat() != null) {
    builder.setFormatSpec(properties.getFormat());
  }
  if (properties.getNotificationThresholdMB() != null) {
    builder.setNotificationThreshold(properties.getNotificationThresholdMB());
  }
  // Update the stream description
  String description = properties.getDescription();
  if (description != null) {
    streamMetaStore.addStream(streamId, description);
  }
  final StreamConfig newConfig = builder.build();
  impersonator.doAs(streamId, new Callable<Void>() {
    @Override
    public Void call() throws Exception {
      writeConfig(newConfig);
      return null;
    }
  });
  // Update system metadata for the stream
  SystemMetadataWriter systemMetadataWriter =
    new StreamSystemMetadataWriter(metadataStore, streamId, newConfig, description);
  systemMetadataWriter.write();
  return new StreamProperties(config.getTTL(), config.getFormat(), config.getNotificationThresholdMB());
}
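Because the builder only copies fields that are non-null on the incoming StreamProperties, callers can perform partial updates. A minimal sketch of such a call site follows; the stream name and the exact StreamProperties constructor arguments are assumptions for illustration (some CDAP versions also accept a description argument):

  // Hypothetical call site: update only the TTL, leaving the format and
  // notification threshold unchanged (null fields are ignored above).
  StreamId streamId = new StreamId("default", "purchases");      // assumed namespace and stream name
  StreamProperties ttlOnly = new StreamProperties(86_400_000L,   // new TTL in milliseconds
                                                  null,          // keep the existing format
                                                  null);         // keep the existing threshold
  StreamProperties updated = updateProperties(streamId, ttlOnly);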
use of co.cask.cdap.data2.metadata.store.MetadataStore in project cdap by caskdata.
the class DatasetAdminService method writeSystemMetadata.
private void writeSystemMetadata(DatasetId datasetInstanceId, final DatasetSpecification spec,
                                 DatasetProperties props, final DatasetTypeMeta typeMeta,
                                 final DatasetType type, final DatasetContext context,
                                 boolean existing, UserGroupInformation ugi) throws IOException {
  // Add system metadata for user datasets only
  if (DatasetsUtil.isUserDataset(datasetInstanceId)) {
    Dataset dataset = null;
    try {
      try {
        dataset = ImpersonationUtils.doAs(ugi, () -> type.getDataset(context, spec, DatasetDefinition.NO_ARGUMENTS));
      } catch (Exception e) {
        LOG.warn("Exception while instantiating Dataset {}", datasetInstanceId, e);
      }
      // Write whatever system metadata can be derived, even if the instantiation above failed
      SystemMetadataWriter systemMetadataWriter;
      if (existing) {
        systemMetadataWriter = new DatasetSystemMetadataWriter(metadataStore, datasetInstanceId, props,
                                                               dataset, typeMeta.getName(), spec.getDescription());
      } else {
        long createTime = System.currentTimeMillis();
        systemMetadataWriter = new DatasetSystemMetadataWriter(metadataStore, datasetInstanceId, props,
                                                               createTime, dataset, typeMeta.getName(),
                                                               spec.getDescription());
      }
      systemMetadataWriter.write();
    } finally {
      if (dataset != null) {
        dataset.close();
      }
    }
  }
}
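The guard on DatasetsUtil.isUserDataset is what restricts system metadata writes to user datasets. As a rough sketch only, assuming the check simply excludes datasets in the system namespace (the real implementation may apply additional rules), it could look like this:

  // Assumed behavior only: treat anything outside the system namespace as a user dataset.
  public static boolean isUserDataset(DatasetId datasetId) {
    return !NamespaceId.SYSTEM.getNamespace().equals(datasetId.getNamespace());
  }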
use of co.cask.cdap.data2.metadata.store.MetadataStore in project cdap by caskdata.
the class LineageAdminTest method testSimpleLoopLineage.
@Test
public void testSimpleLoopLineage() throws Exception {
  // Lineage for D1 -> P1 -> D2 -> P2 -> D3 -> P3 -> D4
  //             |                 |
  //             |                 V
  //             |<-----------------
  //
  LineageStore lineageStore = new LineageStore(getTxExecFactory(), getDatasetFramework(),
                                               NamespaceId.DEFAULT.dataset("testSimpleLoopLineage"));
  Store store = getInjector().getInstance(Store.class);
  MetadataStore metadataStore = getInjector().getInstance(MetadataStore.class);
  LineageAdmin lineageAdmin = new LineageAdmin(lineageStore, store, metadataStore, new NoOpEntityExistenceVerifier());

  // Add accesses
  addRuns(store, run1, run2, run3, run4, run5);
  // It is okay to use the current time here since access time is ignored during assertions
  lineageStore.addAccess(run1, dataset1, AccessType.READ, System.currentTimeMillis(), flowlet1);
  lineageStore.addAccess(run1, dataset2, AccessType.WRITE, System.currentTimeMillis(), flowlet1);
  lineageStore.addAccess(run2, dataset2, AccessType.READ, System.currentTimeMillis(), flowlet2);
  lineageStore.addAccess(run2, dataset1, AccessType.WRITE, System.currentTimeMillis(), flowlet2);
  lineageStore.addAccess(run2, dataset3, AccessType.WRITE, System.currentTimeMillis(), flowlet2);
  lineageStore.addAccess(run3, dataset3, AccessType.READ, System.currentTimeMillis());
  lineageStore.addAccess(run3, dataset4, AccessType.WRITE, System.currentTimeMillis());

  Lineage expectedLineage = new Lineage(ImmutableSet.of(
    new Relation(dataset2, program1, AccessType.WRITE, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset1, program1, AccessType.READ, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset1, program2, AccessType.WRITE, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset2, program2, AccessType.READ, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset3, program2, AccessType.WRITE, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset4, program3, AccessType.WRITE, twillRunId(run3), emptySet()),
    new Relation(dataset3, program3, AccessType.READ, twillRunId(run3), emptySet())));

  // Lineage for D1
  Assert.assertEquals(expectedLineage, lineageAdmin.computeLineage(dataset1, 500, 20000, 100));
  // Lineage for D2
  Assert.assertEquals(expectedLineage, lineageAdmin.computeLineage(dataset2, 500, 20000, 100));

  // Lineage for D1 for one level: D1 -> P1 -> D2 -> P2 -> D3
  //                               |                 |
  //                               |                 V
  //                               |<-----------------
  //
  Lineage oneLevelLineage = lineageAdmin.computeLineage(dataset1, 500, 20000, 1);
  Assert.assertEquals(ImmutableSet.of(
    new Relation(dataset2, program1, AccessType.WRITE, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset1, program1, AccessType.READ, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset1, program2, AccessType.WRITE, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset2, program2, AccessType.READ, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset3, program2, AccessType.WRITE, twillRunId(run2), toSet(flowlet2))),
    oneLevelLineage.getRelations());
}
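The numeric arguments passed to computeLineage above are a time range and a traversal depth. A hypothetical, more explicit form of the same call (local variable names chosen here only for readability):

  long scanStartMillis = 500;   // start of the time range to scan for program runs
  long scanEndMillis = 20000;   // end of the time range
  int levels = 100;             // how many hops to follow outward from the starting dataset
  Lineage lineage = lineageAdmin.computeLineage(dataset1, scanStartMillis, scanEndMillis, levels);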
use of co.cask.cdap.data2.metadata.store.MetadataStore in project cdap by caskdata.
the class LineageAdminTest method testDirectCycleTwoRuns.
@Test
public void testDirectCycleTwoRuns() throws Exception {
  // Lineage for:
  //
  // D1 -> P1 (run1)
  //
  // D1 <- P1 (run2)
  //
  LineageStore lineageStore = new LineageStore(getTxExecFactory(), getDatasetFramework(),
                                               NamespaceId.DEFAULT.dataset("testDirectCycleTwoRuns"));
  Store store = getInjector().getInstance(Store.class);
  MetadataStore metadataStore = getInjector().getInstance(MetadataStore.class);
  LineageAdmin lineageAdmin = new LineageAdmin(lineageStore, store, metadataStore, new NoOpEntityExistenceVerifier());

  // Add accesses
  addRuns(store, run1, run2, run3, run4, run5);
  // It is okay to use the current time here since access time is ignored during assertions
  lineageStore.addAccess(run1, dataset1, AccessType.READ, System.currentTimeMillis(), flowlet1);
  // The write happens in a different run of the same program
  lineageStore.addAccess(new ProgramRunId(run1.getNamespace(), run1.getApplication(), run1.getParent().getType(),
                                          run1.getProgram(), run2.getEntityName()),
                         dataset1, AccessType.WRITE, System.currentTimeMillis(), flowlet1);

  Lineage expectedLineage = new Lineage(ImmutableSet.of(
    new Relation(dataset1, program1, AccessType.READ, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset1, program1, AccessType.WRITE, twillRunId(run2), toSet(flowlet1))));
  Assert.assertEquals(expectedLineage, lineageAdmin.computeLineage(dataset1, 500, 20000, 100));
}
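The inline ProgramRunId construction above reuses run1's program coordinates but substitutes run2's run id. A small helper with the same effect, named hypothetically for clarity and built only from the calls already used in the test, could read:

  // Hypothetical helper: the same program as 'base', but with the run id taken from 'other'.
  private static ProgramRunId sameProgramDifferentRun(ProgramRunId base, ProgramRunId other) {
    return new ProgramRunId(base.getNamespace(), base.getApplication(),
                            base.getParent().getType(), base.getProgram(), other.getEntityName());
  }

With such a helper, the write access above would become lineageStore.addAccess(sameProgramDifferentRun(run1, run2), dataset1, AccessType.WRITE, System.currentTimeMillis(), flowlet1).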
use of co.cask.cdap.data2.metadata.store.MetadataStore in project cdap by caskdata.
the class LineageAdminTest method testBranchLoopLineage.
@Test
public void testBranchLoopLineage() throws Exception {
  // Lineage for:
  //
  // |--------------------------------------|
  // |                                      |
  // |                                      |
  // |     -> D4       -> D5 -> P3 -> D6 -> P5
  // |     |           |                    ^
  // V     |           |                    |
  // D1 -> P1 -> D2 -> P2 -> D3 ------------>|
  //       |     |           |
  //       |     |           |
  // S1 -->|     ------------------> P4 -> D7
  LineageStore lineageStore = new LineageStore(getTxExecFactory(), getDatasetFramework(),
                                               NamespaceId.DEFAULT.dataset("testBranchLoopLineage"));
  Store store = getInjector().getInstance(Store.class);
  MetadataStore metadataStore = getInjector().getInstance(MetadataStore.class);
  LineageAdmin lineageAdmin = new LineageAdmin(lineageStore, store, metadataStore, new NoOpEntityExistenceVerifier());

  // Add accesses
  addRuns(store, run1, run2, run3, run4, run5);
  // It is okay to use the current time here since access time is ignored during assertions
  lineageStore.addAccess(run1, stream1, AccessType.READ, System.currentTimeMillis(), flowlet1);
  lineageStore.addAccess(run1, dataset1, AccessType.READ, System.currentTimeMillis(), flowlet1);
  lineageStore.addAccess(run1, dataset2, AccessType.WRITE, System.currentTimeMillis(), flowlet1);
  lineageStore.addAccess(run1, dataset4, AccessType.WRITE, System.currentTimeMillis(), flowlet1);
  lineageStore.addAccess(run2, dataset2, AccessType.READ, System.currentTimeMillis(), flowlet2);
  lineageStore.addAccess(run2, dataset3, AccessType.WRITE, System.currentTimeMillis(), flowlet2);
  lineageStore.addAccess(run2, dataset5, AccessType.WRITE, System.currentTimeMillis(), flowlet2);
  lineageStore.addAccess(run3, dataset5, AccessType.READ, System.currentTimeMillis());
  lineageStore.addAccess(run3, dataset6, AccessType.WRITE, System.currentTimeMillis());
  lineageStore.addAccess(run4, dataset2, AccessType.READ, System.currentTimeMillis());
  lineageStore.addAccess(run4, dataset3, AccessType.READ, System.currentTimeMillis());
  lineageStore.addAccess(run4, dataset7, AccessType.WRITE, System.currentTimeMillis());
  lineageStore.addAccess(run5, dataset3, AccessType.READ, System.currentTimeMillis());
  lineageStore.addAccess(run5, dataset6, AccessType.READ, System.currentTimeMillis());
  lineageStore.addAccess(run5, dataset1, AccessType.WRITE, System.currentTimeMillis());

  Lineage expectedLineage = new Lineage(ImmutableSet.of(
    new Relation(stream1, program1, AccessType.READ, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset1, program1, AccessType.READ, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset2, program1, AccessType.WRITE, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset4, program1, AccessType.WRITE, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset2, program2, AccessType.READ, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset3, program2, AccessType.WRITE, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset5, program2, AccessType.WRITE, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset5, program3, AccessType.READ, twillRunId(run3), emptySet()),
    new Relation(dataset6, program3, AccessType.WRITE, twillRunId(run3), emptySet()),
    new Relation(dataset2, program4, AccessType.READ, twillRunId(run4), emptySet()),
    new Relation(dataset3, program4, AccessType.READ, twillRunId(run4), emptySet()),
    new Relation(dataset7, program4, AccessType.WRITE, twillRunId(run4), emptySet()),
    new Relation(dataset3, program5, AccessType.READ, twillRunId(run5), emptySet()),
    new Relation(dataset6, program5, AccessType.READ, twillRunId(run5), emptySet()),
    new Relation(dataset1, program5, AccessType.WRITE, twillRunId(run5), emptySet())));

  // Lineage for D1
  Assert.assertEquals(expectedLineage, lineageAdmin.computeLineage(dataset1, 500, 20000, 100));
  // Lineage for D5
  Assert.assertEquals(expectedLineage, lineageAdmin.computeLineage(dataset5, 500, 20000, 100));
  // Lineage for D7
  Assert.assertEquals(expectedLineage, lineageAdmin.computeLineage(dataset7, 500, 20000, 100));
  // Lineage for S1
  Assert.assertEquals(expectedLineage, lineageAdmin.computeLineage(stream1, 500, 20000, 100));

  // Lineage for D5 for one level
  //
  //        -> D5 -> P3 -> D6
  //        |
  //        |
  // D2 -> P2 -> D3
  Lineage oneLevelLineage = lineageAdmin.computeLineage(dataset5, 500, 20000, 1);
  Assert.assertEquals(ImmutableSet.of(
    new Relation(dataset2, program2, AccessType.READ, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset3, program2, AccessType.WRITE, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset5, program2, AccessType.WRITE, twillRunId(run2), toSet(flowlet2)),
    new Relation(dataset5, program3, AccessType.READ, twillRunId(run3), emptySet()),
    new Relation(dataset6, program3, AccessType.WRITE, twillRunId(run3), emptySet())),
    oneLevelLineage.getRelations());

  // Lineage for S1 for one level
  //
  //        -> D4
  //        |
  //        |
  // D1 -> P1 -> D2
  //        |
  //        |
  // S1 --->|
  oneLevelLineage = lineageAdmin.computeLineage(stream1, 500, 20000, 1);
  Assert.assertEquals(ImmutableSet.of(
    new Relation(stream1, program1, AccessType.READ, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset1, program1, AccessType.READ, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset2, program1, AccessType.WRITE, twillRunId(run1), toSet(flowlet1)),
    new Relation(dataset4, program1, AccessType.WRITE, twillRunId(run1), toSet(flowlet1))),
    oneLevelLineage.getRelations());
}
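The toSet and emptySet calls in these tests are small helpers defined elsewhere in LineageAdminTest. Plausible definitions are sketched below purely as an assumption about their behavior (collect the given accessors into an immutable set, and return an empty accessor set); the actual test class may define them differently:

  // Assumed helper implementations, not taken from the original test class.
  @SafeVarargs
  private static <T> Set<T> toSet(T... elements) {
    return ImmutableSet.copyOf(elements);
  }

  private static Set<NamespacedEntityId> emptySet() {
    return ImmutableSet.of();
  }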