Use of co.cask.cdap.security.auth.context.AuthenticationTestContext in project cdap by caskdata.
The class StreamInputFormatTest, method testStreamRecordReader:
@Test
public void testStreamRecordReader() throws Exception {
  File inputDir = tmpFolder.newFolder();
  File partition = new File(inputDir, "1.1000");
  partition.mkdirs();
  File eventFile = new File(partition, "bucket.1.0." + StreamFileType.EVENT.getSuffix());
  File indexFile = new File(partition, "bucket.1.0." + StreamFileType.INDEX.getSuffix());

  // write 1 event
  StreamDataFileWriter writer = new StreamDataFileWriter(Files.newOutputStreamSupplier(eventFile),
                                                         Files.newOutputStreamSupplier(indexFile), 100L);
  writer.append(StreamFileTestUtils.createEvent(1000, "test"));
  writer.flush();

  // get splits from the input format. Expect to get 2 splits:
  // one from 0 to some offset and one from that offset to Long.MAX_VALUE.
  Configuration conf = new Configuration();
  TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
  AbstractStreamInputFormat.setStreamId(conf, DUMMY_ID);
  AbstractStreamInputFormat.setStreamPath(conf, inputDir.toURI());
  AbstractStreamInputFormat format = new AbstractStreamInputFormat() {
    @Override
    public AuthorizationEnforcer getAuthorizationEnforcer(TaskAttemptContext context) {
      return new NoOpAuthorizer();
    }

    @Override
    public AuthenticationContext getAuthenticationContext(TaskAttemptContext context) {
      return new AuthenticationTestContext();
    }
  };
  List<InputSplit> splits = format.getSplits(new JobContextImpl(new JobConf(conf), new JobID()));
  Assert.assertEquals(2, splits.size());

  // write another event so that the 2nd split has something to read
  writer.append(StreamFileTestUtils.createEvent(1001, "test"));
  writer.close();

  // create a record reader for the 2nd split
  StreamRecordReader<LongWritable, StreamEvent> recordReader =
    new StreamRecordReader<>(new IdentityStreamEventDecoder(), new NoOpAuthorizer(),
                             new AuthenticationTestContext(), DUMMY_ID);
  recordReader.initialize(splits.get(1), context);

  // check that we read the 2nd stream event
  Assert.assertTrue(recordReader.nextKeyValue());
  StreamEvent output = recordReader.getCurrentValue();
  Assert.assertEquals(1001, output.getTimestamp());
  Assert.assertEquals("test", Bytes.toString(output.getBody()));

  // check that there is nothing more to read
  Assert.assertFalse(recordReader.nextKeyValue());
}
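Both stream tests on this page build the same anonymous AbstractStreamInputFormat with no-op security. A hypothetical helper (a minimal sketch, assuming the only abstract security hooks are the two methods overridden above) could remove that duplication:

// Hypothetical test helper, not part of the cdap sources shown here; it
// assumes AbstractStreamInputFormat's security hooks are exactly the two
// methods overridden in the tests on this page.
private static AbstractStreamInputFormat createTestInputFormat() {
  return new AbstractStreamInputFormat() {
    @Override
    public AuthorizationEnforcer getAuthorizationEnforcer(TaskAttemptContext context) {
      // tests bypass authorization entirely
      return new NoOpAuthorizer();
    }

    @Override
    public AuthenticationContext getAuthenticationContext(TaskAttemptContext context) {
      // fixed test principal instead of a real authentication service
      return new AuthenticationTestContext();
    }
  };
}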
Use of co.cask.cdap.security.auth.context.AuthenticationTestContext in project cdap by caskdata.
The class ProgramGenerationStageTest, method testProgramGenerationForToyApp:
@Test
public void testProgramGenerationForToyApp() throws Exception {
  cConf.set(Constants.AppFabric.OUTPUT_DIR, "programs");
  LocationFactory lf = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  // have to do this since we are not going through the route of create namespace -> deploy application;
  // in real scenarios, the namespace directory would already be created
  Location namespaceLocation = lf.create(DefaultId.APPLICATION.getNamespace());
  Locations.mkdirsIfNotExists(namespaceLocation);
  LocationFactory jarLf = new LocalLocationFactory(TEMP_FOLDER.newFolder());
  Location appArchive = AppJarHelper.createDeploymentJar(jarLf, ToyApp.class);
  ApplicationSpecification appSpec = Specifications.from(new ToyApp());
  ApplicationSpecificationAdapter adapter = ApplicationSpecificationAdapter.create(new ReflectionSchemaGenerator());
  ApplicationSpecification newSpec = adapter.fromJson(adapter.toJson(appSpec));
  ProgramGenerationStage pgmStage = new ProgramGenerationStage(new NoOpAuthorizer(), new AuthenticationTestContext());
  // Can do better here - fixed right now to run the test.
  pgmStage.process(new StageContext(Object.class));
  pgmStage.process(new ApplicationDeployable(NamespaceId.DEFAULT.artifact("ToyApp", "1.0"), appArchive,
                                             DefaultId.APPLICATION, newSpec, null, ApplicationDeployScope.USER));
  Assert.assertTrue(true);
}
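The closing Assert.assertTrue(true) only records that reaching the end of the method is the success condition; the test passes as long as process(...) does not throw. A slightly stronger check is sketched below using the Twill Location API already in scope (append and exists are standard Location methods); note that the "programs" output layout under the namespace directory is an assumption inferred from the OUTPUT_DIR setting, not something this test confirms:

// Hypothetical follow-up assertion; the exact directory layout written by
// ProgramGenerationStage is an assumption, not verified by the original test.
Location programsDir = namespaceLocation.append("programs");
Assert.assertTrue(programsDir.exists());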
Use of co.cask.cdap.security.auth.context.AuthenticationTestContext in project cdap by caskdata.
The class AbstractDatasetFrameworkTest, method testAuditPublish:
@Test
public void testAuditPublish() throws Exception {
  // Clear all audit messages
  inMemoryAuditPublisher.popMessages();
  List<AuditMessage> expectedMessages = new ArrayList<>();

  // Adding modules
  DatasetFramework framework = getFramework();
  framework.addModule(IN_MEMORY, new InMemoryTableModule());

  // Creating instances
  framework.addInstance(Table.class.getName(), MY_TABLE, DatasetProperties.EMPTY);
  expectedMessages.add(new AuditMessage(0, MY_TABLE, "", AuditType.CREATE, AuditPayload.EMPTY_PAYLOAD));
  framework.addInstance(Table.class.getName(), MY_TABLE2, DatasetProperties.EMPTY);
  expectedMessages.add(new AuditMessage(0, MY_TABLE2, "", AuditType.CREATE, AuditPayload.EMPTY_PAYLOAD));

  // Update instance
  framework.updateInstance(MY_TABLE, DatasetProperties.EMPTY);
  expectedMessages.add(new AuditMessage(0, MY_TABLE, "", AuditType.UPDATE, AuditPayload.EMPTY_PAYLOAD));

  // Access instance
  ProgramRunId runId = new ProgramId("ns", "app", ProgramType.FLOW, "flow").run(RunIds.generate().getId());
  LineageWriterDatasetFramework lineageFramework =
    new LineageWriterDatasetFramework(framework, new NoOpLineageWriter(), new NoOpUsageRegistry(),
                                      new AuthenticationTestContext(), new NoOpAuthorizer());
  lineageFramework.setContext(new TestProgramContext(runId));
  lineageFramework.setAuditPublisher(inMemoryAuditPublisher);
  lineageFramework.getDataset(MY_TABLE, ImmutableMap.<String, String>of(), getClass().getClassLoader());
  expectedMessages.add(new AuditMessage(0, MY_TABLE, "", AuditType.ACCESS,
                                        new AccessPayload(AccessType.UNKNOWN, runId)));

  // Truncate instance
  framework.truncateInstance(MY_TABLE);
  expectedMessages.add(new AuditMessage(0, MY_TABLE, "", AuditType.TRUNCATE, AuditPayload.EMPTY_PAYLOAD));

  // Delete instance
  framework.deleteInstance(MY_TABLE);
  expectedMessages.add(new AuditMessage(0, MY_TABLE, "", AuditType.DELETE, AuditPayload.EMPTY_PAYLOAD));

  // Delete all instances in a namespace
  framework.deleteAllInstances(MY_TABLE2.getParent());
  expectedMessages.add(new AuditMessage(0, MY_TABLE2, "", AuditType.DELETE, AuditPayload.EMPTY_PAYLOAD));

  Assert.assertEquals(expectedMessages, inMemoryAuditPublisher.popMessages());

  // cleanup
  framework.deleteModule(IN_MEMORY);
}
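Every lifecycle call above is paired with an expected AuditMessage built with a timestamp of 0 and an empty user string; presumably the in-memory publisher normalizes those fields so that plain list equality works. A hypothetical helper, relying only on the constructor already shown, would shorten the bookkeeping:

// Hypothetical helper, not in the original test; uses only the
// AuditMessage constructor already used above.
private static AuditMessage expectedMessage(EntityId entityId, AuditType type) {
  return new AuditMessage(0, entityId, "", type, AuditPayload.EMPTY_PAYLOAD);
}

With it, expectedMessages.add(expectedMessage(MY_TABLE, AuditType.CREATE)) covers every case except the ACCESS message, which carries an AccessPayload instead of AuditPayload.EMPTY_PAYLOAD.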
Use of co.cask.cdap.security.auth.context.AuthenticationTestContext in project cdap by caskdata.
The class StreamInputFormatTest, method testFormatStreamRecordReader:
@Test
public void testFormatStreamRecordReader() throws IOException, InterruptedException {
  File inputDir = tmpFolder.newFolder();
  File partition = new File(inputDir, "1.1000");
  partition.mkdirs();
  File eventFile = new File(partition, "bucket.1.0." + StreamFileType.EVENT.getSuffix());
  File indexFile = new File(partition, "bucket.1.0." + StreamFileType.INDEX.getSuffix());

  // write 1 event
  StreamDataFileWriter writer = new StreamDataFileWriter(Files.newOutputStreamSupplier(eventFile),
                                                         Files.newOutputStreamSupplier(indexFile), 100L);
  StreamEvent streamEvent = new StreamEvent(ImmutableMap.of("header1", "value1", "header2", "value2"),
                                            Charsets.UTF_8.encode("hello world"), 1000);
  writer.append(streamEvent);
  writer.close();

  FormatSpecification formatSpec =
    new FormatSpecification(TextRecordFormat.class.getName(),
                            Schema.recordOf("event", Schema.Field.of("body", Schema.of(Schema.Type.STRING))),
                            Collections.<String, String>emptyMap());
  Configuration conf = new Configuration();
  AbstractStreamInputFormat.setStreamId(conf, DUMMY_ID);
  AbstractStreamInputFormat.setBodyFormatSpecification(conf, formatSpec);
  AbstractStreamInputFormat.setStreamPath(conf, inputDir.toURI());
  TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());
  AbstractStreamInputFormat format = new AbstractStreamInputFormat() {
    @Override
    public AuthorizationEnforcer getAuthorizationEnforcer(TaskAttemptContext context) {
      return new NoOpAuthorizer();
    }

    @Override
    public AuthenticationContext getAuthenticationContext(TaskAttemptContext context) {
      return new AuthenticationTestContext();
    }
  };

  // read all splits and store the results in the list
  List<GenericStreamEventData<StructuredRecord>> recordsRead = Lists.newArrayList();
  List<InputSplit> inputSplits = format.getSplits(context);
  for (InputSplit split : inputSplits) {
    RecordReader<LongWritable, GenericStreamEventData<StructuredRecord>> recordReader =
      format.createRecordReader(split, context);
    recordReader.initialize(split, context);
    while (recordReader.nextKeyValue()) {
      recordsRead.add(recordReader.getCurrentValue());
    }
  }

  // should only have read 1 record
  Assert.assertEquals(1, recordsRead.size());
  GenericStreamEventData<StructuredRecord> eventData = recordsRead.get(0);
  Assert.assertEquals(streamEvent.getHeaders(), eventData.getHeaders());
  Assert.assertEquals("hello world", eventData.getBody().get("body"));
}
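The assertions above cover the headers and the decoded body but not the record key. A sketch of one more check that could sit inside the while loop, assuming the LongWritable key carries the event timestamp as in the StreamRecordReader<LongWritable, StreamEvent> usage from testStreamRecordReader:

// Hypothetical extra check for the read loop: the key is assumed to be the
// event timestamp, i.e. 1000 for the single event written above.
Assert.assertEquals(1000L, recordReader.getCurrentKey().get());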