use of org.apache.twill.filesystem.Location in project cdap by caskdata.
the class FileMetadataTest method testFileMetadataReadWrite.
@Test
public void testFileMetadataReadWrite() throws Exception {
  DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
  DatasetManager datasetManager = new DefaultDatasetManager(datasetFramework, NamespaceId.SYSTEM,
                                                            co.cask.cdap.common.service.RetryStrategies.noRetry());
  Transactional transactional = Transactions.createTransactionalWithRetry(
    Transactions.createTransactional(new MultiThreadDatasetCache(
      new SystemDatasetInstantiator(datasetFramework), injector.getInstance(TransactionSystemClient.class),
      NamespaceId.SYSTEM, ImmutableMap.<String, String>of(), null, null)),
    RetryStrategies.retryOnConflict(20, 100));
  FileMetaDataWriter fileMetaDataWriter = new FileMetaDataWriter(datasetManager, transactional);
  LogPathIdentifier logPathIdentifier =
    new LogPathIdentifier(NamespaceId.DEFAULT.getNamespace(), "testApp", "testFlow");
  LocationFactory locationFactory = injector.getInstance(LocationFactory.class);
  Location location = locationFactory.create(TMP_FOLDER.newFolder().getPath()).append("/logs");
  long currentTime = System.currentTimeMillis();
  for (int i = 10; i <= 100; i += 10) {
    // i is the event time
    fileMetaDataWriter.writeMetaData(logPathIdentifier, i, currentTime, location.append(Integer.toString(i)));
  }
  // for timestamp 80, add new log path ids with different current times.
  fileMetaDataWriter.writeMetaData(logPathIdentifier, 80, currentTime + 1, location.append("81"));
  fileMetaDataWriter.writeMetaData(logPathIdentifier, 80, currentTime + 2, location.append("82"));
  // reader test
  FileMetaDataReader fileMetadataReader = injector.getInstance(FileMetaDataReader.class);
  Assert.assertEquals(12, fileMetadataReader.listFiles(logPathIdentifier, 0, 100).size());
  Assert.assertEquals(5, fileMetadataReader.listFiles(logPathIdentifier, 20, 50).size());
  Assert.assertEquals(2, fileMetadataReader.listFiles(logPathIdentifier, 100, 150).size());
  // should include the latest file with event start time 80.
  List<LogLocation> locationList = fileMetadataReader.listFiles(logPathIdentifier, 81, 85);
  Assert.assertEquals(1, locationList.size());
  Assert.assertEquals(80, locationList.get(0).getEventTimeMs());
  Assert.assertEquals(location.append("82"), locationList.get(0).getLocation());
  Assert.assertEquals(1, fileMetadataReader.listFiles(logPathIdentifier, 150, 1000).size());
}
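For readers who want to try the Twill Location calls this test builds on outside the CDAP test harness, here is a minimal, self-contained sketch. It uses a LocalLocationFactory over a local scratch directory in place of the injected LocationFactory, and the class name LocationAppendSketch is purely illustrative; it shows the create/append/exists pattern the test uses to build per-timestamp log file locations.

// A minimal sketch, not part of the CDAP test, assuming only the core Twill Location API.
import java.io.File;
import java.io.IOException;
import org.apache.twill.filesystem.LocalLocationFactory;
import org.apache.twill.filesystem.Location;
import org.apache.twill.filesystem.LocationFactory;

public class LocationAppendSketch {
  public static void main(String[] args) throws IOException {
    // assumption: a local scratch directory stands in for the injector-provided LocationFactory
    File baseDir = new File(System.getProperty("java.io.tmpdir"), "location-sketch");
    baseDir.mkdirs();
    LocationFactory locationFactory = new LocalLocationFactory(baseDir);

    // append a child path the way the test builds per-timestamp log file locations
    Location logs = locationFactory.create("logs");
    Location logFile = logs.append("80");

    System.out.println(logFile.toURI());   // e.g. file:/tmp/location-sketch/logs/80
    System.out.println(logFile.exists());  // false until something creates it
    logs.mkdirs();
    logFile.createNew();                   // creates an empty file at the appended path
    System.out.println(logFile.exists());  // true
  }
}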
use of org.apache.twill.filesystem.Location in project cdap by caskdata.
the class CDAPLogAppenderTest method testCDAPLogAppender.
@Test
public void testCDAPLogAppender() throws Exception {
  int syncInterval = 1024 * 1024;
  CDAPLogAppender cdapLogAppender = new CDAPLogAppender();
  cdapLogAppender.setSyncIntervalBytes(syncInterval);
  cdapLogAppender.setMaxFileLifetimeMs(TimeUnit.DAYS.toMillis(1));
  cdapLogAppender.setMaxFileSizeInBytes(104857600);
  cdapLogAppender.setDirPermissions("700");
  cdapLogAppender.setFilePermissions("600");
  cdapLogAppender.setFileRetentionDurationDays(1);
  cdapLogAppender.setLogCleanupIntervalMins(10);
  cdapLogAppender.setFileCleanupTransactionTimeout(30);
  AppenderContext context = new LocalAppenderContext(injector.getInstance(DatasetFramework.class),
                                                     injector.getInstance(TransactionSystemClient.class),
                                                     injector.getInstance(LocationFactory.class),
                                                     new NoOpMetricsCollectionService());
  context.start();
  cdapLogAppender.setContext(context);
  cdapLogAppender.start();
  FileMetaDataReader fileMetaDataReader = injector.getInstance(FileMetaDataReader.class);
  LoggingEvent event =
    new LoggingEvent("co.cask.Test", (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME),
                     Level.ERROR, "test message", null, null);
  Map<String, String> properties = new HashMap<>();
  properties.put(NamespaceLoggingContext.TAG_NAMESPACE_ID, "default");
  properties.put(ApplicationLoggingContext.TAG_APPLICATION_ID, "testApp");
  properties.put(FlowletLoggingContext.TAG_FLOW_ID, "testFlow");
  properties.put(FlowletLoggingContext.TAG_FLOWLET_ID, "testFlowlet");
  event.setMDCPropertyMap(properties);
  cdapLogAppender.doAppend(event);
  cdapLogAppender.stop();
  context.stop();
  try {
    List<LogLocation> files = fileMetaDataReader.listFiles(cdapLogAppender.getLoggingPath(properties),
                                                           0, Long.MAX_VALUE);
    Assert.assertEquals(1, files.size());
    LogLocation logLocation = files.get(0);
    Assert.assertEquals(LogLocation.VERSION_1, logLocation.getFrameworkVersion());
    Assert.assertTrue(logLocation.getLocation().exists());
    CloseableIterator<LogEvent> logEventCloseableIterator =
      logLocation.readLog(Filter.EMPTY_FILTER, 0, Long.MAX_VALUE, Integer.MAX_VALUE);
    int logCount = 0;
    while (logEventCloseableIterator.hasNext()) {
      logCount++;
      LogEvent logEvent = logEventCloseableIterator.next();
      Assert.assertEquals(event.getMessage(), logEvent.getLoggingEvent().getMessage());
    }
    logEventCloseableIterator.close();
    Assert.assertEquals(1, logCount);
    // check that the log file was written with the configured "600" file permissions
    String expectedPermissions = "rw-------";
    for (LogLocation file : files) {
      Location location = file.getLocation();
      Assert.assertEquals(expectedPermissions, location.getPermissions());
    }
  } catch (Exception e) {
    Assert.fail();
  }
}
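The permission assertion at the end relies on Twill's permission-aware Location methods. As a hedged illustration, the sketch below (a hypothetical LocationPermissionSketch class, not CDAP code) writes a file through getOutputStream with "600" permissions and reads them back with getPermissions(), which on a local filesystem should come back as "rw-------", matching the assertion above.

// A small standalone sketch, assuming Twill's permission-aware Location methods behave as in this test.
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.twill.filesystem.LocalLocationFactory;
import org.apache.twill.filesystem.Location;

public class LocationPermissionSketch {
  public static void main(String[] args) throws IOException {
    // assumption: a local scratch directory instead of the appender-managed log directory
    File baseDir = new File(System.getProperty("java.io.tmpdir"), "perm-sketch");
    baseDir.mkdirs();
    Location logFile = new LocalLocationFactory(baseDir).create("test.avro");

    // write the file with "600" permissions, mirroring cdapLogAppender.setFilePermissions("600")
    try (OutputStream os = logFile.getOutputStream("600")) {
      os.write("test message".getBytes(StandardCharsets.UTF_8));
    }

    // expected to print "rw-------" on a local filesystem
    System.out.println(logFile.getPermissions());
  }
}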
use of org.apache.twill.filesystem.Location in project cdap by caskdata.
the class ApplicationLifecycleServiceTest method testMissingDependency.
@Test
public void testMissingDependency() throws Exception {
  // tests the fix for CDAP-2543, by having programs which fail to start up due to missing dependency jars
  ArtifactId artifactId = NamespaceId.DEFAULT.artifact("missing-guava-dependency", "1.0.0-SNAPSHOT");
  Location appJar = createDeploymentJar(locationFactory, AppWithProgramsUsingGuava.class);
  File appJarFile = new File(tmpFolder.newFolder(),
                             String.format("%s-%s.jar", artifactId.getArtifact(), artifactId.getVersion()));
  Files.copy(Locations.newInputSupplier(appJar), appJarFile);
  appJar.delete();
  applicationLifecycleService.deployAppAndArtifact(NamespaceId.DEFAULT, "appName", artifactId.toId(), appJarFile,
                                                   null, null, new ProgramTerminator() {
    @Override
    public void stop(ProgramId programId) throws Exception {
      // no-op
    }
  }, true);
  ApplicationId appId = NamespaceId.DEFAULT.app("appName");
  // run records for programs that have missing dependencies should be FAILED, instead of hanging in RUNNING
  // fail the Worker#initialize
  ProgramId worker = appId.worker(AppWithProgramsUsingGuava.NoOpWorker.NAME);
  startProgram(worker.toId());
  waitForRuns(1, worker, ProgramRunStatus.FAILED);
  // fail the MapReduce#initialize
  ProgramId mapreduce = appId.mr(AppWithProgramsUsingGuava.NoOpMR.NAME);
  startProgram(mapreduce.toId());
  waitForRuns(1, mapreduce, ProgramRunStatus.FAILED);
  // fail the CustomAction#initialize
  ProgramId workflow = appId.workflow(AppWithProgramsUsingGuava.NoOpWorkflow.NAME);
  startProgram(workflow.toId());
  waitForRuns(1, workflow, ProgramRunStatus.FAILED);
  // fail the Workflow#initialize
  appId.workflow(AppWithProgramsUsingGuava.NoOpWorkflow.NAME);
  startProgram(workflow.toId(), ImmutableMap.of("fail.in.workflow.initialize", "true"));
  waitForRuns(1, workflow, ProgramRunStatus.FAILED);
}
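The jar handling at the top of this test (copy the deployment jar's Location to a local File, then delete the Location) can be sketched with the plain Twill API instead of CDAP's Locations helper. The snippet below is a standalone approximation only; LocationCopySketch, the scratch directory, and the empty placeholder jar are assumptions made for illustration.

// A hedged sketch of streaming a Location's contents into a local File, then deleting the Location.
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import org.apache.twill.filesystem.LocalLocationFactory;
import org.apache.twill.filesystem.Location;

public class LocationCopySketch {
  public static void main(String[] args) throws IOException {
    File baseDir = new File(System.getProperty("java.io.tmpdir"), "copy-sketch");
    baseDir.mkdirs();

    // assumption: a placeholder jar Location; the test obtains its jar from createDeploymentJar(...)
    Location appJar = new LocalLocationFactory(baseDir).create("app.jar");
    appJar.createNew();

    // copy the Location's bytes to a plain java.io.File, the role that
    // Files.copy(Locations.newInputSupplier(appJar), appJarFile) plays in the test
    File appJarFile = new File(baseDir, "missing-guava-dependency-1.0.0-SNAPSHOT.jar");
    try (InputStream in = appJar.getInputStream()) {
      Files.copy(in, appJarFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }

    // the original Location is no longer needed once the local copy exists
    appJar.delete();
  }
}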
use of org.apache.twill.filesystem.Location in project cdap by caskdata.
the class ApplicationLifecycleServiceTest method testDeployArtifactAndApplicationCleansUpArtifactOnFailure.
// test that the call to deploy an artifact and application in a single step will delete the artifact
// if the application could not be created
@Test(expected = ArtifactNotFoundException.class)
public void testDeployArtifactAndApplicationCleansUpArtifactOnFailure() throws Exception {
  Id.Artifact artifactId = Id.Artifact.from(Id.Namespace.DEFAULT, "missing-mr", "1.0.0-SNAPSHOT");
  Location appJar = AppJarHelper.createDeploymentJar(locationFactory, MissingMapReduceWorkflowApp.class);
  File appJarFile = new File(tmpFolder.newFolder(),
                             String.format("%s-%s.jar", artifactId.getName(), artifactId.getVersion().getVersion()));
  Files.copy(Locations.newInputSupplier(appJar), appJarFile);
  appJar.delete();
  try {
    applicationLifecycleService.deployAppAndArtifact(NamespaceId.DEFAULT, "appName", artifactId, appJarFile,
                                                     null, null, new ProgramTerminator() {
      @Override
      public void stop(ProgramId programId) throws Exception {
        // no-op
      }
    }, true);
    Assert.fail("expected application deployment to fail.");
  } catch (Exception e) {
    // expected
  }
  // the artifact should have been cleaned up, and this should throw a not found exception
  artifactRepository.getArtifact(artifactId);
}
use of org.apache.twill.filesystem.Location in project cdap by caskdata.
the class MetadataAdminAuthorizationTest method createCConf.
private static CConfiguration createCConf() throws IOException {
  CConfiguration cConf = CConfiguration.create();
  cConf.setBoolean(Constants.Security.ENABLED, true);
  cConf.setBoolean(Constants.Security.Authorization.ENABLED, true);
  // we only want to test authorization, but we don't specify principal/keytab, so disable kerberos
  cConf.setBoolean(Constants.Security.KERBEROS_ENABLED, false);
  cConf.setInt(Constants.Security.Authorization.CACHE_MAX_ENTRIES, 0);
  LocationFactory locationFactory = new LocalLocationFactory(new File(TEMPORARY_FOLDER.newFolder().toURI()));
  Location authorizerJar = AppJarHelper.createDeploymentJar(locationFactory, InMemoryAuthorizer.class);
  cConf.set(Constants.Security.Authorization.EXTENSION_JAR_PATH, authorizerJar.toURI().getPath());
  return cConf;
}
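As a final illustration of the Location-to-path conversion used for EXTENSION_JAR_PATH, the hedged sketch below (a made-up LocationPathSketch class with an empty placeholder jar instead of the AppJarHelper output) builds a LocalLocationFactory over a temp directory and converts a Location to an absolute filesystem path via toURI().getPath().

// A minimal sketch, assuming only a local filesystem and a placeholder jar file.
import java.io.File;
import java.io.IOException;
import org.apache.twill.filesystem.LocalLocationFactory;
import org.apache.twill.filesystem.Location;
import org.apache.twill.filesystem.LocationFactory;

public class LocationPathSketch {
  public static void main(String[] args) throws IOException {
    File tmpDir = new File(System.getProperty("java.io.tmpdir"), "path-sketch");
    tmpDir.mkdirs();
    LocationFactory locationFactory = new LocalLocationFactory(tmpDir);

    // stand-in for the jar produced by AppJarHelper.createDeploymentJar(locationFactory, InMemoryAuthorizer.class)
    Location authorizerJar = locationFactory.create("authorizer.jar");
    authorizerJar.createNew();

    // toURI().getPath() yields an absolute path suitable for a string configuration value
    String extensionJarPath = authorizerJar.toURI().getPath();
    System.out.println(extensionJarPath);  // e.g. /tmp/path-sketch/authorizer.jar
  }
}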