Example usage of com.github.ambry.config.VerifiableProperties in the LinkedIn project ambry.
From the class CuratedLogIndexState, method initIndex:
/**
 * Creates the index instance to test with. Each call generates a fresh session id, metric registry
 * and {@link StoreMetrics}, shuts down any previously installed scheduler, and rebuilds the
 * {@link PersistentIndex} over the same log and temp directory.
 * @param newScheduler the {@link ScheduledExecutorService} to use in testing. If null, a default scheduler will be created to use.
 * @throws StoreException if the {@link PersistentIndex} could not be created
 */
void initIndex(ScheduledExecutorService newScheduler) throws StoreException {
StoreConfig config = new StoreConfig(new VerifiableProperties(properties));
sessionId = UUID.randomUUID();
metricRegistry = new MetricRegistry();
metrics = new StoreMetrics(metricRegistry);
// stop the scheduler from a previous init (if any) before installing the replacement
shutDownExecutorService(scheduler, 1, TimeUnit.SECONDS);
scheduler = newScheduler == null ? Utils.newScheduler(1, false) : newScheduler;
index = new PersistentIndex(tempDirStr, tempDirStr, scheduler, log, config, CuratedLogIndexState.STORE_KEY_FACTORY, recovery, hardDelete, DISK_IO_SCHEDULER, metrics, time, sessionId, incarnationId);
}
Example usage of com.github.ambry.config.VerifiableProperties in the LinkedIn project ambry.
From the class CommonTestUtils, method getCurrentBlobIdVersion:
/**
 * Gets the blob id version that is currently active, as reported by a {@link RouterConfig}
 * built from the minimal set of required router properties.
 * @return the current active blob id version.
 */
public static short getCurrentBlobIdVersion() {
  Properties properties = new Properties();
  properties.setProperty("router.hostname", "localhost");
  properties.setProperty("router.datacenter.name", "localDC");
  RouterConfig routerConfig = new RouterConfig(new VerifiableProperties(properties));
  return routerConfig.routerBlobidCurrentVersion;
}
Example usage of com.github.ambry.config.VerifiableProperties in the LinkedIn project ambry.
From the class StoreDescriptorTest, method testStoreDescriptor:
/**
 * Tests {@link StoreDescriptor} for instantiation and conversion of bytes into a StoreDescriptor:
 * a fresh descriptor persists its incarnationId, a second instantiation reads the same id back,
 * an unknown version is rejected, and a corrupt CRC is rejected.
 * @throws IOException if any of the descriptor files cannot be written
 */
@Test
public void testStoreDescriptor() throws IOException {
  StoreConfig config = new StoreConfig(new VerifiableProperties(new Properties()));
  File tempDir = StoreTestUtils.createTempDirectory("storeDir");
  File storeDescriptorFile = new File(tempDir.getAbsolutePath(), StoreDescriptor.STORE_DESCRIPTOR_FILENAME);
  StoreDescriptor storeDescriptor = new StoreDescriptor(tempDir.getAbsolutePath(), config);
  // store descriptor file should have been created; a second instantiation must read the same id back.
  StoreDescriptor newStoreDescriptor = new StoreDescriptor(tempDir.getAbsolutePath(), config);
  assertEquals("IncarnationId mismatch ", storeDescriptor.getIncarnationId(), newStoreDescriptor.getIncarnationId());
  assertTrue("Store descriptor file could not be deleted", storeDescriptorFile.delete());
  // Create StoreDescriptor file with new incarnationId
  UUID incarnationIdUUID = UUID.randomUUID();
  byte[] toBytes = getBytesForStoreDescriptor(StoreDescriptor.VERSION_0, incarnationIdUUID);
  storeDescriptorFile = new File(tempDir.getAbsolutePath(), StoreDescriptor.STORE_DESCRIPTOR_FILENAME);
  assertTrue("Store descriptor file could not be created", storeDescriptorFile.createNewFile());
  createStoreFile(storeDescriptorFile, toBytes);
  storeDescriptor = new StoreDescriptor(tempDir.getAbsolutePath(), config);
  assertEquals("IncarnationId mismatch ", incarnationIdUUID, storeDescriptor.getIncarnationId());
  // check for wrong version: (short) 1 is not a version StoreDescriptor understands
  assertTrue("Store descriptor file could not be deleted", storeDescriptorFile.delete());
  toBytes = getBytesForStoreDescriptor((short) 1, incarnationIdUUID);
  assertTrue("Store descriptor file could not be created", storeDescriptorFile.createNewFile());
  createStoreFile(storeDescriptorFile, toBytes);
  try {
    new StoreDescriptor(tempDir.getAbsolutePath(), config);
    fail("Wrong version should have thrown IllegalArgumentException ");
  } catch (IllegalArgumentException e) {
    // expected
  }
  // check for wrong Crc: write valid content but a CRC value that is off by one
  assertTrue("Store descriptor file could not be deleted", storeDescriptorFile.delete());
  assertTrue("Store descriptor file could not be created", storeDescriptorFile.createNewFile());
  toBytes = getBytesForStoreDescriptor(StoreDescriptor.VERSION_0, incarnationIdUUID);
  // try-with-resources so the file handle is released even if a write throws
  try (CrcOutputStream crcOutputStream = new CrcOutputStream(new FileOutputStream(storeDescriptorFile));
      DataOutputStream dataOutputStream = new DataOutputStream(crcOutputStream)) {
    dataOutputStream.write(toBytes);
    dataOutputStream.writeLong(crcOutputStream.getValue() + 1);
  }
  try {
    new StoreDescriptor(tempDir.getAbsolutePath(), config);
    fail("Wrong CRC should have thrown IllegalStateException ");
  } catch (IllegalStateException e) {
    // expected
  }
}
Example usage of com.github.ambry.config.VerifiableProperties in the LinkedIn project ambry.
From the class HardDeleterTest, method setup:
/**
 * Builds the fixtures for each test: an empty store directory, a {@link Log}, a {@link MockIndex}
 * backed by a {@link HardDeleteTestHelper} and a {@link MockDiskIOScheduler}, with the index
 * persistor effectively disabled so the tests control token movement themselves.
 * @throws Exception if any fixture cannot be created
 */
@Before
public void setup() throws Exception {
  File rootDirectory = StoreTestUtils.createTempDirectory("ambry");
  File indexFile = new File(rootDirectory.getAbsolutePath());
  // listFiles() returns null on I/O error; guard to avoid an NPE while cleaning stale files
  File[] staleFiles = indexFile.listFiles();
  if (staleFiles != null) {
    for (File c : staleFiles) {
      c.delete();
    }
  }
  scheduler = Utils.newScheduler(1, false);
  MetricRegistry metricRegistry = new MetricRegistry();
  StoreMetrics metrics = new StoreMetrics(metricRegistry);
  Properties props = new Properties();
  // the test will set the tokens, so disable the index persistor.
  props.setProperty("store.data.flush.interval.seconds", "3600");
  props.setProperty("store.deleted.message.retention.hours", "1");
  props.setProperty("store.index.max.number.of.inmem.elements", "2");
  props.setProperty("store.segment.size.in.bytes", "10000");
  // the following determines the number of entries that will be fetched at most. We need this to test the
  // case where the endToken does not reach the journal.
  props.setProperty("store.hard.delete.operations.bytes.per.sec", "40");
  StoreConfig config = new StoreConfig(new VerifiableProperties(props));
  log = new Log(rootDirectory.getAbsolutePath(), 10000, StoreTestUtils.DEFAULT_DISK_SPACE_ALLOCATOR, config, metrics, null);
  StoreKeyFactory factory = Utils.getObj("com.github.ambry.store.MockIdFactory");
  time = new MockTime(SystemTime.getInstance().milliseconds());
  helper = new HardDeleteTestHelper(0, 200);
  diskIOScheduler = new MockDiskIOScheduler();
  index = new MockIndex(rootDirectory.getAbsolutePath(), scheduler, log, config, factory, helper, diskIOScheduler, time, UUID.randomUUID());
  helper.setIndex(index, log);
  // Setting this below will not enable the hard delete thread. This being a unit test, the methods
  // are going to be called directly. We simply want to set the enabled flag to avoid those methods
  // from bailing out prematurely.
  index.setHardDeleteRunningStatus(true);
}
Example usage of com.github.ambry.config.VerifiableProperties in the LinkedIn project ambry.
From the class HelixParticipantTest, method testBadCases:
/**
 * Test bad instantiation and initialization scenarios of the {@link HelixParticipant}.
 * Each sub-case mutates the shared {@code props}, so the order of the cases (and the
 * restoration of values between them) matters.
 */
@Test
public void testBadCases() {
// Invalid state model def: ClusterMapConfig construction itself should reject it
props.setProperty("clustermap.state.model.definition", "InvalidStateModelDef");
try {
new ClusterMapConfig(new VerifiableProperties(props));
fail("should fail due to invalid state model definition");
} catch (IllegalArgumentException e) {
// expected and restore previous props
props.setProperty("clustermap.state.model.definition", stateModelDef);
}
// Connect failure: a misbehaving helix manager should surface as IOException from participate()
ClusterMapConfig clusterMapConfig = new ClusterMapConfig(new VerifiableProperties(props));
helixManagerFactory.getHelixManager(InstanceType.PARTICIPANT).beBad = true;
HelixParticipant helixParticipant = new HelixParticipant(clusterMapConfig, helixManagerFactory, new MetricRegistry(), getDefaultZkConnectStr(clusterMapConfig), true);
try {
helixParticipant.participate(Collections.emptyList(), null, null);
fail("Participation should have failed");
} catch (IOException e) {
// OK
}
// Bad param during instantiation: an empty cluster name must be rejected
props.setProperty("clustermap.cluster.name", "");
clusterMapConfig = new ClusterMapConfig(new VerifiableProperties(props));
try {
new HelixParticipant(clusterMapConfig, helixManagerFactory, new MetricRegistry(), getDefaultZkConnectStr(clusterMapConfig), true);
fail("Instantiation should have failed");
} catch (IllegalStateException e) {
// OK
}
// restore the cluster name, then verify that empty zk connect strings fail participant creation
props.setProperty("clustermap.cluster.name", "HelixParticipantTestCluster");
props.setProperty("clustermap.dcs.zk.connect.strings", "");
clusterMapConfig = new ClusterMapConfig(new VerifiableProperties(props));
try {
new HelixClusterAgentsFactory(clusterMapConfig, new MetricRegistry()).getClusterParticipants();
fail("Instantiation should have failed");
} catch (IOException e) {
// OK
}
}
Aggregations