Use of org.apache.helix.HelixProperty in project ambry by linkedin.
The class HelixHealthReportAggregatorTask, method run.
@Override
public TaskResult run() {
  Pair<StatsSnapshot, StatsSnapshot> results = null;
  Exception exception = null;
  try {
    HelixDataAccessor helixDataAccessor = manager.getHelixDataAccessor();
    List<String> instanceNames = manager.getClusterManagmentTool().getInstancesInCluster(manager.getClusterName());
    Map<String, String> statsWrappersJSON = new HashMap<>();
    // Collect the per-instance stats field from each instance's health-report HelixProperty.
    for (String instanceName : instanceNames) {
      PropertyKey.Builder keyBuilder = helixDataAccessor.keyBuilder();
      HelixProperty record = helixDataAccessor.getProperty(keyBuilder.healthReport(instanceName, healthReportName));
      if (record != null && record.getRecord() != null) {
        statsWrappersJSON.put(instanceName, record.getRecord().getSimpleField(statsFieldName));
      }
    }
    ObjectMapper mapper = new ObjectMapper();
    results = clusterAggregator.doWork(statsWrappersJSON, statsReportType);
    // Persist the aggregated result as a ZNRecord in the Helix property store.
    String resultId = String.format("%s%s", AGGREGATED_REPORT_PREFIX, healthReportName);
    ZNRecord znRecord = new ZNRecord(resultId);
    znRecord.setSimpleField(RAW_VALID_SIZE_FIELD_NAME, mapper.writeValueAsString(results.getFirst()));
    znRecord.setSimpleField(VALID_SIZE_FIELD_NAME, mapper.writeValueAsString(results.getSecond()));
    znRecord.setSimpleField(TIMESTAMP_FIELD_NAME, String.valueOf(time.milliseconds()));
    znRecord.setListField(ERROR_OCCURRED_INSTANCES_FIELD_NAME,
        clusterAggregator.getExceptionOccurredInstances(statsReportType));
    String path = String.format("/%s", resultId);
    manager.getHelixPropertyStore().set(path, znRecord, AccessOption.PERSISTENT);
    return new TaskResult(TaskResult.Status.COMPLETED, "Aggregation success");
  } catch (Exception e) {
    logger.error("Exception thrown while aggregating stats from health reports across all nodes ", e);
    exception = e;
    return new TaskResult(TaskResult.Status.FAILED, "Exception thrown");
  } finally {
    if (clusterMapConfig.clustermapEnableContainerDeletionAggregation && callback != null && results != null
        && statsReportType.equals(StatsReportType.ACCOUNT_REPORT)) {
      callback.onCompletion(results.getFirst(), exception);
    }
  }
}
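The task persists its result as a plain ZNRecord under the property store root, so any node with a connected HelixManager can read it back by path. A minimal reader sketch, assuming Jackson-serialized simple fields; the constant values and the class name below are illustrative, not Ambry's actual reader:

import org.apache.helix.AccessOption;
import org.apache.helix.HelixManager;
import org.apache.helix.ZNRecord;

public class AggregatedReportReader {
  // Illustrative constants; the real names and values live in the Ambry task class.
  private static final String AGGREGATED_REPORT_PREFIX = "Aggregated_";
  private static final String VALID_SIZE_FIELD_NAME = "ValidSize";

  /** Returns the aggregated valid-size JSON written by the task above, or null if it is not there yet. */
  public static String readValidSizeJson(HelixManager manager, String healthReportName) {
    String path = String.format("/%s%s", AGGREGATED_REPORT_PREFIX, healthReportName);
    ZNRecord record = manager.getHelixPropertyStore().get(path, null, AccessOption.PERSISTENT);
    return record == null ? null : record.getSimpleField(VALID_SIZE_FIELD_NAME);
  }
}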
Use of org.apache.helix.HelixProperty in project helix by apache.
The class TestDisableExternalView, method testDisableExternalView.
@Test
public void testDisableExternalView() throws InterruptedException {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(CLUSTER_NAME, new ZkBaseDataAccessor<ZNRecord>(_gZkClient));
  PropertyKey.Builder keyBuilder = accessor.keyBuilder();
  // verify external view for TEST_DB1 does not exist
  ExternalView externalView = accessor.getProperty(keyBuilder.externalView(TEST_DB1));
  Assert.assertNull(externalView,
      "There should be no external-view for " + TEST_DB1 + ", but is: " + externalView);
  // verify external view for TEST_DB2 exists
  externalView = accessor.getProperty(keyBuilder.externalView(TEST_DB2));
  Assert.assertNotNull(externalView, "Could not find external-view for " + TEST_DB2);
  // disable external view in the ideal state
  IdealState idealState = _admin.getResourceIdealState(CLUSTER_NAME, TEST_DB2);
  idealState.setDisableExternalView(true);
  _admin.setResourceIdealState(CLUSTER_NAME, TEST_DB2, idealState);
  // touch the live instance to trigger the external-view compute stage
  String instance = PARTICIPANT_PREFIX + "_" + START_PORT;
  HelixProperty liveInstance = accessor.getProperty(keyBuilder.liveInstance(instance));
  accessor.setProperty(keyBuilder.liveInstance(instance), liveInstance);
  // verify the external view for the db got removed
  for (int i = 0; i < 10; i++) {
    Thread.sleep(100);
    externalView = accessor.getProperty(keyBuilder.externalView(TEST_DB2));
    if (externalView == null) {
      break;
    }
  }
  Assert.assertNull(externalView,
      "external-view for " + TEST_DB2 + " should be removed, but was: " + externalView);
}
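The touch-and-poll idiom in this test (rewrite an existing HelixProperty to bump its version and nudge the controller pipeline, then poll until the external-view znode disappears) is reusable outside the test. A small sketch under the assumption of an already-connected HelixDataAccessor; the class name, helper names, and timeout are illustrative:

import org.apache.helix.HelixDataAccessor;
import org.apache.helix.HelixProperty;
import org.apache.helix.PropertyKey;

public final class ExternalViewTestUtil {
  private ExternalViewTestUtil() {
  }

  /** Rewrites the live-instance property so the controller recomputes external views. */
  public static void touchLiveInstance(HelixDataAccessor accessor, String instanceName) {
    PropertyKey key = accessor.keyBuilder().liveInstance(instanceName);
    HelixProperty liveInstance = accessor.getProperty(key);
    if (liveInstance != null) {
      accessor.setProperty(key, liveInstance);
    }
  }

  /** Polls until the external view for the resource is gone, or the timeout elapses. */
  public static boolean waitForExternalViewRemoval(HelixDataAccessor accessor, String resource, long timeoutMs)
      throws InterruptedException {
    long deadline = System.currentTimeMillis() + timeoutMs;
    while (System.currentTimeMillis() < deadline) {
      if (accessor.getProperty(accessor.keyBuilder().externalView(resource)) == null) {
        return true;
      }
      Thread.sleep(100);
    }
    return false;
  }
}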
Use of org.apache.helix.HelixProperty in project helix by apache.
The class TestHelixDataAccessor, method beforeClass.
@BeforeClass
public void beforeClass() {
  _zkClient = new MockZkClient(ZK_ADDR);
  baseDataAccessor = new ZkBaseDataAccessor<>(_zkClient);
  accessor = new ZKHelixDataAccessor("HELIX", baseDataAccessor);
  Map<String, HelixProperty> paths = new TreeMap<>();
  propertyKeys = new ArrayList<>();
  // Seed five readable ideal-state properties.
  for (int i = 0; i < 5; i++) {
    PropertyKey key = accessor.keyBuilder().idealStates("RESOURCE" + i);
    propertyKeys.add(key);
    paths.put(key.getPath(), new HelixProperty("RESOURCE" + i));
    accessor.setProperty(key, paths.get(key.getPath()));
  }
  List<HelixProperty> data = accessor.getProperty(new ArrayList<>(propertyKeys), true);
  Assert.assertEquals(data.size(), 5);
  // Register a sixth key and plant null data for it in the mock client.
  PropertyKey key = accessor.keyBuilder().idealStates("RESOURCE6");
  propertyKeys.add(key);
  _zkClient.putData(key.getPath(), null);
}
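The setup seeds five readable ideal states and then plants unreadable data at RESOURCE6, so the tests that follow can exercise both sides of the batched read API. A minimal sketch of that API, assuming a connected HelixDataAccessor; the wrapper class and method names are illustrative:

import java.util.ArrayList;
import java.util.List;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.HelixProperty;
import org.apache.helix.PropertyKey;

public class BatchReadExample {
  /**
   * Reads a batch of property keys in one call. With throwOnFailure = false, keys that cannot be
   * read come back as null entries; with true, the read failure is surfaced to the caller instead.
   */
  public static List<HelixProperty> readBatch(HelixDataAccessor accessor, List<PropertyKey> keys,
      boolean throwOnFailure) {
    return accessor.getProperty(new ArrayList<>(keys), throwOnFailure);
  }
}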
Use of org.apache.helix.HelixProperty in project helix by apache.
The class ClusterRepresentationUtil, method getPropertyAsString.
public static String getPropertyAsString(ZkClient zkClient, String clusterName, PropertyKey propertyKey,
    MediaType mediaType) throws JsonGenerationException, JsonMappingException, IOException {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<ZNRecord>(zkClient));
  ZNRecord record = EMPTY_ZNRECORD;
  HelixProperty property = accessor.getProperty(propertyKey);
  if (property != null) {
    record = property.getRecord();
  }
  return ObjectToJson(record);
}
Use of org.apache.helix.HelixProperty in project helix by apache.
The class ClusterRepresentationUtil, method getClusterPropertyAsString.
public static String getClusterPropertyAsString(ZkClient zkClient, String clusterName, MediaType mediaType,
    PropertyKey propertyKey) throws JsonGenerationException, JsonMappingException, IOException {
  ZKHelixDataAccessor accessor =
      new ZKHelixDataAccessor(clusterName, new ZkBaseDataAccessor<ZNRecord>(zkClient));
  HelixProperty property = accessor.getProperty(propertyKey);
  ZNRecord record = property == null ? null : property.getRecord();
  return ZNRecordToJson(record);
}
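Both helpers share the same shape: resolve the PropertyKey through a ZKHelixDataAccessor, unwrap the HelixProperty into its backing ZNRecord, and serialize that record to JSON (the MediaType argument is accepted but not consulted in either method). A minimal sketch of the serialization step on its own, assuming Jackson 2 is on the classpath; the internal ObjectToJson/ZNRecordToJson helpers may differ in detail:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import org.apache.helix.ZNRecord;

public class ZNRecordJsonExample {
  /** Serializes a ZNRecord to a JSON string via its bean getters (id, simple/list/map fields). */
  public static String toJson(ZNRecord record) throws IOException {
    return new ObjectMapper().writeValueAsString(record);
  }
}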