use of org.I0Itec.zkclient.DataUpdater in project helix by apache.
the class DeprecatedTaskRebalancer method cleanup.
/**
* Cleans up all Helix state associated with this job, wiping workflow-level information if this
* is the last remaining job in its workflow, and the workflow is terminable.
*/
private static void cleanup(HelixManager mgr, final String resourceName, WorkflowConfig cfg, String workflowResource) {
  LOG.info("Cleaning up job: " + resourceName + " in workflow: " + workflowResource);
  HelixDataAccessor accessor = mgr.getHelixDataAccessor();
  // Remove any DAG references in workflow
  PropertyKey workflowKey = getConfigPropertyKey(accessor, workflowResource);
  DataUpdater<ZNRecord> dagRemover = new DataUpdater<ZNRecord>() {

    @Override
    public ZNRecord update(ZNRecord currentData) {
      JobDag jobDag = JobDag.fromJson(currentData.getSimpleField(WorkflowConfig.WorkflowConfigProperty.Dag.name()));
      for (String child : jobDag.getDirectChildren(resourceName)) {
        jobDag.getChildrenToParents().get(child).remove(resourceName);
      }
      for (String parent : jobDag.getDirectParents(resourceName)) {
        jobDag.getParentsToChildren().get(parent).remove(resourceName);
      }
      jobDag.getChildrenToParents().remove(resourceName);
      jobDag.getParentsToChildren().remove(resourceName);
      jobDag.getAllNodes().remove(resourceName);
      try {
        currentData.setSimpleField(WorkflowConfig.WorkflowConfigProperty.Dag.name(), jobDag.toJson());
      } catch (Exception e) {
        LOG.error("Could not update DAG for job: " + resourceName, e);
      }
      return currentData;
    }
  };
  accessor.getBaseDataAccessor().update(workflowKey.getPath(), dagRemover, AccessOption.PERSISTENT);
  // Delete resource configs.
  PropertyKey cfgKey = getConfigPropertyKey(accessor, resourceName);
  if (!accessor.removeProperty(cfgKey)) {
    throw new RuntimeException(String.format("Error occurred while trying to clean up job %s. Failed to remove node %s from Helix. Aborting further clean up steps.", resourceName, cfgKey));
  }
  // Delete property store information for this resource.
  // For a recurring workflow, it's OK if the node doesn't exist.
  String propStoreKey = getRebalancerPropStoreKey(resourceName);
  mgr.getHelixPropertyStore().remove(propStoreKey, AccessOption.PERSISTENT);
  // Delete the ideal state itself.
  PropertyKey isKey = getISPropertyKey(accessor, resourceName);
  if (!accessor.removeProperty(isKey)) {
    throw new RuntimeException(String.format("Error occurred while trying to clean up task %s. Failed to remove node %s from Helix.", resourceName, isKey));
  }
  // Delete the dead external view. Because the job is already completed, there are no
  // further current state changes, so dead external view removal will not be triggered elsewhere.
  PropertyKey evKey = accessor.keyBuilder().externalView(resourceName);
  accessor.removeProperty(evKey);
  LOG.info(String.format("Successfully cleaned up job resource %s.", resourceName));
  boolean lastInWorkflow = true;
  for (String job : cfg.getJobDag().getAllNodes()) {
    // check if property store information or resource configs exist for this job
    if (mgr.getHelixPropertyStore().exists(getRebalancerPropStoreKey(job), AccessOption.PERSISTENT)
        || accessor.getProperty(getConfigPropertyKey(accessor, job)) != null
        || accessor.getProperty(getISPropertyKey(accessor, job)) != null) {
      lastInWorkflow = false;
      break;
    }
  }
  // clean up workflow-level info if this was the last job in the workflow
  if (lastInWorkflow && (cfg.isTerminable() || cfg.getTargetState() == TargetState.DELETE)) {
    // delete workflow config
    PropertyKey workflowCfgKey = getConfigPropertyKey(accessor, workflowResource);
    if (!accessor.removeProperty(workflowCfgKey)) {
      throw new RuntimeException(String.format("Error occurred while trying to clean up workflow %s. Failed to remove node %s from Helix. Aborting further clean up steps.", workflowResource, workflowCfgKey));
    }
    // Delete property store information for this workflow
    String workflowPropStoreKey = getRebalancerPropStoreKey(workflowResource);
    if (!mgr.getHelixPropertyStore().remove(workflowPropStoreKey, AccessOption.PERSISTENT)) {
      throw new RuntimeException(String.format("Error occurred while trying to clean up workflow %s. Failed to remove node %s from Helix. Aborting further clean up steps.", workflowResource, workflowPropStoreKey));
    }
    // Remove the pending timer for this workflow, if one exists
    if (SCHEDULED_TIMES.containsKey(workflowResource)) {
      SCHEDULED_TIMES.remove(workflowResource);
    }
  }
}
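The dagRemover above shows the core DataUpdater pattern: the base accessor reads the current ZNRecord, passes it to update(), and writes the returned record back, retrying if the znode changed underneath, so the read-modify-write is effectively atomic. A minimal stand-alone sketch of the same pattern follows; the path, field name, and nodeToDrop value are hypothetical illustrations, not taken from the Helix code above.

BaseDataAccessor<ZNRecord> base = accessor.getBaseDataAccessor(); // accessor as obtained above
final String nodeToDrop = "Job_3";                       // hypothetical value
String path = "/MyCluster/CONFIGS/RESOURCE/MyWorkflow";  // hypothetical znode path

base.update(path, new DataUpdater<ZNRecord>() {

  @Override
  public ZNRecord update(ZNRecord currentData) {
    // assumes the znode already exists, just as dagRemover does above
    List<String> nodes = currentData.getListField("NODES"); // hypothetical field name
    if (nodes != null) {
      nodes.remove(nodeToDrop);
      currentData.setListField("NODES", nodes);
    }
    return currentData;
  }
}, AccessOption.PERSISTENT);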
use of org.I0Itec.zkclient.DataUpdater in project helix by apache.
the class ZKUtil method createOrReplace.
public static void createOrReplace(ZkClient client, String path, final ZNRecord record, final boolean persistent) {
  int retryCount = 0;
  while (retryCount < RETRYLIMIT) {
    try {
      if (client.exists(path)) {
        DataUpdater<Object> updater = new DataUpdater<Object>() {

          @Override
          public Object update(Object currentData) {
            return record;
          }
        };
        client.updateDataSerialized(path, updater);
      } else {
        CreateMode mode = (persistent) ? CreateMode.PERSISTENT : CreateMode.EPHEMERAL;
        client.create(path, record, mode);
      }
      break;
    } catch (Exception e) {
      retryCount = retryCount + 1;
      logger.warn("Exception trying to createOrReplace " + path + " Exception:" + e.getMessage() + ". Will retry.");
    }
  }
}
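A hypothetical call site for the helper above, creating or overwriting a persistent config znode. The path and simple field are illustrative only, and zkClient is assumed to be an already-connected ZkClient.

ZNRecord config = new ZNRecord("MyResource");
config.setSimpleField("REBALANCE_MODE", "FULL_AUTO"); // illustrative field
ZKUtil.createOrReplace(zkClient, "/MyCluster/CONFIGS/RESOURCE/MyResource", config, true);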
use of org.I0Itec.zkclient.DataUpdater in project helix by apache.
the class ParticipantManager method carryOverPreviousCurrentState.
/**
* carry over current-states from last sessions
* set to initial state for current session only when state doesn't exist in current session
*/
private void carryOverPreviousCurrentState() {
  List<String> sessions = _dataAccessor.getChildNames(_keyBuilder.sessions(_instanceName));
  for (String session : sessions) {
    if (session.equals(_sessionId)) {
      continue;
    }
    List<CurrentState> lastCurStates = _dataAccessor.getChildValues(_keyBuilder.currentStates(_instanceName, session));
    for (CurrentState lastCurState : lastCurStates) {
      LOG.info("Carrying over old session: " + session + ", resource: " + lastCurState.getId() + " to current session: " + _sessionId);
      String stateModelDefRef = lastCurState.getStateModelDefRef();
      if (stateModelDefRef == null) {
        LOG.error("skip carry-over because previous current state doesn't have a state model definition. previous current-state: " + lastCurState);
        continue;
      }
      StateModelDefinition stateModel = _dataAccessor.getProperty(_keyBuilder.stateModelDef(stateModelDefRef));
      BaseDataAccessor<ZNRecord> baseAccessor = _dataAccessor.getBaseDataAccessor();
      String curStatePath = _keyBuilder.currentState(_instanceName, _sessionId, lastCurState.getResourceName()).getPath();
      String initState = stateModel.getInitialState();
      if (lastCurState.getBucketSize() > 0) {
        // update parent node
        ZNRecord metaRecord = new ZNRecord(lastCurState.getId());
        metaRecord.setSimpleFields(lastCurState.getRecord().getSimpleFields());
        DataUpdater<ZNRecord> metaRecordUpdater = new CurStateCarryOverUpdater(_sessionId, initState, new CurrentState(metaRecord));
        boolean success = baseAccessor.update(curStatePath, metaRecordUpdater, AccessOption.PERSISTENT);
        if (success) {
          // update current state buckets
          ZNRecordBucketizer bucketizer = new ZNRecordBucketizer(lastCurState.getBucketSize());
          Map<String, ZNRecord> map = bucketizer.bucketize(lastCurState.getRecord());
          List<String> paths = new ArrayList<String>();
          List<DataUpdater<ZNRecord>> updaters = new ArrayList<DataUpdater<ZNRecord>>();
          for (String bucketName : map.keySet()) {
            paths.add(curStatePath + "/" + bucketName);
            updaters.add(new CurStateCarryOverUpdater(_sessionId, initState, new CurrentState(map.get(bucketName))));
          }
          baseAccessor.updateChildren(paths, updaters, AccessOption.PERSISTENT);
        }
      } else {
        _dataAccessor.getBaseDataAccessor().update(curStatePath, new CurStateCarryOverUpdater(_sessionId, initState, lastCurState), AccessOption.PERSISTENT);
      }
    }
  }
  /**
   * remove previous current state parent nodes
   */
  for (String session : sessions) {
    if (session.equals(_sessionId)) {
      continue;
    }
    String path = _keyBuilder.currentStates(_instanceName, session).getPath();
    LOG.info("Removing current states from previous sessions. path: " + path);
    _zkclient.deleteRecursively(path);
  }
}
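CurStateCarryOverUpdater itself is not shown in this snippet. Based on how it is constructed and applied above, a minimal sketch of an updater with that contract could look like the following; the class name and field handling here are inferred, and the real implementation may differ.

class CurStateCarryOverSketch implements DataUpdater<ZNRecord> {

  final String _curSessionId;
  final String _initState;
  final CurrentState _lastCurState;

  CurStateCarryOverSketch(String curSessionId, String initState, CurrentState lastCurState) {
    _curSessionId = curSessionId;
    _initState = initState;
    _lastCurState = lastCurState;
  }

  @Override
  public ZNRecord update(ZNRecord currentData) {
    CurrentState curState;
    if (currentData == null) {
      // no current state for this session yet: copy the meta fields and stamp the new session id
      curState = new CurrentState(_lastCurState.getId());
      curState.getRecord().setSimpleFields(_lastCurState.getRecord().getSimpleFields());
      curState.setSessionId(_curSessionId);
    } else {
      curState = new CurrentState(currentData);
    }
    // carry over only partitions that have no state in the current session,
    // setting them to the state model's initial state
    for (String partition : _lastCurState.getPartitionStateMap().keySet()) {
      if (curState.getState(partition) == null) {
        curState.setState(partition, _initState);
      }
    }
    return curState.getRecord();
  }
}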
use of org.I0Itec.zkclient.DataUpdater in project helix by apache.
the class TestAutoFallbackPropertyStore method testMultiUpdateUsingFallbackPath.
@Test
public void testMultiUpdateUsingFallbackPath() {
  String className = TestHelper.getTestClassName();
  String methodName = TestHelper.getTestMethodName();
  String clusterName = className + "_" + methodName;
  System.out.println("START " + clusterName + " at " + new Date(System.currentTimeMillis()));
  String root = String.format("/%s/%s", clusterName, PropertyType.PROPERTYSTORE.name());
  String fallbackRoot = String.format("/%s/%s", clusterName, "HELIX_PROPERTYSTORE");
  ZkBaseDataAccessor<ZNRecord> baseAccessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);
  // create 0-9 under fallbackRoot
  for (int i = 0; i < 10; i++) {
    String path = String.format("%s/%d", fallbackRoot, i);
    baseAccessor.create(path, new ZNRecord(Integer.toString(i)), AccessOption.PERSISTENT);
  }
  AutoFallbackPropertyStore<ZNRecord> store = new AutoFallbackPropertyStore<ZNRecord>(baseAccessor, root, fallbackRoot);
  List<String> paths = new ArrayList<String>();
  List<DataUpdater<ZNRecord>> updaters = new ArrayList<DataUpdater<ZNRecord>>();
  for (int i = 0; i < 10; i++) {
    String path = String.format("/%d", i);
    Assert.assertFalse(baseAccessor.exists(String.format("%s%s", root, path), 0), "Should not exist under new location");
    Assert.assertTrue(baseAccessor.exists(String.format("%s%s", fallbackRoot, path), 0), "Should exist under fallback location");
    paths.add(path);
    updaters.add(new MyDataUpdater("new" + i));
  }
  boolean[] succeed = store.updateChildren(paths, updaters, AccessOption.PERSISTENT);
  for (int i = 0; i < 10; i++) {
    Assert.assertTrue(succeed[i]);
    String path = paths.get(i);
    // fallback path should remain unchanged
    ZNRecord record = baseAccessor.get(String.format("%s%s", fallbackRoot, path), null, 0);
    Assert.assertNotNull(record);
    Assert.assertEquals(record.getId(), "" + i);
    Assert.assertNull(record.getSimpleField("key"));
    // new path should have simple field set
    record = baseAccessor.get(String.format("%s%s", root, path), null, 0);
    Assert.assertNotNull(record);
    Assert.assertEquals(record.getId(), "" + i);
    Assert.assertNotNull(record.getSimpleField("key"));
    Assert.assertEquals(record.getSimpleField("key"), "value");
  }
  System.out.println("END " + clusterName + " at " + new Date(System.currentTimeMillis()));
}
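This test and the next reference a MyDataUpdater helper that is not included in the snippets. The assertions (an existing record keeps its id and gains a "key"="value" simple field, while a record created from scratch carries the constructor id and no "key" field) suggest a minimal version along these lines; this is an inferred sketch, not the actual test helper.

class MyDataUpdater implements DataUpdater<ZNRecord> {

  final String _id;

  MyDataUpdater(String id) {
    _id = id;
  }

  @Override
  public ZNRecord update(ZNRecord currentData) {
    if (currentData == null) {
      // nothing at this path yet: create a fresh record with the given id
      currentData = new ZNRecord(_id);
    } else {
      // existing data (possibly copied over from the fallback path): tag it
      currentData.setSimpleField("key", "value");
    }
    return currentData;
  }
}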
use of org.I0Itec.zkclient.DataUpdater in project helix by apache.
the class TestAutoFallbackPropertyStore method testMultiUpdateUsingNewPath.
@Test
public void testMultiUpdateUsingNewPath() {
  String className = TestHelper.getTestClassName();
  String methodName = TestHelper.getTestMethodName();
  String clusterName = className + "_" + methodName;
  System.out.println("START " + clusterName + " at " + new Date(System.currentTimeMillis()));
  String root = String.format("/%s/%s", clusterName, PropertyType.PROPERTYSTORE.name());
  String fallbackRoot = String.format("/%s/%s", clusterName, "HELIX_PROPERTYSTORE");
  ZkBaseDataAccessor<ZNRecord> baseAccessor = new ZkBaseDataAccessor<ZNRecord>(_gZkClient);
  // create 0-9 under both fallbackRoot and new root
  for (int i = 0; i < 10; i++) {
    String path = String.format("%s/%d", fallbackRoot, i);
    baseAccessor.create(path, new ZNRecord(Integer.toString(i)), AccessOption.PERSISTENT);
    path = String.format("%s/%d", root, i);
    baseAccessor.create(path, new ZNRecord("new" + i), AccessOption.PERSISTENT);
  }
  AutoFallbackPropertyStore<ZNRecord> store = new AutoFallbackPropertyStore<ZNRecord>(baseAccessor, root, fallbackRoot);
  List<String> paths = new ArrayList<String>();
  List<DataUpdater<ZNRecord>> updaters = new ArrayList<DataUpdater<ZNRecord>>();
  for (int i = 0; i < 20; i++) {
    String path = String.format("/%d", i);
    if (i < 10) {
      Assert.assertTrue(baseAccessor.exists(String.format("%s%s", root, path), 0), "Should exist under new location");
      Assert.assertTrue(baseAccessor.exists(String.format("%s%s", fallbackRoot, path), 0), "Should exist under fallback location");
    } else {
      Assert.assertFalse(baseAccessor.exists(String.format("%s%s", root, path), 0), "Should not exist under new location");
      Assert.assertFalse(baseAccessor.exists(String.format("%s%s", fallbackRoot, path), 0), "Should not exist under fallback location");
    }
    paths.add(path);
    updaters.add(new MyDataUpdater("new" + i));
  }
  boolean[] succeed = store.updateChildren(paths, updaters, AccessOption.PERSISTENT);
  for (int i = 0; i < 20; i++) {
    Assert.assertTrue(succeed[i]);
    String path = paths.get(i);
    // fallback path should remain unchanged
    if (i < 10) {
      ZNRecord record = baseAccessor.get(String.format("%s%s", fallbackRoot, path), null, 0);
      Assert.assertNotNull(record);
      Assert.assertEquals(record.getId(), "" + i);
      Assert.assertNull(record.getSimpleField("key"));
    } else {
      Assert.assertFalse(baseAccessor.exists(String.format("%s%s", fallbackRoot, path), 0), "Should not exist under fallback location");
    }
    // new path should have simple field set
    ZNRecord record = baseAccessor.get(String.format("%s%s", root, path), null, 0);
    Assert.assertNotNull(record);
    Assert.assertEquals(record.getId(), "new" + i);
    if (i < 10) {
      Assert.assertNotNull(record.getSimpleField("key"));
      Assert.assertEquals(record.getSimpleField("key"), "value");
    } else {
      Assert.assertNull(record.getSimpleField("key"));
    }
  }
  System.out.println("END " + clusterName + " at " + new Date(System.currentTimeMillis()));
}