Use of `org.apache.asterix.event.schema.cluster.Cluster` in the Apache asterixdb project — class `PatternCreator`, method `createRemoveAsterixStoragePattern`:
// Builds the event patterns that delete the Asterix storage directory on
// every io device of every node in the instance's cluster.
private Patterns createRemoveAsterixStoragePattern(AsterixInstance instance) throws Exception {
    List<Pattern> patterns = new ArrayList<>();
    Cluster cluster = instance.getCluster();
    for (Node node : cluster.getNode()) {
        Nodeid target = new Nodeid(new Value(null, node.getId()));
        // Per-node io devices override the cluster-wide default.
        String deviceList = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
        String storeDir = cluster.getStore().trim();
        for (String device : deviceList.trim().split(",")) {
            String storagePath = device.trim() + File.separator + storeDir;
            patterns.add(new Pattern(null, 1, null, new Event("file_delete", target, storagePath)));
        }
    }
    return new Patterns(patterns);
}
Use of `org.apache.asterix.event.schema.cluster.Cluster` in the Apache asterixdb project — class `PatternCreator`, method `createRemoveAsterixLogDirPattern`:
// Builds the event patterns that delete the log directory on the master node
// and on every worker node of the instance's cluster.
private Patterns createRemoveAsterixLogDirPattern(AsterixInstance instance) throws Exception {
    List<Pattern> patternList = new ArrayList<>();
    Cluster cluster = instance.getCluster();
    String clusterLogDir = cluster.getLogDir();
    // The master node always uses the cluster-wide log directory.
    Nodeid nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
    Event event = new Event("file_delete", nodeid, clusterLogDir);
    patternList.add(new Pattern(null, 1, null, event));
    for (Node node : cluster.getNode()) {
        nodeid = new Nodeid(new Value(null, node.getId()));
        // BUG FIX: the previous code only overwrote pargs when this node had
        // its own log dir, so a node with a null log dir silently reused the
        // PREVIOUS node's log dir instead of the cluster-wide default.
        String logDir = node.getLogDir() != null ? node.getLogDir() : clusterLogDir;
        event = new Event("file_delete", nodeid, logDir);
        patternList.add(new Pattern(null, 1, null, event));
    }
    return new Patterns(patternList);
}
Use of `org.apache.asterix.event.schema.cluster.Cluster` in the Apache asterixdb project — class `PatternCreator`, method `getLibraryUninstallPattern`:
// Builds the event patterns that uninstall the external library
// <dataverse>.<libraryName> from the metadata node and from every node of the
// instance's cluster.
public Patterns getLibraryUninstallPattern(AsterixInstance instance, String dataverse, String libraryName) throws Exception {
List<Pattern> patternList = new ArrayList<>();
Cluster cluster = instance.getCluster();
String workingDir = cluster.getWorkingDir().getDir();
String destFile = dataverse + "." + libraryName;
// Step 1: create an "uninstall" marker file on the metadata node.
String pargs = workingDir + File.separator + "uninstall" + " " + destFile;
String metadataNodeId = instance.getMetadataNodeId();
Nodeid nodeid = new Nodeid(new Value(null, metadataNodeId));
Event event = new Event("file_create", nodeid, pargs);
Pattern p = new Pattern(null, 1, null, event);
patternList.add(p);
Iterator<Node> uninstallTargets = cluster.getNode().iterator();
String libDir = workingDir + File.separator + "library" + File.separator + dataverse + File.separator + libraryName;
// Step 2: delete the library directory on the first cluster node.
// NOTE(review): assumes the cluster has at least one node; next() would
// otherwise throw NoSuchElementException — confirm callers guarantee this.
Node uninstallNode = uninstallTargets.next();
nodeid = new Nodeid(new Value(null, uninstallNode.getId()));
event = new Event("file_delete", nodeid, libDir);
p = new Pattern(null, 1, null, event);
patternList.add(p);
pargs = libDir;
// Step 3: when the working directory is NOT shared via NFS, the delete must
// be repeated on every remaining node and on the master node; with NFS the
// single delete above is presumably visible everywhere.
if (!cluster.getWorkingDir().isNFS()) {
while (uninstallTargets.hasNext()) {
uninstallNode = uninstallTargets.next();
nodeid = new Nodeid(new Value(null, uninstallNode.getId()));
event = new Event("file_delete", nodeid, pargs);
p = new Pattern(null, 1, null, event);
patternList.add(p);
}
nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
event = new Event("file_delete", nodeid, pargs);
p = new Pattern(null, 1, null, event);
patternList.add(p);
}
return new Patterns(patternList);
}
Use of `org.apache.asterix.event.schema.cluster.Cluster` in the Apache asterixdb project — class `PatternCreator`, method `createRemoveHDFSBackupPattern`:
// Builds the single-step pattern that removes this instance's backup
// directory from HDFS, launched from the first node in the cluster.
private Patterns createRemoveHDFSBackupPattern(AsterixInstance instance, String hdfsBackupDir) throws Exception {
    Cluster cluster = instance.getCluster();
    // HDFS connection details come from the event-service backup configuration.
    String url = AsterixEventService.getConfiguration().getBackup().getHdfs().getUrl();
    String version = AsterixEventService.getConfiguration().getBackup().getHdfs().getVersion();
    Node launcher = cluster.getNode().get(0);
    Nodeid target = new Nodeid(new Value(null, launcher.getId()));
    String deletePath = hdfsBackupDir + File.separator + instance.getName();
    String args = cluster.getWorkingDir().getDir() + " " + version + " " + url + " " + deletePath;
    List<Pattern> patterns = new ArrayList<>();
    patterns.add(new Pattern(null, 1, null, new Event("hdfs_delete", target, args)));
    return new Patterns(patterns);
}
Use of `org.apache.asterix.event.schema.cluster.Cluster` in the Apache asterixdb project — class `PatternCreator`, method `createRemoveAsterixRootMetadata`:
// Builds the event patterns that delete the metadata-root directory from each
// node's primary (first) io device.
private Patterns createRemoveAsterixRootMetadata(AsterixInstance instance) throws Exception {
    Cluster cluster = instance.getCluster();
    List<Pattern> patterns = new ArrayList<>();
    for (Node node : cluster.getNode()) {
        // Per-node io devices override the cluster default; the metadata root
        // lives on the first device in the comma-separated list.
        String iodevices = node.getIodevices() == null ? cluster.getIodevices() : node.getIodevices();
        String primaryDevice = iodevices.split(",")[0].trim();
        String metadataPath = primaryDevice + File.separator + StorageConstants.METADATA_ROOT;
        Nodeid target = new Nodeid(new Value(null, node.getId()));
        patterns.add(new Pattern(null, 1, null, new Event("file_delete", target, metadataPath)));
    }
    return new Patterns(patterns);
}
Aggregations