Usage example of org.apache.asterix.event.schema.pattern.Nodeid in the Apache AsterixDB project: class PatternCreator, method getLibraryUninstallPattern.
/**
 * Builds the sequence of event patterns that uninstalls an external library from an
 * AsterixDB instance.
 *
 * <p>First an "uninstall" marker file named {@code <dataverse>.<libraryName>} is created on
 * the metadata node, then the extracted library directory is deleted. When the working
 * directory is not shared via NFS, every remaining node (and the master node) holds its own
 * copy of the library directory, so an additional delete is issued for each of them.
 *
 * @param instance    the target Asterix instance
 * @param dataverse   dataverse the library was installed under
 * @param libraryName name of the library to remove
 * @return the ordered patterns to execute for the uninstall
 * @throws Exception if the pattern construction fails
 */
public Patterns getLibraryUninstallPattern(AsterixInstance instance, String dataverse, String libraryName)
        throws Exception {
    Cluster cluster = instance.getCluster();
    String workingDir = cluster.getWorkingDir().getDir();
    List<Pattern> patterns = new ArrayList<>();

    // Step 1: drop the uninstall marker file "<dataverse>.<library>" on the metadata node.
    String markerArgs = workingDir + File.separator + "uninstall" + " " + dataverse + "." + libraryName;
    Nodeid metadataNode = new Nodeid(new Value(null, instance.getMetadataNodeId()));
    patterns.add(new Pattern(null, 1, null, new Event("file_create", metadataNode, markerArgs)));

    // Step 2: delete the extracted library directory, starting with the first node.
    String libDir = workingDir + File.separator + "library" + File.separator + dataverse
            + File.separator + libraryName;
    Iterator<Node> targets = cluster.getNode().iterator();
    Node firstNode = targets.next();
    patterns.add(new Pattern(null, 1, null,
            new Event("file_delete", new Nodeid(new Value(null, firstNode.getId())), libDir)));

    // Without a shared (NFS) working directory, each remaining node and the master node
    // keep their own local copy of the library directory and must delete it themselves.
    if (!cluster.getWorkingDir().isNFS()) {
        while (targets.hasNext()) {
            Node node = targets.next();
            patterns.add(new Pattern(null, 1, null,
                    new Event("file_delete", new Nodeid(new Value(null, node.getId())), libDir)));
        }
        Nodeid masterNode = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
        patterns.add(new Pattern(null, 1, null, new Event("file_delete", masterNode, libDir)));
    }
    return new Patterns(patterns);
}
Usage example of org.apache.asterix.event.schema.pattern.Nodeid in the Apache AsterixDB project: class PatternCreator, method createRemoveHDFSBackupPattern.
/**
 * Builds the pattern that removes an instance's backup data from HDFS.
 *
 * <p>The delete is launched from the first node in the cluster and targets the
 * {@code <hdfsBackupDir>/<instanceName>} path using the HDFS URL and Hadoop version taken
 * from the event-service backup configuration.
 *
 * @param instance      the Asterix instance whose backups are being removed
 * @param hdfsBackupDir root HDFS directory holding backups
 * @return a single-pattern {@link Patterns} performing the HDFS delete
 * @throws Exception if the pattern construction fails
 */
private Patterns createRemoveHDFSBackupPattern(AsterixInstance instance, String hdfsBackupDir) throws Exception {
    Cluster cluster = instance.getCluster();
    // HDFS connection details come from the event-service configuration.
    String hdfsUrl = AsterixEventService.getConfiguration().getBackup().getHdfs().getUrl();
    String hadoopVersion = AsterixEventService.getConfiguration().getBackup().getHdfs().getVersion();
    Node launcher = cluster.getNode().get(0);
    String pathToDelete = hdfsBackupDir + File.separator + instance.getName();
    String args = cluster.getWorkingDir().getDir() + " " + hadoopVersion + " " + hdfsUrl + " " + pathToDelete;
    Event hdfsDelete = new Event("hdfs_delete", new Nodeid(new Value(null, launcher.getId())), args);
    List<Pattern> patterns = new ArrayList<>();
    patterns.add(new Pattern(null, 1, null, hdfsDelete));
    return new Patterns(patterns);
}
Usage example of org.apache.asterix.event.schema.pattern.Nodeid in the Apache AsterixDB project: class PatternCreator, method createRemoveAsterixRootMetadata.
/**
 * Builds the patterns that delete the metadata root directory on every node of the cluster.
 *
 * <p>For each node, the metadata root is located on the node's first (primary) iodevice;
 * node-level iodevice settings override the cluster-wide default when present.
 *
 * @param instance the Asterix instance whose metadata roots are being removed
 * @return one file-delete pattern per cluster node
 * @throws Exception if the pattern construction fails
 */
private Patterns createRemoveAsterixRootMetadata(AsterixInstance instance) throws Exception {
    Cluster cluster = instance.getCluster();
    List<Pattern> patterns = new ArrayList<>();
    for (Node node : cluster.getNode()) {
        // Node-level iodevices take precedence over the cluster-wide setting.
        String iodevices = node.getIodevices() != null ? node.getIodevices() : cluster.getIodevices();
        // The metadata root lives on the first iodevice in the comma-separated list.
        String metadataRoot = iodevices.split(",")[0].trim() + File.separator + StorageConstants.METADATA_ROOT;
        Event deleteEvent = new Event("file_delete", new Nodeid(new Value(null, node.getId())), metadataRoot);
        patterns.add(new Pattern(null, 1, null, deleteEvent));
    }
    return new Patterns(patterns);
}
Usage example of org.apache.asterix.event.schema.pattern.Nodeid in the Apache AsterixDB project: class PatternCreator, method createRemoveAsterixWorkingDirPattern.
/**
 * Builds the patterns that delete the instance's working directory.
 *
 * <p>The master node always removes its copy. When the working directory is not shared via
 * NFS, each cluster node holds its own local copy and gets a delete pattern as well.
 *
 * @param instance the Asterix instance whose working directory is being removed
 * @return the file-delete patterns for the working directory
 * @throws Exception if the pattern construction fails
 */
public Patterns createRemoveAsterixWorkingDirPattern(AsterixInstance instance) throws Exception {
    Cluster cluster = instance.getCluster();
    String workingDir = cluster.getWorkingDir().getDir();
    List<Pattern> patterns = new ArrayList<>();
    // The master node always removes its copy of the working directory.
    Event masterDelete = new Event("file_delete",
            new Nodeid(new Value(null, cluster.getMasterNode().getId())), workingDir);
    patterns.add(new Pattern(null, 1, null, masterDelete));
    // Without NFS, every NC has a private copy that must be deleted too.
    if (!cluster.getWorkingDir().isNFS()) {
        for (Node node : cluster.getNode()) {
            Event ncDelete = new Event("file_delete", new Nodeid(new Value(null, node.getId())), workingDir);
            patterns.add(new Pattern(null, 1, null, ncDelete));
        }
    }
    return new Patterns(patterns);
}
Usage example of org.apache.asterix.event.schema.pattern.Nodeid in the Apache AsterixDB project: class PatternCreator, method getLocalBackUpAsterixPattern.
/**
 * Builds the patterns that take a local (on-disk) backup of the instance on every node.
 *
 * <p>Each node receives a "backup" event whose argument string carries the working
 * directory, instance name, the node's iodevices, store, metadata root, transaction-log
 * directory, a backup id, the backup destination, the literal mode "local", and the node id.
 * Per-node iodevice/txn-log settings fall back to the cluster-wide values when absent.
 *
 * @param instance   the Asterix instance to back up
 * @param backupConf backup configuration supplying the destination directory
 * @return one backup pattern per cluster node
 * @throws Exception if the pattern construction fails
 */
private Patterns getLocalBackUpAsterixPattern(AsterixInstance instance, Backup backupConf) throws Exception {
    Cluster cluster = instance.getCluster();
    String workingDir = cluster.getWorkingDir().getDir();
    String backupDir = backupConf.getBackupDir();
    // The backup id is the count of backups already recorded for this instance.
    String backupId = Integer.toString(instance.getBackupInfo().size());
    String store = cluster.getStore();
    List<Pattern> patterns = new ArrayList<>();
    for (Node node : cluster.getNode()) {
        // Per-node settings override the cluster-wide defaults when present.
        String iodevices = node.getIodevices() != null ? node.getIodevices() : cluster.getIodevices();
        String txnLogDir = node.getTxnLogDir() != null ? node.getTxnLogDir() : cluster.getTxnLogDir();
        String args = workingDir + " " + instance.getName() + " " + iodevices + " " + store + " "
                + StorageConstants.METADATA_ROOT + " " + txnLogDir + " " + backupId + " "
                + backupDir + " local " + node.getId();
        Event backupEvent = new Event("backup", new Nodeid(new Value(null, node.getId())), args);
        patterns.add(new Pattern(null, 1, null, backupEvent));
    }
    return new Patterns(patterns);
}
Aggregations