Use of org.apache.asterix.event.schema.pattern.Patterns in project asterixdb by apache.
The class PatternCreator, method createRemoveAsterixWorkingDirPattern.
public Patterns createRemoveAsterixWorkingDirPattern(AsterixInstance instance) throws Exception {
    List<Pattern> patternList = new ArrayList<>();
    Cluster cluster = instance.getCluster();
    String workingDir = cluster.getWorkingDir().getDir();
    String pargs = workingDir;
    Nodeid nodeid = new Nodeid(new Value(null, cluster.getMasterNode().getId()));
    Event event = new Event("file_delete", nodeid, pargs);
    patternList.add(new Pattern(null, 1, null, event));
    if (!cluster.getWorkingDir().isNFS()) {
        for (Node node : cluster.getNode()) {
            nodeid = new Nodeid(new Value(null, node.getId()));
            event = new Event("file_delete", nodeid, pargs);
            patternList.add(new Pattern(null, 1, null, event));
        }
    }
    return new Patterns(patternList);
}
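The Patterns returned above only describes the work; a caller still has to hand it to the event machinery for execution. Below is a minimal sketch of one such call site, assuming an AsterixEventServiceClient with a submit(Patterns) entry point and a PatternCreator singleton accessor; those names are assumptions and are not shown in the snippet above.

// Minimal sketch (assumptions noted above): generate the patterns, then submit them
// so that "file_delete" runs on the master node and, for non-NFS setups, on every NC.
PatternCreator pc = PatternCreator.INSTANCE; // assumed singleton accessor
Patterns cleanupPatterns = pc.createRemoveAsterixWorkingDirPattern(instance);
AsterixEventServiceClient client = AsterixEventService.getAsterixEventServiceClient(instance.getCluster()); // assumed lookup
client.submit(cleanupPatterns);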
Use of org.apache.asterix.event.schema.pattern.Patterns in project asterixdb by apache.
The class PatternCreator, method getLocalBackUpAsterixPattern.
private Patterns getLocalBackUpAsterixPattern(AsterixInstance instance, Backup backupConf) throws Exception {
    Cluster cluster = instance.getCluster();
    String backupDir = backupConf.getBackupDir();
    String workingDir = cluster.getWorkingDir().getDir();
    String backupId = Integer.toString(instance.getBackupInfo().size());
    String iodevices;
    String txnLogDir;
    String store;
    String pargs;
    store = cluster.getStore();
    List<Pattern> patternList = new ArrayList<>();
    for (Node node : cluster.getNode()) {
        Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
        iodevices = node.getIodevices() == null ? instance.getCluster().getIodevices() : node.getIodevices();
        txnLogDir = node.getTxnLogDir() == null ? instance.getCluster().getTxnLogDir() : node.getTxnLogDir();
        // Space-separated argument string consumed by the "backup" event.
        pargs = workingDir + " " + instance.getName() + " " + iodevices + " " + store + " "
                + StorageConstants.METADATA_ROOT + " " + txnLogDir + " " + backupId + " " + backupDir
                + " " + "local" + " " + node.getId();
        Event event = new Event("backup", nodeid, pargs);
        patternList.add(new Pattern(null, 1, null, event));
    }
    return new Patterns(patternList);
}
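The "backup" event receives all of its parameters as one space-separated string, so the field order in pargs is significant. The sketch below rebuilds the same string with String.join purely to make that order explicit; the field meanings are read off the concatenation above, not taken from the backup script itself.

// Same argument string as above, one field per line for readability:
// working dir, instance name, iodevices, store dir, metadata root, txn log dir,
// backup id, backup dir, backup mode ("local"), node id.
String backupArgs = String.join(" ",
        workingDir,
        instance.getName(),
        iodevices,
        store,
        StorageConstants.METADATA_ROOT,
        txnLogDir,
        backupId,
        backupDir,
        "local",
        node.getId());
Event backupEvent = new Event("backup", nodeid, backupArgs);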
Use of org.apache.asterix.event.schema.pattern.Patterns in project asterixdb by apache.
The class PatternCreator, method createHadoopLibraryTransferPattern.
public Patterns createHadoopLibraryTransferPattern(Cluster cluster) throws Exception {
    List<Pattern> patternList = new ArrayList<>();
    String workingDir = cluster.getWorkingDir().getDir();
    String hadoopVersion = AsterixEventService.getConfiguration().getBackup().getHdfs().getVersion();
    File hadoopDir = new File(AsterixEventService.getEventHome() + File.separator + "hadoop-" + hadoopVersion);
    if (!hadoopDir.exists()) {
        throw new IllegalStateException("Hadoop version :" + hadoopVersion + " not supported");
    }
    Nodeid nodeid = new Nodeid(new Value(null, EventDriver.CLIENT_NODE.getId()));
    String username = cluster.getUsername() != null ? cluster.getUsername() : System.getProperty("user.name");
    String pargs = username + " " + hadoopDir.getAbsolutePath() + " " + cluster.getMasterNode().getClusterIp()
            + " " + workingDir;
    Event event = new Event("directory_transfer", nodeid, pargs);
    Pattern p = new Pattern(null, 1, null, event);
    addInitialDelay(p, 2, "sec");
    patternList.add(p);
    boolean copyToNC = !cluster.getWorkingDir().isNFS();
    if (copyToNC) {
        for (Node node : cluster.getNode()) {
            nodeid = new Nodeid(new Value(null, node.getId()));
            pargs = cluster.getUsername() + " " + hadoopDir.getAbsolutePath() + " " + node.getClusterIp()
                    + " " + workingDir;
            event = new Event("directory_transfer", nodeid, pargs);
            p = new Pattern(null, 1, null, event);
            addInitialDelay(p, 2, "sec");
            patternList.add(p);
        }
    }
    return new Patterns(patternList);
}
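addInitialDelay is used here but its body is not part of this excerpt. Based on the pattern schema classes that appear in these snippets, a plausible sketch is shown below; the Delay class and its (Value, String) constructor are assumptions about the generated schema, not code taken from PatternCreator.

// Hypothetical sketch of the helper used above: attach an initial delay to a pattern
// before it is scheduled. Delay and p.setDelay(...) are assumed schema members.
private void addInitialDelay(Pattern p, int delay, String unit) {
    Delay d = new Delay(new Value(null, Integer.toString(delay)), unit);
    p.setDelay(d);
}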
Use of org.apache.asterix.event.schema.pattern.Patterns in project asterixdb by apache.
The class PatternCreator, method getRestoreAsterixPattern.
public Patterns getRestoreAsterixPattern(AsterixInstance instance, BackupInfo backupInfo) throws Exception {
    BackupType backupType = backupInfo.getBackupType();
    Patterns patterns = null;
    switch (backupType) {
        case HDFS:
            patterns = getHDFSRestoreAsterixPattern(instance, backupInfo);
            break;
        case LOCAL:
            patterns = getLocalRestoreAsterixPattern(instance, backupInfo);
            break;
    }
    return patterns;
}
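getRestoreAsterixPattern only dispatches on the backup type recorded in BackupInfo, so the caller has to look that record up first. A minimal sketch of such a call site is below, assuming backups are addressed by their index in instance.getBackupInfo() (the same list whose size supplies the backup id in getLocalBackUpAsterixPattern above); backupIndex and patternCreator are hypothetical names.

// Pick an earlier backup by its position in the instance's backup history,
// then build the matching restore patterns (HDFS or LOCAL).
BackupInfo backupInfo = instance.getBackupInfo().get(backupIndex);
Patterns restorePatterns = patternCreator.getRestoreAsterixPattern(instance, backupInfo);
// restorePatterns is null only if the backup type is neither HDFS nor LOCAL.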
Use of org.apache.asterix.event.schema.pattern.Patterns in project asterixdb by apache.
The class PatternCreator, method createRemoveLocalBackupPattern.
private Patterns createRemoveLocalBackupPattern(AsterixInstance instance, String localBackupDir) throws Exception {
    List<Pattern> patternList = new ArrayList<>();
    Cluster cluster = instance.getCluster();
    String pathToDelete = localBackupDir + File.separator + instance.getName();
    String pargs = pathToDelete;
    // Track cluster IPs that have already been handled so the backup directory is
    // deleted only once per machine, even when several nodes share an IP.
    List<String> removedBackupDirs = new ArrayList<>();
    for (Node node : cluster.getNode()) {
        if (removedBackupDirs.contains(node.getClusterIp())) {
            continue;
        }
        Nodeid nodeid = new Nodeid(new Value(null, node.getId()));
        Event event = new Event("file_delete", nodeid, pargs);
        patternList.add(new Pattern(null, 1, null, event));
        removedBackupDirs.add(node.getClusterIp());
    }
    return new Patterns(patternList);
}