use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.
the class IgfsBackupFailoverSelfTest method testWriteFailoverWhileStoppingMultipleNodes.
/**
* @throws Exception If failed.
*/
public void testWriteFailoverWhileStoppingMultipleNodes() throws Exception {
    final IgfsImpl igfs0 = nodeDatas[0].igfsImpl;

    clear(igfs0);

    IgfsAbstractSelfTest.create(igfs0, paths(DIR, SUBDIR), null);

    final IgfsOutputStream[] outStreams = new IgfsOutputStream[files];

    // Create files:
    for (int f = 0; f < files; f++) {
        final byte[] data = createChunk(fileSize, f);

        IgfsOutputStream os = null;

        try {
            os = igfs0.create(filePath(f), 256, true, null, 0, -1, null);

            assert os != null;

            writeFileChunks(os, data);
        }
        finally {
            if (os != null)
                os.flush();
        }

        outStreams[f] = os;

        X.println("write #1 completed: " + f);
    }

    final AtomicBoolean stop = new AtomicBoolean();

    GridTestUtils.runMultiThreadedAsync(new Callable() {
        @Override
        public Object call() throws Exception {
            // Some delay to ensure read is in progress.
            Thread.sleep(10_000);

            // Now stop all the nodes but the 1st:
            for (int n = 1; n < numIgfsNodes; n++) {
                stopGrid(n);

                X.println("#### grid " + n + " stopped.");
            }

            // Thread.sleep(10_000);
            stop.set(true);

            return null;
        }
    }, 1, "igfs-node-stopper");

    // Write #2:
    for (int f0 = 0; f0 < files; f0++) {
        final IgfsOutputStream os = outStreams[f0];

        assert os != null;

        final int f = f0;

        int att = doWithRetries(1, new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                IgfsOutputStream ios = os;

                try {
                    writeChunks0(igfs0, ios, f);
                }
                catch (IOException ioe) {
                    log().warning("Attempt to append the data to existing stream failed: ", ioe);

                    ios = igfs0.append(filePath(f), false);

                    assert ios != null;

                    writeChunks0(igfs0, ios, f);
                }

                return null;
            }
        });

        assert att == 1;

        X.println("write #2 completed: " + f0 + " in " + att + " attempts.");
    }

    GridTestUtils.waitForCondition(new GridAbsPredicate() {
        @Override
        public boolean apply() {
            return stop.get();
        }
    }, 25_000);

    // Check files:
    for (int f = 0; f < files; f++) {
        IgfsPath path = filePath(f);

        byte[] data = createChunk(fileSize, f);

        // Check through 1st node:
        checkExist(igfs0, path);

        assertEquals("File length mismatch.", data.length * 2, igfs0.size(path));

        checkFileContent(igfs0, path, data, data);

        X.println("Read test completed: " + f);
    }
}
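The doWithRetries helper used in write #2 belongs to the test class but is not shown here. Below is a minimal sketch of what such a retry helper might look like, assuming it runs the closure up to a maximum number of attempts and returns the number of attempts actually made; the method name and exact signature are assumptions, not the project's code.

// Hypothetical retry helper (sketch only): retries the closure up to maxAttempts
// times, returns the attempt number on which it succeeded, and rethrows the last
// failure if every attempt fails. Requires java.util.concurrent.Callable.
static int doWithRetriesSketch(int maxAttempts, Callable<Void> clo) throws Exception {
    for (int att = 1; ; att++) {
        try {
            clo.call();

            return att; // Succeeded on this attempt.
        }
        catch (Exception e) {
            if (att >= maxAttempts)
                throw e; // Out of attempts: propagate the last failure.
        }
    }
}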
use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.
the class IgfsUtils method readPath.
/**
* Read non-null path from the input.
*
* @param in Input.
* @return IGFS path.
* @throws IOException If failed.
*/
public static IgfsPath readPath(ObjectInput in) throws IOException {
    IgfsPath res = new IgfsPath();

    res.readExternal(in);

    return res;
}
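As a hedged usage sketch (not taken from the project), readPath can be paired with IgfsPath.writeExternal, the counterpart of the readExternal call above, to round-trip a path through ordinary object streams. The sample path value is arbitrary.

// Round-trip sketch: write a path with writeExternal(..) and read it back via readPath(..).
// Requires java.io.ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream.
public static void pathRoundTripExample() throws IOException {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();

    try (ObjectOutputStream out = new ObjectOutputStream(bout)) {
        new IgfsPath("/dir/subdir/file").writeExternal(out);
    }

    try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bout.toByteArray()))) {
        IgfsPath restored = IgfsUtils.readPath(in);

        assert "/dir/subdir/file".equals(restored.toString());
    }
}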
use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.
the class HadoopAbstractMapReduceTest method checkJobStatistics.
/**
* Simple test of job statistics.
*
* @param jobId Job id.
* @throws IgniteCheckedException If failed.
* @throws IOException If failed.
*/
private void checkJobStatistics(HadoopJobId jobId) throws IgniteCheckedException, IOException {
    HadoopCounters cntrs = grid(0).hadoop().counters(jobId);

    HadoopPerformanceCounter perfCntr = HadoopPerformanceCounter.getCounter(cntrs, null);

    Map<String, SortedMap<Integer, Long>> tasks = new TreeMap<>();

    Map<String, Integer> phaseOrders = new HashMap<>();
    phaseOrders.put("submit", 0);
    phaseOrders.put("prepare", 1);
    phaseOrders.put("start", 2);
    phaseOrders.put("Cstart", 3);
    phaseOrders.put("finish", 4);

    String prevTaskId = null;

    long apiEvtCnt = 0;

    for (T2<String, Long> evt : perfCntr.evts()) {
        // We expect string pattern: COMBINE 1 run 7fa86a14-5a08-40e3-a7cb-98109b52a706
        String[] parsedEvt = evt.get1().split(" ");

        String taskId;
        String taskPhase;

        if ("JOB".equals(parsedEvt[0])) {
            taskId = parsedEvt[0];
            taskPhase = parsedEvt[1];
        }
        else {
            taskId = ("COMBINE".equals(parsedEvt[0]) ? "MAP" : parsedEvt[0].substring(0, 3)) + parsedEvt[1];
            taskPhase = ("COMBINE".equals(parsedEvt[0]) ? "C" : "") + parsedEvt[2];
        }

        if (!taskId.equals(prevTaskId))
            tasks.put(taskId, new TreeMap<Integer, Long>());

        Integer pos = phaseOrders.get(taskPhase);

        assertNotNull("Invalid phase " + taskPhase, pos);

        tasks.get(taskId).put(pos, evt.get2());

        prevTaskId = taskId;

        apiEvtCnt++;
    }

    for (Map.Entry<String, SortedMap<Integer, Long>> task : tasks.entrySet()) {
        Map<Integer, Long> order = task.getValue();

        long prev = 0;

        for (Map.Entry<Integer, Long> phase : order.entrySet()) {
            assertTrue("Phase order of " + task.getKey() + " is invalid", phase.getValue() >= prev);

            prev = phase.getValue();
        }
    }

    final IgfsPath statPath = new IgfsPath("/xxx/" + USER + "/zzz/" + jobId + "/performance");

    assert GridTestUtils.waitForCondition(new GridAbsPredicate() {
        @Override
        public boolean apply() {
            return igfs.exists(statPath);
        }
    }, 20_000);

    final long apiEvtCnt0 = apiEvtCnt;

    boolean res = GridTestUtils.waitForCondition(new GridAbsPredicate() {
        @Override
        public boolean apply() {
            try {
                try (BufferedReader reader = new BufferedReader(new InputStreamReader(igfs.open(statPath)))) {
                    return apiEvtCnt0 == HadoopTestUtils.simpleCheckJobStatFile(reader);
                }
            }
            catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }, 10000);

    if (!res) {
        BufferedReader reader = new BufferedReader(new InputStreamReader(igfs.open(statPath)));

        assert false : "Invalid API events count [exp=" + apiEvtCnt0 + ", actual=" +
            HadoopTestUtils.simpleCheckJobStatFile(reader) + ']';
    }
}
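The loop above folds each performance-counter event name into a task key and a phase name. The same parsing logic, pulled out into a hypothetical standalone helper purely for illustration:

// Hypothetical helper mirroring the parsing above. Combiner events are attributed
// to their map task and their phase is prefixed with "C", e.g.
// "COMBINE 1 start <uuid>" -> {"MAP1", "Cstart"}, "JOB submit <uuid>" -> {"JOB", "submit"}.
static String[] parseTaskEvent(String evtName) {
    String[] parts = evtName.split(" ");

    if ("JOB".equals(parts[0]))
        return new String[] {parts[0], parts[1]};

    boolean combine = "COMBINE".equals(parts[0]);

    String taskId = (combine ? "MAP" : parts[0].substring(0, 3)) + parts[1];
    String taskPhase = (combine ? "C" : "") + parts[2];

    return new String[] {taskId, taskPhase};
}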
use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.
the class HadoopAbstractWordCountTest method generateTestFile.
/**
* Generates test file.
*
* @param path File name.
* @param wordCounts Words and counts.
* @throws Exception If failed.
*/
protected void generateTestFile(String path, Object... wordCounts) throws Exception {
    List<String> wordsArr = new ArrayList<>();

    // Generating
    for (int i = 0; i < wordCounts.length; i += 2) {
        String word = (String) wordCounts[i];
        int cnt = (Integer) wordCounts[i + 1];

        while (cnt-- > 0)
            wordsArr.add(word);
    }

    // Shuffling
    for (int i = 0; i < wordsArr.size(); i++) {
        int j = (int) (Math.random() * wordsArr.size());

        Collections.swap(wordsArr, i, j);
    }

    // Input file preparing
    PrintWriter testInputFileWriter = new PrintWriter(igfs.create(new IgfsPath(path), true));

    int j = 0;

    while (j < wordsArr.size()) {
        int i = 5 + (int) (Math.random() * 5);

        List<String> subList = wordsArr.subList(j, Math.min(j + i, wordsArr.size()));

        j += i;

        testInputFileWriter.println(Joiner.on(' ').join(subList));
    }

    testInputFileWriter.close();
}
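A hedged verification sketch (not part of the original test class): the generated file can be read back through the same IGFS instance and the word occurrences counted, assuming the space-separated layout produced above. The helper name is an assumption.

// Sketch only: re-read the generated file and count word occurrences.
// Requires java.io.BufferedReader, java.io.InputStreamReader and java.util.HashMap.
protected Map<String, Integer> countWords(String path) throws Exception {
    Map<String, Integer> counts = new HashMap<>();

    try (BufferedReader rdr = new BufferedReader(new InputStreamReader(igfs.open(new IgfsPath(path))))) {
        String line;

        while ((line = rdr.readLine()) != null) {
            for (String w : line.split(" ")) {
                if (!w.isEmpty())
                    counts.merge(w, 1, Integer::sum);
            }
        }
    }

    return counts;
}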
use of org.apache.ignite.igfs.IgfsPath in project ignite by apache.
the class HadoopMapReduceErrorResilienceTest method doTestRecoveryAfterAnError.
/**
* Tests that the job recovers correctly after an error.
*
* @param useNewBits Bit flags controlling whether the new mapper, combiner, and reducer APIs are used.
* @param simulatorKind Error simulator kind.
* @throws Exception On error.
*/
private void doTestRecoveryAfterAnError(int useNewBits, HadoopErrorSimulator.Kind simulatorKind) throws Exception {
    try {
        IgfsPath inDir = new IgfsPath(PATH_INPUT);

        igfs.mkdirs(inDir);

        IgfsPath inFile = new IgfsPath(inDir, HadoopWordCount2.class.getSimpleName() + "-input");

        generateTestFile(inFile.toString(), "red", red, "blue", blue, "green", green, "yellow", yellow);

        boolean useNewMapper = (useNewBits & 1) == 0;
        boolean useNewCombiner = (useNewBits & 2) == 0;
        boolean useNewReducer = (useNewBits & 4) == 0;

        for (int i = 0; i < 12; i++) {
            int bits = 1 << i;

            System.out.println("############################ Simulator kind = " + simulatorKind + ", Stage bits = " + bits);

            HadoopErrorSimulator sim = HadoopErrorSimulator.create(simulatorKind, bits);

            doTestWithErrorSimulator(sim, inFile, useNewMapper, useNewCombiner, useNewReducer);
        }
    }
    catch (Throwable t) {
        t.printStackTrace();

        fail("Unexpected throwable: " + t);
    }
}
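Under the decoding above, a cleared bit in useNewBits selects the new API, so 0 means all-new and 7 means all-old. A hypothetical driver (names are illustrative, not taken from the project) that exercises every combination for a given simulator kind:

// Illustrative only: run the recovery test for all eight mapper/combiner/reducer
// API combinations with the same error simulator kind.
private void doTestAllApiCombinations(HadoopErrorSimulator.Kind kind) throws Exception {
    for (int useNewBits = 0; useNewBits < 8; useNewBits++)
        doTestRecoveryAfterAnError(useNewBits, kind);
}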