Use of java.io.Writer in project hadoop by apache.
The class TestLdapGroupsMapping, method testExtractPassword.
@Test
public void testExtractPassword() throws IOException {
    File testDir = GenericTestUtils.getTestDir();
    testDir.mkdirs();
    // Write the password to a plain-text file...
    File secretFile = new File(testDir, "secret.txt");
    Writer writer = new FileWriter(secretFile);
    writer.write("hadoop");
    writer.close();
    // ...and verify that extractPassword() reads it back verbatim.
    LdapGroupsMapping mapping = new LdapGroupsMapping();
    Assert.assertEquals("hadoop", mapping.extractPassword(secretFile.getPath()));
}
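The FileWriter above is closed manually and encodes with the platform default charset, which is fine for a test but fragile in general. A minimal sketch of the same file-writing step using try-with-resources and an explicit charset (the helper name writeSecret is invented for illustration and is not part of the Hadoop test):

private static void writeSecret(File dir, String secret) throws IOException {
    File secretFile = new File(dir, "secret.txt");
    // try-with-resources closes the writer even if write() throws;
    // pinning the charset avoids depending on the platform default
    try (Writer writer = new OutputStreamWriter(
            new FileOutputStream(secretFile), StandardCharsets.UTF_8)) {
        writer.write(secret);
    }
}

This needs java.nio.charset.StandardCharsets in addition to the java.io imports already in play above.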
Use of java.io.Writer in project hadoop by apache.
The class TestBadRecords, method createInput.
private void createInput() throws Exception {
    OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
    // Bridge the raw byte stream to characters (platform default charset).
    Writer wr = new OutputStreamWriter(os);
    for (String inp : input) {
        wr.write(inp + "\n");
    }
    wr.close();
}
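OutputStreamWriter is the usual bridge from a byte-oriented OutputStream (here, a stream on the test filesystem) to a character-oriented Writer. The one-argument constructor used above encodes with the JVM default charset; the standard two-argument constructor pins the encoding so the written bytes are identical on every platform. A hedged variant of the same loop:

Writer wr = new OutputStreamWriter(os, StandardCharsets.UTF_8);
try {
    for (String inp : input) {
        wr.write(inp + "\n");
    }
} finally {
    wr.close();  // flushes the encoder and closes the wrapped stream
}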
Use of java.io.Writer in project hadoop by apache.
The class RumenToSLSConverter, method generateSLSLoadFile.
private static void generateSLSLoadFile(String inputFile, String outputFile) throws IOException {
    try (Reader input = new InputStreamReader(new FileInputStream(inputFile), "UTF-8")) {
        try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
            ObjectMapper mapper = new ObjectMapper();
            ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
            // Iterate over consecutive top-level JSON values instead of
            // loading the whole Rumen trace into memory at once.
            Iterator<Map> i = mapper.readValues(new JsonFactory().createParser(input), Map.class);
            while (i.hasNext()) {
                Map m = i.next();
                output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
            }
        }
    }
}
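The Writer-adjacent detail worth noting here is the streaming read: mapper.readValues returns an iterator over consecutive top-level JSON values, so each converted job is written to the output Writer as soon as it is parsed. A self-contained sketch of that pattern with invented input data:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.StringReader;
import java.util.Iterator;
import java.util.Map;

public class StreamJsonDemo {
    public static void main(String[] args) throws Exception {
        // Two consecutive top-level JSON objects, like jobs in a trace file
        String data = "{\"job\":\"a\"} {\"job\":\"b\"}";
        ObjectMapper mapper = new ObjectMapper();
        Iterator<Map> it = mapper.readValues(
                new JsonFactory().createParser(new StringReader(data)), Map.class);
        while (it.hasNext()) {
            System.out.println(it.next());  // each value is parsed lazily
        }
    }
}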
Use of java.io.Writer in project hadoop by apache.
The class RumenToSLSConverter, method generateSLSNodeFile.
@SuppressWarnings("unchecked")
private static void generateSLSNodeFile(String outputFile) throws IOException {
    try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
        ObjectMapper mapper = new ObjectMapper();
        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
        // Emit one pretty-printed JSON object per rack; LinkedHashMap keeps
        // the "rack" key ahead of "nodes" in the serialized output.
        for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
            Map rack = new LinkedHashMap();
            rack.put("rack", entry.getKey());
            List nodes = new ArrayList();
            for (String name : entry.getValue()) {
                Map node = new LinkedHashMap();
                node.put("node", name);
                nodes.add(node);
            }
            rack.put("nodes", nodes);
            output.write(writer.writeValueAsString(rack) + EOL);
        }
    }
}
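Because each rack is built as a LinkedHashMap, Jackson serializes its keys in insertion order, so "rack" always precedes "nodes" in the node file. A standalone sketch showing the shape of one emitted entry (rack and node names invented):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class RackJsonDemo {
    public static void main(String[] args) throws Exception {
        Map<String, Object> rack = new LinkedHashMap<>();
        rack.put("rack", "rack0");
        List<Map<String, Object>> nodes = new ArrayList<>();
        Map<String, Object> node = new LinkedHashMap<>();
        node.put("node", "node0");
        nodes.add(node);
        rack.put("nodes", nodes);
        // Prints roughly: { "rack" : "rack0", "nodes" : [ { "node" : "node0" } ] }
        System.out.println(new ObjectMapper()
                .writerWithDefaultPrettyPrinter().writeValueAsString(rack));
    }
}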
Use of java.io.Writer in project hadoop by apache.
The class TestTaskLogAppender, method testTaskLogAppender.
/**
 * test TaskLogAppender
 */
@SuppressWarnings("deprecation")
@Test(timeout = 5000)
public void testTaskLogAppender() {
    TaskLogAppender appender = new TaskLogAppender();
    System.setProperty(TaskLogAppender.TASKID_PROPERTY, "attempt_01_02_m03_04_001");
    System.setProperty(TaskLogAppender.LOGSIZE_PROPERTY, "1003");
    appender.activateOptions();
    assertEquals(appender.getTaskId(), "attempt_01_02_m03_04_001");
    // The appender rounds the configured size down to a whole number of its
    // fixed-size event slots, so 1003 reads back as 1000.
    assertEquals(appender.getTotalLogFileSize(), 1000);
    assertEquals(appender.getIsCleanup(), false);
    // test the writer: a StringWriter captures the formatted output in memory
    Writer writer = new StringWriter();
    appender.setWriter(writer);
    Layout layout = new PatternLayout("%-5p [%t]: %m%n");
    appender.setLayout(layout);
    Category logger = Logger.getLogger(getClass().getName());
    LoggingEvent event = new LoggingEvent("fqnOfCategoryClass", logger, Priority.INFO, "message", new Throwable());
    appender.append(event);
    appender.flush();
    appender.close();
    assertTrue(writer.toString().length() > 0);
    // the cleanup flag set here should not be reset by activateOptions()
    appender = new TaskLogAppender();
    appender.setIsCleanup(true);
    appender.activateOptions();
    assertEquals(appender.getIsCleanup(), true);
}
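The java.io.Writer angle in this test is the StringWriter: an in-memory Writer whose toString() exposes everything the appender emitted, which is what makes the final assertion possible without touching the filesystem. A minimal sketch of that capture pattern on its own, outside log4j:

import java.io.PrintWriter;
import java.io.StringWriter;

public class CaptureDemo {
    public static void main(String[] args) {
        StringWriter writer = new StringWriter();  // buffers characters in memory
        PrintWriter out = new PrintWriter(writer);
        // Mimics the "%-5p [%t]: %m%n" pattern layout used in the test
        out.printf("%-5s [%s]: %s%n", "INFO", Thread.currentThread().getName(), "message");
        out.flush();
        System.out.print(writer.toString());  // the captured output as a String
    }
}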