Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class SimpleEntityWriterV2, method writeEntities:
protected void writeEntities(Configuration tlConf,
    TimelineCollectorManager manager, Context context) throws IOException {
  Configuration conf = context.getConfiguration();
  // simulate the app id with the task id
  int taskId = context.getTaskAttemptID().getTaskID().getId();
  long timestamp = conf.getLong(TIMELINE_SERVICE_PERFORMANCE_RUN_ID, 0);
  ApplicationId appId = ApplicationId.newInstance(timestamp, taskId);
  // create the app-level timeline collector
  AppLevelTimelineCollector collector = new AppLevelTimelineCollector(appId);
  manager.putIfAbsent(appId, collector);
  try {
    // set the context: flow name from the job name, flow run id from the
    // run timestamp, and the user id
    TimelineCollectorContext tlContext = collector.getTimelineEntityContext();
    tlContext.setFlowName(context.getJobName());
    tlContext.setFlowRunId(timestamp);
    tlContext.setUserId(context.getUser());
    final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
    long totalTime = 0;
    final int testtimes = conf.getInt(TEST_TIMES, TEST_TIMES_DEFAULT);
    final Random rand = new Random();
    final TaskAttemptID taskAttemptId = context.getTaskAttemptID();
    final char[] payLoad = new char[kbs * 1024];
    for (int i = 0; i < testtimes; i++) {
      // generate a fixed-length random payload
      for (int xx = 0; xx < kbs * 1024; xx++) {
        int alphaNumIdx = rand.nextInt(ALPHA_NUMS.length);
        payLoad[xx] = ALPHA_NUMS[alphaNumIdx];
      }
      String entId = taskAttemptId + "_" + i;
      final TimelineEntity entity = new TimelineEntity();
      entity.setId(entId);
      entity.setType("FOO_ATTEMPT");
      entity.addInfo("PERF_TEST", payLoad);
      // add an event
      TimelineEvent event = new TimelineEvent();
      event.setId("foo_event_id");
      event.setTimestamp(System.currentTimeMillis());
      event.addInfo("foo_event", "test");
      entity.addEvent(event);
      // add a metric
      TimelineMetric metric = new TimelineMetric();
      metric.setId("foo_metric");
      metric.addValue(System.currentTimeMillis(), 123456789L);
      entity.addMetric(metric);
      // add a config
      entity.addConfig("foo", "bar");
      TimelineEntities entities = new TimelineEntities();
      entities.addEntity(entity);
      // write as the current user
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      long startWrite = System.nanoTime();
      try {
        collector.putEntities(entities, ugi);
      } catch (Exception e) {
        context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_FAILURES)
            .increment(1);
        LOG.error("writing to the timeline service failed", e);
      }
      long endWrite = System.nanoTime();
      totalTime += TimeUnit.NANOSECONDS.toMillis(endWrite - startWrite);
    }
    LOG.info("wrote " + testtimes + " entities (" + kbs * testtimes
        + " kB) in " + totalTime + " ms");
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_TIME)
        .increment(totalTime);
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_COUNTER)
        .increment(testtimes);
    context.getCounter(PerfCounters.TIMELINE_SERVICE_WRITE_KBS)
        .increment(kbs * testtimes);
  } finally {
    // clean up the collector for this simulated app
    manager.remove(appId);
  }
}
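
The loop body above rebuilds the same entity shape on every iteration. As a hedged sketch (not part of the Hadoop source; the helper name buildPerfEntity is an assumption for illustration), the construction could be factored out so only the id and payload vary per write, using exactly the TimelineEntity, TimelineEvent, and TimelineMetric calls shown above:

private static TimelineEntity buildPerfEntity(String entId, char[] payLoad) {
  TimelineEntity entity = new TimelineEntity();
  entity.setId(entId);
  entity.setType("FOO_ATTEMPT");
  // attach the random payload so each write carries a fixed-size body
  entity.addInfo("PERF_TEST", payLoad);
  TimelineEvent event = new TimelineEvent();
  event.setId("foo_event_id");
  event.setTimestamp(System.currentTimeMillis());
  event.addInfo("foo_event", "test");
  entity.addEvent(event);
  TimelineMetric metric = new TimelineMetric();
  metric.setId("foo_metric");
  metric.addValue(System.currentTimeMillis(), 123456789L);
  entity.addMetric(metric);
  entity.addConfig("foo", "bar");
  return entity;
}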
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class TestSystemMetricsPublisherForV2, method verifyEntity:
private static void verifyEntity(File entityFile, long expectedEvents,
    String eventForCreatedTime, long expectedMetrics) throws IOException {
  long count = 0;
  long metricsCount = 0;
  // each non-blank line of the entity file is one JSON-serialized entity;
  // try-with-resources closes the reader even if the constructor or a read
  // throws, which the original null-check-in-finally pattern did not
  try (BufferedReader reader =
      new BufferedReader(new FileReader(entityFile))) {
    String strLine;
    while ((strLine = reader.readLine()) != null) {
      if (strLine.trim().length() > 0) {
        TimelineEntity entity = FileSystemTimelineReaderImpl
            .getTimelineRecordFromJSON(strLine.trim(), TimelineEntity.class);
        metricsCount = entity.getMetrics().size();
        for (TimelineEvent event : entity.getEvents()) {
          if (event.getId().equals(eventForCreatedTime)) {
            assertTrue(entity.getCreatedTime() > 0);
            break;
          }
        }
        count++;
      }
    }
  }
  assertEquals("Expected " + expectedEvents + " events to be published",
      expectedEvents, count);
  assertEquals("Expected " + expectedMetrics + " metrics to be published",
      expectedMetrics, metricsCount);
}
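
The inner scan for a named event is a pattern other assertions could reuse. A minimal sketch, assuming a hypothetical helper name hasEvent, built only from the getEvents/getId accessors used above:

// hypothetical helper, not in the Hadoop source
private static boolean hasEvent(TimelineEntity entity, String eventId) {
  for (TimelineEvent event : entity.getEvents()) {
    if (eventId.equals(event.getId())) {
      return true;
    }
  }
  return false;
}

With it, the created-time check above reduces to: if (hasEvent(entity, eventForCreatedTime)) { assertTrue(entity.getCreatedTime() > 0); }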
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class DataGeneratorForTest, method loadApps:
static void loadApps(HBaseTestingUtility util) throws IOException {
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  String id = "application_1111111111_2222";
  entity.setId(id);
  entity.setType(TimelineEntityType.YARN_APPLICATION.toString());
  Long cTime = 1425016502000L;
  entity.setCreatedTime(cTime);
  // add the info map to the timeline entity
  Map<String, Object> infoMap = new HashMap<>();
  infoMap.put("infoMapKey1", "infoMapValue2");
  infoMap.put("infoMapKey2", 20);
  infoMap.put("infoMapKey3", 85.85);
  entity.addInfo(infoMap);
  // add the isRelatedTo info
  Set<String> isRelatedToSet = new HashSet<>();
  isRelatedToSet.add("relatedto1");
  Map<String, Set<String>> isRelatedTo = new HashMap<>();
  isRelatedTo.put("task", isRelatedToSet);
  entity.setIsRelatedToEntities(isRelatedTo);
  // add the relatesTo info
  Set<String> relatesToSet = new HashSet<>();
  relatesToSet.add("relatesto1");
  relatesToSet.add("relatesto3");
  Map<String, Set<String>> relatesTo = new HashMap<>();
  relatesTo.put("container", relatesToSet);
  Set<String> relatesToSet11 = new HashSet<>();
  relatesToSet11.add("relatesto4");
  relatesTo.put("container1", relatesToSet11);
  entity.setRelatesToEntities(relatesTo);
  // add some config entries
  Map<String, String> conf = new HashMap<>();
  conf.put("config_param1", "value1");
  conf.put("config_param2", "value2");
  conf.put("cfg_param1", "value3");
  entity.addConfigs(conf);
  // add metrics: one time series and one single value
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId("MAP_SLOT_MILLIS");
  Map<Long, Number> metricValues = new HashMap<>();
  long ts = System.currentTimeMillis();
  metricValues.put(ts - 120000, 100000000);
  metricValues.put(ts - 100000, 200000000);
  metricValues.put(ts - 80000, 300000000);
  metricValues.put(ts - 60000, 400000000);
  metricValues.put(ts - 40000, 50000000000L);
  metricValues.put(ts - 20000, 60000000000L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  TimelineMetric m12 = new TimelineMetric();
  m12.setId("MAP1_BYTES");
  m12.addValue(ts, 50);
  metrics.add(m12);
  entity.addMetrics(metrics);
  TimelineEvent event = new TimelineEvent();
  event.setId("start_event");
  event.setTimestamp(ts);
  entity.addEvent(event);
  te.addEntity(entity);
  TimelineEntities te1 = new TimelineEntities();
  TimelineEntity entity1 = new TimelineEntity();
  String id1 = "application_1111111111_3333";
  entity1.setId(id1);
  entity1.setType(TimelineEntityType.YARN_APPLICATION.toString());
  entity1.setCreatedTime(cTime + 20L);
  // add the info map to the timeline entity
  Map<String, Object> infoMap1 = new HashMap<>();
  infoMap1.put("infoMapKey1", "infoMapValue1");
  infoMap1.put("infoMapKey2", 10);
  entity1.addInfo(infoMap1);
  // add the isRelatedTo info
  Set<String> isRelatedToSet1 = new HashSet<>();
  isRelatedToSet1.add("relatedto3");
  isRelatedToSet1.add("relatedto5");
  Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
  isRelatedTo1.put("task1", isRelatedToSet1);
  Set<String> isRelatedToSet11 = new HashSet<>();
  isRelatedToSet11.add("relatedto4");
  isRelatedTo1.put("task2", isRelatedToSet11);
  entity1.setIsRelatedToEntities(isRelatedTo1);
  // add the relatesTo info
  Set<String> relatesToSet1 = new HashSet<>();
  relatesToSet1.add("relatesto1");
  relatesToSet1.add("relatesto2");
  Map<String, Set<String>> relatesTo1 = new HashMap<>();
  relatesTo1.put("container", relatesToSet1);
  entity1.setRelatesToEntities(relatesTo1);
  // add some config entries
  Map<String, String> conf1 = new HashMap<>();
  conf1.put("cfg_param1", "value1");
  conf1.put("cfg_param2", "value2");
  entity1.addConfigs(conf1);
  // add metrics
  Set<TimelineMetric> metrics1 = new HashSet<>();
  TimelineMetric m2 = new TimelineMetric();
  m2.setId("MAP1_SLOT_MILLIS");
  Map<Long, Number> metricValues1 = new HashMap<>();
  long ts1 = System.currentTimeMillis();
  metricValues1.put(ts1 - 120000, 100000000);
  metricValues1.put(ts1 - 100000, 200000000);
  metricValues1.put(ts1 - 80000, 300000000);
  metricValues1.put(ts1 - 60000, 400000000);
  metricValues1.put(ts1 - 40000, 50000000000L);
  metricValues1.put(ts1 - 20000, 60000000000L);
  m2.setType(Type.TIME_SERIES);
  m2.setValues(metricValues1);
  metrics1.add(m2);
  entity1.addMetrics(metrics1);
  TimelineEvent event11 = new TimelineEvent();
  event11.setId("end_event");
  event11.setTimestamp(ts);
  entity1.addEvent(event11);
  TimelineEvent event12 = new TimelineEvent();
  event12.setId("update_event");
  event12.setTimestamp(ts - 10);
  entity1.addEvent(event12);
  te1.addEntity(entity1);
  TimelineEntities te2 = new TimelineEntities();
  TimelineEntity entity2 = new TimelineEntity();
  String id2 = "application_1111111111_4444";
  entity2.setId(id2);
  entity2.setType(TimelineEntityType.YARN_APPLICATION.toString());
  entity2.setCreatedTime(cTime + 40L);
  TimelineEvent event21 = new TimelineEvent();
  event21.setId("update_event");
  event21.setTimestamp(ts - 20);
  entity2.addEvent(event21);
  Set<String> isRelatedToSet2 = new HashSet<>();
  isRelatedToSet2.add("relatedto3");
  Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
  isRelatedTo2.put("task1", isRelatedToSet2);
  entity2.setIsRelatedToEntities(isRelatedTo2);
  Map<String, Set<String>> relatesTo3 = new HashMap<>();
  Set<String> relatesToSet14 = new HashSet<>();
  relatesToSet14.add("relatesto7");
  relatesTo3.put("container2", relatesToSet14);
  entity2.setRelatesToEntities(relatesTo3);
  te2.addEntity(entity2);
  HBaseTimelineWriterImpl hbi = null;
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(util.getConfiguration());
    hbi.start();
    String cluster = "cluster1";
    String user = "user1";
    String flow = "some_flow_name";
    String flowVersion = "AB7822C10F1111";
    long runid = 1002345678919L;
    String appName = "application_1111111111_2222";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te);
    appName = "application_1111111111_3333";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te1);
    appName = "application_1111111111_4444";
    hbi.write(cluster, user, flow, flowVersion, runid, appName, te2);
  } finally {
    if (hbi != null) {
      // stop and close the writer exactly once, even on failure
      hbi.stop();
      hbi.close();
    }
  }
}
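
The time-series metrics above are built with the same five-step pattern each time (new metric, setId, fill values, setType, setValues). A hedged sketch of a helper that collapses the pattern (makeTimeSeries is a hypothetical name, not in the Hadoop source; offsets are subtracted from the base timestamp exactly as loadApps does):

private static TimelineMetric makeTimeSeries(String id, long ts,
    Map<Long, Number> offsetToValue) {
  TimelineMetric metric = new TimelineMetric();
  metric.setId(id);
  metric.setType(Type.TIME_SERIES);
  Map<Long, Number> values = new HashMap<>();
  for (Map.Entry<Long, Number> e : offsetToValue.entrySet()) {
    // key = offset before the base timestamp, value = metric reading
    values.put(ts - e.getKey(), e.getValue());
  }
  metric.setValues(values);
  return metric;
}

With this helper, m1 above reduces to a single makeTimeSeries("MAP_SLOT_MILLIS", ts, ...) call over the six offset/value pairs.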
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class TestFlowDataGenerator, method getEntityMetricsApp1:
static TimelineEntity getEntityMetricsApp1(long insertTs) {
  TimelineEntity entity = new TimelineEntity();
  String id = "flowRunMetrics_test";
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setId(id);
  entity.setType(type);
  long cTime = 1425016501000L;
  entity.setCreatedTime(cTime);
  // add two time-series metrics anchored at the insert timestamp
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId(METRIC_1);
  Map<Long, Number> metricValues = new HashMap<>();
  long ts = insertTs;
  metricValues.put(ts - 100000, 2L);
  metricValues.put(ts - 80000, 40L);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  TimelineMetric m2 = new TimelineMetric();
  m2.setId(METRIC_2);
  metricValues = new HashMap<>();
  metricValues.put(ts - 100000, 31L);
  metricValues.put(ts - 80000, 57L);
  m2.setType(Type.TIME_SERIES);
  m2.setValues(metricValues);
  metrics.add(m2);
  entity.addMetrics(metrics);
  // add a creation event carrying an info entry the tests can match on
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  long endTs = 1439379885000L;
  event.setTimestamp(endTs);
  String expKey = "foo_event_greater";
  String expVal = "test_app_greater";
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  return entity;
}
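
Because both series are anchored at insertTs, the caller controls where the data points land relative to "now". A usage sketch (assuming the method is visible to the calling test):

TimelineEntity app =
    TestFlowDataGenerator.getEntityMetricsApp1(System.currentTimeMillis());
for (TimelineMetric m : app.getMetrics()) {
  // each metric carries two points, 100s and 80s before the insert time
  assertEquals(2, m.getValues().size());
}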
Use of org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent in project hadoop by apache.
From the class TestTimelineReaderWebServicesHBaseStorage, method loadData:
private static void loadData() throws Exception {
  String cluster = "cluster1";
  String user = "user1";
  String flow = "flow_name";
  String flowVersion = "CF7022C10F1354";
  Long runid = 1002345678919L;
  Long runid1 = 1002345678920L;
  TimelineEntities te = new TimelineEntities();
  TimelineEntity entity = new TimelineEntity();
  String id = "application_1111111111_1111";
  String type = TimelineEntityType.YARN_APPLICATION.toString();
  entity.setId(id);
  entity.setType(type);
  Long cTime = 1425016501000L;
  entity.setCreatedTime(cTime);
  entity.addConfig("cfg2", "value1");
  // add metrics
  Set<TimelineMetric> metrics = new HashSet<>();
  TimelineMetric m1 = new TimelineMetric();
  m1.setId("MAP_SLOT_MILLIS");
  Map<Long, Number> metricValues =
      ImmutableMap.of(ts - 100000, (Number) 2, ts - 90000, 7, ts - 80000, 40);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  m1 = new TimelineMetric();
  m1.setId("MAP1_SLOT_MILLIS");
  metricValues =
      ImmutableMap.of(ts - 100000, (Number) 2, ts - 90000, 9, ts - 80000, 40);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  m1 = new TimelineMetric();
  m1.setId("HDFS_BYTES_READ");
  metricValues = ImmutableMap.of(ts - 100000, (Number) 31, ts - 80000, 57);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  entity.addMetrics(metrics);
  // add a created event and a finished event
  TimelineEvent event = new TimelineEvent();
  event.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event.setTimestamp(cTime);
  String expKey = "foo_event";
  Object expVal = "test";
  event.addInfo(expKey, expVal);
  entity.addEvent(event);
  TimelineEvent event11 = new TimelineEvent();
  event11.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
  Long expTs = 1425019501000L;
  event11.setTimestamp(expTs);
  entity.addEvent(event11);
  te.addEntity(entity);
  // write another application with the same metric to this flow
  TimelineEntities te1 = new TimelineEntities();
  TimelineEntity entity1 = new TimelineEntity();
  id = "application_1111111111_2222";
  type = TimelineEntityType.YARN_APPLICATION.toString();
  entity1.setId(id);
  entity1.setType(type);
  cTime = 1425016501000L;
  entity1.setCreatedTime(cTime);
  entity1.addConfig("cfg1", "value1");
  // add metrics
  metrics.clear();
  TimelineMetric m2 = new TimelineMetric();
  m2.setId("MAP_SLOT_MILLIS");
  metricValues = new HashMap<>();
  metricValues.put(ts - 100000, 5L);
  metricValues.put(ts - 80000, 101L);
  m2.setType(Type.TIME_SERIES);
  m2.setValues(metricValues);
  metrics.add(m2);
  entity1.addMetrics(metrics);
  TimelineEvent event1 = new TimelineEvent();
  event1.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event1.setTimestamp(cTime);
  event1.addInfo(expKey, expVal);
  entity1.addEvent(event1);
  te1.addEntity(entity1);
  // a third application in a different flow
  String flow2 = "flow_name2";
  String flowVersion2 = "CF7022C10F1454";
  Long runid2 = 2102356789046L;
  TimelineEntities te3 = new TimelineEntities();
  TimelineEntity entity3 = new TimelineEntity();
  id = "application_11111111111111_2223";
  entity3.setId(id);
  entity3.setType(type);
  cTime = 1425016501037L;
  entity3.setCreatedTime(cTime);
  TimelineEvent event2 = new TimelineEvent();
  event2.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event2.setTimestamp(cTime);
  event2.addInfo("foo_event", "test");
  entity3.addEvent(event2);
  te3.addEntity(entity3);
  // a fourth application, written under a different flow run id
  TimelineEntities te4 = new TimelineEntities();
  TimelineEntity entity4 = new TimelineEntity();
  id = "application_1111111111_2224";
  entity4.setId(id);
  entity4.setType(type);
  cTime = 1425016501034L;
  entity4.setCreatedTime(cTime);
  TimelineEvent event4 = new TimelineEvent();
  event4.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
  event4.setTimestamp(cTime);
  event4.addInfo("foo_event", "test");
  entity4.addEvent(event4);
  metrics.clear();
  m2 = new TimelineMetric();
  m2.setId("MAP_SLOT_MILLIS");
  metricValues = ImmutableMap.of(ts - 100000, (Number) 5L, ts - 80000, 101L);
  m2.setType(Type.TIME_SERIES);
  m2.setValues(metricValues);
  metrics.add(m2);
  entity4.addMetrics(metrics);
  te4.addEntity(entity4);
  // a generic entity with configs, info, metrics, events, and relations
  TimelineEntities te5 = new TimelineEntities();
  TimelineEntity entity5 = new TimelineEntity();
  entity5.setId("entity1");
  entity5.setType("type1");
  entity5.setCreatedTime(1425016501034L);
  // add some config entries
  entity5.addConfigs(ImmutableMap.of("config_param1", "value1",
      "config_param2", "value2", "cfg_param1", "value3"));
  entity5.addInfo(ImmutableMap.of("info1", (Object) "cluster1",
      "info2", 2.0, "info3", 35000, "info4", 36000));
  metrics = new HashSet<>();
  m1 = new TimelineMetric();
  m1.setId("MAP_SLOT_MILLIS");
  metricValues = ImmutableMap.of(ts - 100000, (Number) 2, ts - 80000, 40);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  m1 = new TimelineMetric();
  m1.setId("HDFS_BYTES_READ");
  metricValues = ImmutableMap.of(ts - 100000, (Number) 31, ts - 80000, 57);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  entity5.addMetrics(metrics);
  TimelineEvent event51 = new TimelineEvent();
  event51.setId("event1");
  event51.setTimestamp(cTime);
  entity5.addEvent(event51);
  TimelineEvent event52 = new TimelineEvent();
  event52.setId("event2");
  event52.setTimestamp(cTime);
  entity5.addEvent(event52);
  TimelineEvent event53 = new TimelineEvent();
  event53.setId("event3");
  event53.setTimestamp(cTime);
  entity5.addEvent(event53);
  TimelineEvent event54 = new TimelineEvent();
  event54.setId("event4");
  event54.setTimestamp(cTime);
  entity5.addEvent(event54);
  Map<String, Set<String>> isRelatedTo1 = new HashMap<>();
  isRelatedTo1.put("type2",
      Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
  isRelatedTo1.put("type4", Sets.newHashSet("entity41", "entity42"));
  isRelatedTo1.put("type1", Sets.newHashSet("entity14", "entity15"));
  isRelatedTo1.put("type3",
      Sets.newHashSet("entity31", "entity35", "entity32", "entity33"));
  entity5.addIsRelatedToEntities(isRelatedTo1);
  Map<String, Set<String>> relatesTo1 = new HashMap<>();
  relatesTo1.put("type2",
      Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
  relatesTo1.put("type4", Sets.newHashSet("entity41", "entity42"));
  relatesTo1.put("type1", Sets.newHashSet("entity14", "entity15"));
  relatesTo1.put("type3",
      Sets.newHashSet("entity31", "entity35", "entity32", "entity33"));
  entity5.addRelatesToEntities(relatesTo1);
  te5.addEntity(entity5);
  TimelineEntity entity6 = new TimelineEntity();
  entity6.setId("entity2");
  entity6.setType("type1");
  entity6.setCreatedTime(1425016501034L);
  entity6.addConfigs(ImmutableMap.of("cfg_param3", "value1",
      "configuration_param2", "value2", "config_param1", "value3"));
  entity6.addInfo(ImmutableMap.of("info1", (Object) "cluster2",
      "info2", 2.0, "info4", 35000));
  metrics = new HashSet<>();
  m1 = new TimelineMetric();
  m1.setId("MAP1_SLOT_MILLIS");
  metricValues = ImmutableMap.of(ts - 100000, (Number) 12, ts - 80000, 140);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  m1 = new TimelineMetric();
  m1.setId("HDFS_BYTES_READ");
  metricValues = ImmutableMap.of(ts - 100000, (Number) 78, ts - 80000, 157);
  m1.setType(Type.TIME_SERIES);
  m1.setValues(metricValues);
  metrics.add(m1);
  m1 = new TimelineMetric();
  m1.setId("MAP11_SLOT_MILLIS");
  m1.setType(Type.SINGLE_VALUE);
  m1.addValue(ts - 100000, 122);
  metrics.add(m1);
  entity6.addMetrics(metrics);
  TimelineEvent event61 = new TimelineEvent();
  event61.setId("event1");
  event61.setTimestamp(cTime);
  entity6.addEvent(event61);
  TimelineEvent event62 = new TimelineEvent();
  event62.setId("event5");
  event62.setTimestamp(cTime);
  entity6.addEvent(event62);
  TimelineEvent event63 = new TimelineEvent();
  event63.setId("event3");
  event63.setTimestamp(cTime);
  entity6.addEvent(event63);
  TimelineEvent event64 = new TimelineEvent();
  event64.setId("event6");
  event64.setTimestamp(cTime);
  entity6.addEvent(event64);
  Map<String, Set<String>> isRelatedTo2 = new HashMap<>();
  isRelatedTo2.put("type2",
      Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
  isRelatedTo2.put("type5", Sets.newHashSet("entity51", "entity52"));
  isRelatedTo2.put("type6", Sets.newHashSet("entity61", "entity66"));
  isRelatedTo2.put("type3", Sets.newHashSet("entity31"));
  entity6.addIsRelatedToEntities(isRelatedTo2);
  Map<String, Set<String>> relatesTo2 = new HashMap<>();
  relatesTo2.put("type2",
      Sets.newHashSet("entity21", "entity22", "entity23", "entity24"));
  relatesTo2.put("type5", Sets.newHashSet("entity51", "entity52"));
  relatesTo2.put("type6", Sets.newHashSet("entity61", "entity66"));
  relatesTo2.put("type3", Sets.newHashSet("entity31"));
  entity6.addRelatesToEntities(relatesTo2);
  te5.addEntity(entity6);
  HBaseTimelineWriterImpl hbi = null;
  Configuration c1 = util.getConfiguration();
  try {
    hbi = new HBaseTimelineWriterImpl();
    hbi.init(c1);
    hbi.write(cluster, user, flow, flowVersion, runid, entity.getId(), te);
    hbi.write(cluster, user, flow, flowVersion, runid, entity1.getId(), te1);
    hbi.write(cluster, user, flow, flowVersion, runid1, entity4.getId(), te4);
    hbi.write(cluster, user, flow2, flowVersion2, runid2,
        entity3.getId(), te3);
    hbi.write(cluster, user, flow, flowVersion, runid,
        "application_1111111111_1111", te5);
    hbi.flush();
  } finally {
    if (hbi != null) {
      hbi.close();
    }
  }
}
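
Once loadData has flushed, the web-services tests read the rows back over HTTP. A hedged sketch of one such read (the /ws/v2/timeline path follows the documented ATSv2 reader REST API; serverPort and a running reader daemon are assumptions of the test environment, not shown here; java.net, java.io, and java.nio.charset imports are assumed):

URL url = new URL("http://localhost:" + serverPort
    + "/ws/v2/timeline/clusters/cluster1/apps/application_1111111111_1111");
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
try (BufferedReader in = new BufferedReader(new InputStreamReader(
    conn.getInputStream(), StandardCharsets.UTF_8))) {
  String line;
  while ((line = in.readLine()) != null) {
    System.out.println(line); // JSON for the TimelineEntity written above
  }
} finally {
  conn.disconnect();
}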