Use of java.io.BufferedOutputStream in project Openfire by igniterealtime: class Recorder, method openFile.
private void openFile(MediaInfo mediaInfo) throws IOException {
    File recordFile = new File(recordPath);
    try {
        synchronized (this) {
            if (recordFile.exists()) {
                recordFile.delete();
            }
            recordFile.createNewFile();
            fo = new FileOutputStream(recordFile);
            bo = new BufferedOutputStream(fo, BUFFER_SIZE);
            if (!recordRtp) {
                writeAuHeader(mediaInfo);
            } else {
                /*
                 * Write RTP header
                 */
                byte[] buf = new byte[16];
                buf[0] = (byte) 0x52;  // 'R'
                buf[1] = (byte) 0x54;  // 'T'
                buf[2] = (byte) 0x50;  // 'P'
                bo.write(buf, 0, buf.length);
            }
        }
    } catch (IOException e) {
        fo = null;
        bo = null;
        Logger.error("can't create buffered output stream for " + recordPath + " " + e.getMessage());
        throw new IOException("can't create buffered output stream for " + recordPath + " " + e.getMessage());
    }
    Logger.println("Recording to " + recordFile.getAbsolutePath() + " recording type is " + (recordRtp ? "RTP" : "Audio"));
}
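The writeAuHeader(mediaInfo) helper is not shown in this excerpt. As a hedged illustration only, a typical Sun/NeXT .au header written through the same BufferedOutputStream could look like the sketch below; the method name writeAuHeaderSketch, its parameters, and the concrete field values (encoding, sample rate, channel count, presumably pulled from MediaInfo) are assumptions, not Openfire's actual implementation.
import java.io.BufferedOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hedged sketch only: a standard .au header consists of big-endian 32-bit fields.
static void writeAuHeaderSketch(BufferedOutputStream bo, int sampleRate, int channels) throws IOException {
    DataOutputStream dos = new DataOutputStream(bo);
    dos.writeBytes(".snd");   // magic number
    dos.writeInt(24);         // offset to the audio data, i.e. the header length
    dos.writeInt(-1);         // data size unknown while still recording (0xffffffff)
    dos.writeInt(3);          // encoding: 16-bit linear PCM (1 would be 8-bit u-law)
    dos.writeInt(sampleRate); // e.g. 8000
    dos.writeInt(channels);   // e.g. 1
    dos.flush();              // push the header through the buffered stream
}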
Use of java.io.BufferedOutputStream in project pinot by linkedin: class TarGzCompressionUtils, method createTarGzOfDirectory.
public static String createTarGzOfDirectory(String directoryPath, String tarGzPath, String entryPrefix) throws IOException {
    FileOutputStream fOut = null;
    BufferedOutputStream bOut = null;
    GzipCompressorOutputStream gzOut = null;
    TarArchiveOutputStream tOut = null;
    if (!tarGzPath.endsWith(TAR_GZ_FILE_EXTENTION)) {
        tarGzPath = tarGzPath + TAR_GZ_FILE_EXTENTION;
    }
    try {
        fOut = new FileOutputStream(new File(tarGzPath));
        bOut = new BufferedOutputStream(fOut);
        gzOut = new GzipCompressorOutputStream(bOut);
        tOut = new TarArchiveOutputStream(gzOut);
        tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        addFileToTarGz(tOut, directoryPath, entryPrefix);
    } catch (IOException e) {
        LOGGER.error("Failed to create tar.gz file for {} at path: {}", directoryPath, tarGzPath, e);
        Utils.rethrowException(e);
    } finally {
        if (tOut != null) {
            tOut.finish();
            tOut.close();
        }
        if (gzOut != null) {
            gzOut.close();
        }
        if (bOut != null) {
            bOut.close();
        }
        if (fOut != null) {
            fOut.close();
        }
    }
    return tarGzPath;
}
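The addFileToTarGz helper called above is not included in this excerpt. A minimal sketch of what such a recursive helper typically looks like with the Commons Compress API is shown below; the method name addFileToTarGzSketch and its body are assumptions based on that API, not pinot's actual code.
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.utils.IOUtils;

// Hedged sketch: recursively add a file or directory to an already-open TarArchiveOutputStream.
private static void addFileToTarGzSketch(TarArchiveOutputStream tOut, String path, String base) throws IOException {
    File file = new File(path);
    String entryName = base + file.getName();
    TarArchiveEntry entry = new TarArchiveEntry(file, entryName);
    tOut.putArchiveEntry(entry);
    if (file.isFile()) {
        try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(file))) {
            IOUtils.copy(in, tOut);   // stream the file body into the archive
        }
        tOut.closeArchiveEntry();
    } else {
        tOut.closeArchiveEntry();     // directory entries carry no data
        File[] children = file.listFiles();
        if (children != null) {
            for (File child : children) {
                addFileToTarGzSketch(tOut, child.getAbsolutePath(), entryName + "/");
            }
        }
    }
}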
Use of java.io.BufferedOutputStream in project pinot by linkedin: class OffHeapBitmapInvertedIndexCreator, method seal.
@Override
public void seal() throws IOException {
    FileOutputStream fos = null;
    FileInputStream fisOffsets = null;
    FileInputStream fisBitmaps = null;
    final DataOutputStream bitmapsOut;
    final DataOutputStream offsetsOut;
    String tempOffsetsFile = invertedIndexFile + ".offsets";
    String tempBitmapsFile = invertedIndexFile + ".binary";
    try {
        // build the posting lists
        constructPostingLists();
        // We need two separate streams, one to write the offsets and another to write the
        // serialized bitmap data, because we don't know the serialized length of each bitmap
        // until it has been constructed.
        offsetsOut = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(tempOffsetsFile)));
        bitmapsOut = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(tempBitmapsFile)));
        // Write out the offsets of the bitmaps so that a given bitmap can be accessed directly.
        // (invertedIndex.length + 1) offsets are written in total; the last offset is used to
        // calculate the length of the last bitmap, which may be needed when accessing bitmaps randomly.
        // If a bitmap's offset is k, then k bytes need to be skipped to reach the bitmap.
        int startOffset = 4 * (cardinality + 1);
        // the first bitmap's offset
        offsetsOut.writeInt(startOffset);
        MutableRoaringBitmap bitmap = new MutableRoaringBitmap();
        for (int i = 0; i < cardinality; i++) {
            bitmap.clear();
            int length = postingListLengths.get(i);
            for (int j = 0; j < length; j++) {
                int bufferOffset = postingListStartOffsets.get(i) + j;
                int value = postingListBuffer.get(bufferOffset);
                bitmap.add(value);
            }
            // serialize the bitmap to the bitmapsOut stream
            bitmap.serialize(bitmapsOut);
            startOffset += bitmap.serializedSizeInBytes();
            // write the next offset
            offsetsOut.writeInt(startOffset);
        }
        offsetsOut.close();
        bitmapsOut.close();
        // merge the two files by simply writing the offsets data first and then the serialized bitmap data
        fos = new FileOutputStream(invertedIndexFile);
        fisOffsets = new FileInputStream(tempOffsetsFile);
        fisBitmaps = new FileInputStream(tempBitmapsFile);
        FileChannel channelOffsets = fisOffsets.getChannel();
        channelOffsets.transferTo(0, channelOffsets.size(), fos.getChannel());
        FileChannel channelBitmaps = fisBitmaps.getChannel();
        channelBitmaps.transferTo(0, channelBitmaps.size(), fos.getChannel());
        LOGGER.debug("persisted bitmap inverted index for column : " + spec.getName() + " in " + invertedIndexFile.getAbsolutePath());
    } catch (Exception e) {
        LOGGER.error("Exception while creating bitmap index for column:" + spec.getName(), e);
    } finally {
        IOUtils.closeQuietly(fos);
        IOUtils.closeQuietly(fisOffsets);
        IOUtils.closeQuietly(fisBitmaps);
        // MmapUtils handles the null checks for the buffers
        MmapUtils.unloadByteBuffer(origValueBuffer);
        origValueBuffer = null;
        valueBuffer = null;
        if (origLengths != null) {
            MmapUtils.unloadByteBuffer(origLengths);
            origLengths = null;
            lengths = null;
        }
        MmapUtils.unloadByteBuffer(origPostingListBuffer);
        origPostingListBuffer = null;
        postingListBuffer = null;
        MmapUtils.unloadByteBuffer(origPostingListCurrentOffsets);
        origPostingListCurrentOffsets = null;
        postingListCurrentOffsets = null;
        MmapUtils.unloadByteBuffer(origPostingListLengths);
        origPostingListLengths = null;
        postingListLengths = null;
        MmapUtils.unloadByteBuffer(origPostingListStartOffsets);
        origPostingListStartOffsets = null;
        postingListStartOffsets = null;
        FileUtils.deleteQuietly(new File(tempOffsetsFile));
        FileUtils.deleteQuietly(new File(tempBitmapsFile));
    }
}
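To make the resulting file layout concrete, here is a hedged reader-side sketch that follows from the format written above: a block of (cardinality + 1) big-endian int offsets, followed by the serialized bitmaps back to back. The method name, variable names, and the use of ImmutableRoaringBitmap over a heap buffer are assumptions for illustration, not pinot's actual reader code.
import java.io.File;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import org.roaringbitmap.buffer.ImmutableRoaringBitmap;

// Hedged sketch: read the i-th bitmap from the merged inverted-index file.
static ImmutableRoaringBitmap readBitmapSketch(File indexFile, int cardinality, int i) throws Exception {
    try (RandomAccessFile raf = new RandomAccessFile(indexFile, "r")) {
        FileChannel channel = raf.getChannel();
        ByteBuffer offsets = ByteBuffer.allocate(4 * (cardinality + 1));
        channel.read(offsets, 0);
        offsets.flip();
        int start = offsets.getInt(4 * i);        // offset of bitmap i
        int end = offsets.getInt(4 * (i + 1));    // next offset bounds bitmap i's length
        ByteBuffer data = ByteBuffer.allocate(end - start);
        channel.read(data, start);
        data.flip();
        return new ImmutableRoaringBitmap(data);  // wraps the serialized bytes without copying
    }
}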
Use of java.io.BufferedOutputStream in project pinot by linkedin: class OffHeapStarTreeBuilder, method init.
public void init(StarTreeBuilderConfig builderConfig) throws Exception {
    schema = builderConfig.schema;
    timeColumnName = schema.getTimeColumnName();
    this.dimensionsSplitOrder = builderConfig.dimensionsSplitOrder;
    skipStarNodeCreationForDimensions = builderConfig.getSkipStarNodeCreationForDimensions();
    skipMaterializationForDimensions = builderConfig.getSkipMaterializationForDimensions();
    skipMaterializationCardinalityThreshold = builderConfig.getSkipMaterializationCardinalityThreshold();
    enableOffHeapFormat = builderConfig.isEnableOffHealpFormat();
    this.maxLeafRecords = builderConfig.maxLeafRecords;
    this.outDir = builderConfig.getOutDir();
    if (outDir == null) {
        outDir = new File(System.getProperty("java.io.tmpdir"), V1Constants.STAR_TREE_INDEX_DIR + "_" + DateTime.now());
    }
    LOG.info("Index output directory:{}", outDir);
    dimensionTypes = new ArrayList<>();
    dimensionNames = new ArrayList<>();
    dimensionNameToIndexMap = HashBiMap.create();
    dimensionNameToStarValueMap = new HashMap<>();
    dictionaryMap = new HashMap<>();
    // READ DIMENSION COLUMNS
    List<DimensionFieldSpec> dimensionFieldSpecs = schema.getDimensionFieldSpecs();
    for (int index = 0; index < dimensionFieldSpecs.size(); index++) {
        DimensionFieldSpec spec = dimensionFieldSpecs.get(index);
        String dimensionName = spec.getName();
        dimensionNames.add(dimensionName);
        dimensionNameToIndexMap.put(dimensionName, index);
        Object starValue = getAllStarValue(spec);
        dimensionNameToStarValueMap.put(dimensionName, starValue);
        dimensionTypes.add(spec.getDataType());
        HashBiMap<Object, Integer> dictionary = HashBiMap.create();
        dictionaryMap.put(dimensionName, dictionary);
    }
    // treat the time column as another dimension; it is never split on unless explicitly listed in the split order
    if (timeColumnName != null) {
        dimensionNames.add(timeColumnName);
        TimeFieldSpec timeFieldSpec = schema.getTimeFieldSpec();
        dimensionTypes.add(timeFieldSpec.getDataType());
        int index = dimensionNameToIndexMap.size();
        dimensionNameToIndexMap.put(timeColumnName, index);
        Object starValue = getAllStarValue(timeFieldSpec);
        dimensionNameToStarValueMap.put(timeColumnName, starValue);
        HashBiMap<Object, Integer> dictionary = HashBiMap.create();
        dictionaryMap.put(schema.getTimeColumnName(), dictionary);
    }
    dimensionSizeBytes = dimensionNames.size() * Integer.SIZE / 8;
    this.numDimensions = dimensionNames.size();
    // READ METRIC COLUMNS
    this.metricNames = new ArrayList<>();
    this.metricNameToIndexMap = new HashMap<>();
    this.metricSizeBytes = 0;
    List<MetricFieldSpec> metricFieldSpecs = schema.getMetricFieldSpecs();
    for (int index = 0; index < metricFieldSpecs.size(); index++) {
        MetricFieldSpec spec = metricFieldSpecs.get(index);
        String metricName = spec.getName();
        metricNames.add(metricName);
        metricNameToIndexMap.put(metricName, index);
        metricSizeBytes += spec.getFieldSize();
    }
    numMetrics = metricNames.size();
    builderConfig.getOutDir().mkdirs();
    dataFile = new File(outDir, "star-tree.buf");
    LOG.info("StarTree output data file: {}", dataFile.getAbsolutePath());
    dataBuffer = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dataFile)));
    // INITIALIZE THE ROOT NODE
    this.starTreeRootIndexNode = new StarTreeIndexNode();
    this.starTreeRootIndexNode.setDimensionName(StarTreeIndexNodeInterf.ALL);
    this.starTreeRootIndexNode.setDimensionValue(StarTreeIndexNodeInterf.ALL);
    this.starTreeRootIndexNode.setLevel(0);
    LOG.info("dimensionNames:{}", dimensionNames);
    LOG.info("metricNames:{}", metricNames);
}
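The init() above sizes each raw record as dimensionSizeBytes (one 4-byte dictionary id per dimension, including the time column) plus metricSizeBytes, and opens dataBuffer as a buffered DataOutputStream for those records. The sketch below is only a hedged illustration of writing one such fixed-width record; the method name appendRecordSketch and the restriction to int/long/double metrics are assumptions, not the builder's actual append logic.
import java.io.DataOutputStream;
import java.io.IOException;

// Hedged sketch: write one fixed-width record (dictionary-encoded dimensions, then metrics)
// in the layout implied by dimensionSizeBytes and metricSizeBytes above.
static void appendRecordSketch(DataOutputStream dataBuffer, int[] dimensionDictIds, Number[] metricValues) throws IOException {
    for (int dictId : dimensionDictIds) {
        dataBuffer.writeInt(dictId);                      // 4 bytes per dimension -> dimensionSizeBytes in total
    }
    for (Number value : metricValues) {
        if (value instanceof Integer) {
            dataBuffer.writeInt(value.intValue());
        } else if (value instanceof Long) {
            dataBuffer.writeLong(value.longValue());
        } else {
            dataBuffer.writeDouble(value.doubleValue());  // fall back to 8-byte doubles for floating-point metrics
        }
    }
}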
Use of java.io.BufferedOutputStream in project nmid-headline by miao1007: class ImageUtils, method saveBackgroundImage.
public static void saveBackgroundImage(Context ctx, String filePath, Bitmap bitmap, int quality) throws IOException {
    if (bitmap != null) {
        File file = new File(filePath.substring(0, filePath.lastIndexOf(File.separator)));
        if (!file.exists()) {
            file.mkdirs();
        }
        BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath));
        bitmap.compress(CompressFormat.PNG, quality, bos);
        bos.flush();
        bos.close();
        if (ctx != null) {
            scanPhoto(ctx, filePath);
        }
    }
}
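A hedged usage example follows; it assumes a context and a bitmap already in scope, a writable path under the app's external files directory, and an arbitrary file name, none of which come from the original project.
// Hedged usage sketch: quality is ignored for PNG (lossless), so 100 is just a placeholder.
File outFile = new File(context.getExternalFilesDir(null), "background.png");
try {
    ImageUtils.saveBackgroundImage(context, outFile.getAbsolutePath(), bitmap, 100);
} catch (IOException e) {
    Log.e("ImageUtils", "failed to save background image", e);
}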