Use of org.apache.commons.io.input.UnsynchronizedByteArrayInputStream in the eXist-db project.
Class ConfigurableTest, method simple.
// Parses a small configuration document and verifies the resulting object graph.
@Test
public void simple() throws Exception {
    final InputStream configStream = new UnsynchronizedByteArrayInputStream(config1.getBytes(UTF_8));
    final Configuration configuration = Configurator.parse(configStream);
    final ConfigurableObject configured = new ConfigurableObject(configuration);

    assertNotNull(configured.subclasses);
    assertEquals("A", configured.subclasses.name);
    assertEquals("1", configured.subclasses.version);
    assertEquals(1, configured.subclasses.subconfs.size());
    // XXX: assertEquals(2, configured.subclasses.subconfs.size());
    assertEquals("1", configured.subclasses.subconfs.get(0).getKey());
    assertEquals("secret1", configured.subclasses.subconfs.get(0).getSecret());
    // XXX: assertEquals("2", configured.subclasses.subconfs.get(1).getKey());
    // XXX: assertEquals("secret2", configured.subclasses.subconfs.get(1).getSecret());
}
Use of org.apache.commons.io.input.UnsynchronizedByteArrayInputStream in the eXist-db project.
Class AbstractExtractFunction, method processCompressedEntry.
/**
 * Processes a compressed entry from an archive.
 *
 * The entry is first offered to the entry-filter function; if rejected, an
 * empty sequence is returned. Otherwise the entry is either stored directly
 * into the database (3-argument entry-data function returning a target path)
 * or its uncompressed content is passed to the entry-data function
 * (4-argument form) and that function's result is returned.
 *
 * @param name The name of the entry
 * @param isDirectory true if the entry is a directory, false otherwise
 * @param is an InputStream for reading the uncompressed data of the entry
 * @param filterParam is an additional param for entry filtering function
 * @param storeParam is an additional param for entry storing function
 *
 * @return the result of processing the compressed entry.
 *
 * @throws XPathException if a query error occurs
 * @throws XMLDBException if a database error occurs
 * @throws IOException if an I/O error occurs
 */
protected Sequence processCompressedEntry(String name, boolean isDirectory, InputStream is, Sequence filterParam, Sequence storeParam) throws IOException, XPathException, XMLDBException {
    final String dataType = isDirectory ? "folder" : "resource";

    // call the entry-filter function to decide whether this entry is processed at all
    final Sequence[] filterParams = new Sequence[3];
    filterParams[0] = new StringValue(name);
    filterParams[1] = new StringValue(dataType);
    filterParams[2] = filterParam;
    final Sequence entryFilterFunctionResult = entryFilterFunction.evalFunction(contextSequence, null, filterParams);

    if (BooleanValue.FALSE == entryFilterFunctionResult.itemAt(0)) {
        // entry rejected by the filter function
        return Sequence.EMPTY_SEQUENCE;
    } else {
        Sequence entryDataFunctionResult;
        Sequence uncompressedData = Sequence.EMPTY_SEQUENCE;

        if (entryDataFunction.getSignature().getReturnType().getPrimaryType() != Type.EMPTY && entryDataFunction.getSignature().getArgumentCount() == 3) {
            // 3-argument form: the entry-data function yields a target database path
            // and this method stores the entry itself
            final Sequence[] dataParams = new Sequence[3];
            System.arraycopy(filterParams, 0, dataParams, 0, 2);
            dataParams[2] = storeParam;
            entryDataFunctionResult = entryDataFunction.evalFunction(contextSequence, null, dataParams);
            String path = entryDataFunctionResult.itemAt(0).getStringValue();
            final Collection root = new LocalCollection(context.getSubject(), context.getBroker().getBrokerPool(), new AnyURIValue("/db").toXmldbURI());

            if (isDirectory) {
                XMLDBAbstractCollectionManipulator.createCollection(root, path);
            } else {
                Resource resource;
                final Path file = Paths.get(path).normalize();
                name = FileUtils.fileName(file);
                // FIX: getParent() returns null for a root-level path; the previous code
                // dereferenced it unconditionally, throwing a NullPointerException before
                // the (path == null) check below could ever take effect.
                final Path parent = file.getParent();
                path = (parent == null) ? null : parent.toAbsolutePath().toString();
                final Collection target = (path == null) ? root : XMLDBAbstractCollectionManipulator.createCollection(root, path);

                final MimeType mime = MimeTable.getInstance().getContentTypeFor(name);

                // copy the input data into memory so it can be read twice
                // (XML parse attempt, then binary fallback)
                final byte[] entryData;
                try (final UnsynchronizedByteArrayOutputStream baos = new UnsynchronizedByteArrayOutputStream()) {
                    baos.write(is);
                    entryData = baos.toByteArray();
                }

                // try to parse as XML, fall back to a binary resource on failure
                try (final InputStream bis = new UnsynchronizedByteArrayInputStream(entryData)) {
                    final NodeValue content = ModuleUtils.streamToXML(context, bis);
                    resource = target.createResource(name, "XMLResource");
                    final ContentHandler handler = ((XMLResource) resource).setContentAsSAX();
                    handler.startDocument();
                    content.toSAX(context.getBroker(), handler, null);
                    handler.endDocument();
                } catch (final SAXException e) {
                    // not well-formed XML -> store the raw bytes instead
                    resource = target.createResource(name, "BinaryResource");
                    resource.setContent(entryData);
                }

                if (resource != null) {
                    if (mime != null) {
                        ((EXistResource) resource).setMimeType(mime.getName());
                    }
                    target.storeResource(resource);
                }
            }
        } else {
            // 4-argument form: hand the uncompressed content to the entry-data function

            // copy the input data into memory so it can be read twice
            final byte[] entryData;
            try (final UnsynchronizedByteArrayOutputStream baos = new UnsynchronizedByteArrayOutputStream()) {
                baos.write(is);
                entryData = baos.toByteArray();
            }

            // try and parse as xml, fall back to binary
            try (final InputStream bis = new UnsynchronizedByteArrayInputStream(entryData)) {
                uncompressedData = ModuleUtils.streamToXML(context, bis);
            } catch (final SAXException saxe) {
                if (entryData.length > 0) {
                    try (final InputStream bis = new UnsynchronizedByteArrayInputStream(entryData)) {
                        uncompressedData = BinaryValueFromInputStream.getInstance(context, new Base64BinaryValueType(), bis);
                    }
                }
            }

            // call the entry-data function
            final Sequence[] dataParams = new Sequence[4];
            System.arraycopy(filterParams, 0, dataParams, 0, 2);
            dataParams[2] = uncompressedData;
            dataParams[3] = storeParam;
            entryDataFunctionResult = entryDataFunction.evalFunction(contextSequence, null, dataParams);
        }

        return entryDataFunctionResult;
    }
}
Use of org.apache.commons.io.input.UnsynchronizedByteArrayInputStream in the eXist-db project.
Class BlobStoreRecoveryTest, method addCommit.
/**
 * Stores a blob in the blob store within a transaction and commits it.
 * The transaction resource is declared after the stream, so it is closed
 * first — the same order as the original nested try-with-resources.
 */
private void addCommit(final TransactionManager transactionManager, final BlobStore blobStore, final byte[] blob) throws IOException, TransactionException {
    try (final InputStream blobData = new UnsynchronizedByteArrayInputStream(blob);
            final Txn txn = transactionManager.beginTransaction()) {
        blobStore.add(txn, blobData);
        txn.commit();
    }
}
Use of org.apache.commons.io.input.UnsynchronizedByteArrayInputStream in the eXist-db project.
Class BlobStoreRecoveryTest, method addNoCommit.
/**
 * Stores a blob in the blob store within a transaction but deliberately
 * leaves the transaction uncommitted (used to exercise recovery).
 */
private void addNoCommit(final TransactionManager transactionManager, final BlobStore blobStore, final byte[] blob) throws IOException, TransactionException {
    try (final InputStream blobData = new UnsynchronizedByteArrayInputStream(blob)) {
        // NOTE: the transaction is intentionally NOT placed in try-with-resources;
        // closing it would auto-abort and defeat the purpose of this helper.
        final Txn txn = transactionManager.beginTransaction();
        blobStore.add(txn, blobData);
    }
}
Use of org.apache.commons.io.input.UnsynchronizedByteArrayInputStream in the eXist-db project.
Class CachingFilterInputStreamTest_NonMarkableByteArrayInputStream, method sharedCacheWritesInOrder.
// Verifies that two CachingFilterInputStreams sharing one cache can interleave
// reads, with each stream continuing from its own position in the cached data.
@Test
public void sharedCacheWritesInOrder() throws IOException, InstantiationException, IllegalAccessException, NoSuchMethodException, IllegalArgumentException, InvocationTargetException {
    final byte[] sourceData = generateRandomData(_64KB);
    final InputStream source = new MarkShieldInputStream(new UnsynchronizedByteArrayInputStream(sourceData));

    // first caching stream reads via a fresh shared cache over the source
    final CachingFilterInputStream firstStream = new CachingFilterInputStream(getNewCache(source));

    // read 6KB through the first stream and check it matches the source
    final byte[] firstStreamPart1 = new byte[_6KB];
    firstStream.read(firstStreamPart1);
    assertArrayEquals(subArray(sourceData, _6KB), firstStreamPart1);

    // second caching stream is layered on top of the first
    final CachingFilterInputStream secondStream = new CachingFilterInputStream(firstStream);

    // read 32KB through the second stream; it starts from the beginning of the cache
    final byte[] secondStreamPart1 = new byte[_32KB];
    secondStream.read(secondStreamPart1);
    assertArrayEquals(subArray(sourceData, _32KB), secondStreamPart1);

    // interleave: the first stream's next 6KB must continue where it left off
    final byte[] firstStreamPart2 = new byte[_6KB];
    firstStream.read(firstStreamPart2);
    assertArrayEquals(subArray(sourceData, _6KB, _6KB), firstStreamPart2);
}
Aggregations