Use of org.apache.derby.iapi.services.io.LimitInputStream in the Apache Derby project.
From the class ImportBlob, method getBinaryStream.
/**
 * Returns the <code>BLOB</code> value designated by this
 * <code>Blob</code> object as an input stream.
 *
 * @return a stream containing the <code>BLOB</code> data
 * @exception SQLException if any error occurs while setting up
 * this blob data in the import file as a stream.
 */
public java.io.InputStream getBinaryStream() throws SQLException {
    try {
        if (blobData == null) {
            // Data lives in an external lob file; let the file wrapper
            // build a stream positioned at this blob's offset.
            return lobFile.getBinaryStream(blobPosition, blobLength);
        }
        // In-memory data: expose the byte array through a stream that is
        // capped at blobLength, so callers cannot read past this blob.
        InputStream rawIn = new java.io.ByteArrayInputStream(blobData);
        LimitInputStream cappedIn = new LimitInputStream(rawIn);
        cappedIn.setLimit((int) blobLength);
        return cappedIn;
    } catch (Exception e) {
        // Surface any failure through the standard import error wrapper.
        throw LoadError.unexpectedError(e);
    }
}
Use of org.apache.derby.iapi.services.io.LimitInputStream in the Apache Derby project.
From the class StreamFileContainer, method open.
/**
 ************************************************************************
 * Private/Protected methods of this class:
 **************************************************************************
 */
/**
 * Open a stream file container.
 * <p>
 * Open a container. Open the file that maps to this container; if the
 * file does not exist then we assume the container was never created
 * and return null.
 * If the file exists but we have trouble opening it then we throw an
 * exception.
 * <p>
 *
 * @return The opened StreamFileContainer, or null if the backing file
 * does not exist (container never created) or forUpdate was requested.
 *
 * @param forUpdate Currently only accepts false; updating an existing
 * stream file container is not currently supported.
 *
 * @exception StandardException Standard exception policy.
 */
protected StreamFileContainer open(boolean forUpdate) throws StandardException {
    // Resolve the file backing this container identity; do not create it.
    file = getFileName(this.identity, false, true);
    if (!privExists(file))
        // No backing file means the container was never created.
        return null;
    try {
        if (!forUpdate) {
            fileIn = privGetInputStream(file);
            if (dataFactory.databaseEncrypted()) {
                // if the database is encrypted, when reading the data back
                // from the file stream, we need to use the decrypt stream
                // to buffer up the bytes for reading. DecryptInputStream
                // also decrypts the data.
                MemByteHolder byteHolder = new MemByteHolder(RawStoreFactory.STREAM_FILE_BUFFER_SIZE_DEFAULT);
                decryptIn = new DecryptInputStream(fileIn, byteHolder, dataFactory);
                limitIn = new LimitInputStream(decryptIn);
            } else {
                // Unencrypted: plain buffered reads are sufficient.
                bufferedIn = new BufferedInputStream(fileIn, RawStoreFactory.STREAM_FILE_BUFFER_SIZE_DEFAULT);
                limitIn = new LimitInputStream(bufferedIn);
            }
            // the logicalDataIn input stream is on top of a limit Input
            // stream; use a limit stream to make sure we don't read off
            // more than what each column says it contains
            logicalDataIn = new FormatIdInputStream(limitIn);
            // get the record header
            recordHeader = new StoredRecordHeader();
            recordHeader.read(logicalDataIn);
        } else {
            if (SanityManager.DEBUG)
                SanityManager.THROWASSERT("updating existing stream container not supported yet");
            return null;
        }
    } catch (IOException ioe) {
        // NOTE(review): this reports SQLState.FILE_CREATE even though the
        // failure happened while *opening/reading* an existing file —
        // confirm whether a FILE_OPEN-style state would be more accurate.
        throw StandardException.newException(SQLState.FILE_CREATE, ioe, file);
    }
    return this;
}
Use of org.apache.derby.iapi.services.io.LimitInputStream in the Apache Derby project.
From the class EmbedResultSet, method getBinaryStream.
/**
 * Get the column as an InputStream. If the column is already of type
 * InputStream then just return it, otherwise convert the column to a set
 * of bytes and create a stream out of the bytes.
 *
 * @param columnIndex the first column is 1, the second is 2, ...
 * @return a Java input stream that delivers the database column value
 * as a stream of uninterpreted bytes. If the value is SQL NULL
 * then the result is null.
 * @exception SQLException thrown on failure.
 */
public final InputStream getBinaryStream(int columnIndex) throws SQLException {
    checkIfClosed("getBinaryStream");
    // lmfs: effective max field size limit for this column type;
    // 0 means "no limit" (BLOBs are never truncated by maxFieldSize).
    int lmfs;
    int colType = getColumnType(columnIndex);
    switch(colType) {
    case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
        lmfs = maxFieldSize;
        break;
    case Types.BLOB:
        lmfs = 0;
        break;
    default:
        // Only binary-family columns can be read as an InputStream.
        throw dataTypeConversion("java.io.InputStream", columnIndex);
    }
    Object syncLock = getConnectionSynchronization();
    synchronized (syncLock) {
        // Track whether setupContextStack() ran, so the finally block
        // only restores a stack that was actually pushed.
        boolean pushStack = false;
        try {
            useStreamOrLOB(columnIndex);
            DataValueDescriptor dvd = getColumn(columnIndex);
            // Intentional assignment inside the condition: record the
            // null-ness in wasNull (for wasNull()) and test it at once.
            if (wasNull = dvd.isNull()) {
                return null;
            }
            pushStack = true;
            setupContextStack();
            // The stream we will return to the user
            InputStream stream;
            if (dvd.hasStream()) {
                // Column already backed by a stream; strip the stored
                // header so the caller sees raw bytes.
                stream = new BinaryToRawStream(dvd.getStream(), dvd);
            } else {
                // Materialized value: serve its bytes from memory.
                stream = new ByteArrayInputStream(dvd.getBytes());
            }
            if (lmfs > 0) {
                // Just wrap the InputStream with a LimitInputStream class
                // to enforce the maxFieldSize truncation.
                LimitInputStream limitResultIn = new LimitInputStream(stream);
                limitResultIn.setLimit(lmfs);
                stream = limitResultIn;
            }
            // Wrap in a stream throwing exception on invocations when closed.
            stream = new CloseFilterInputStream(stream);
            currentStream = stream;
            return stream;
        } catch (Throwable t) {
            throw noStateChangeException(t);
        } finally {
            if (pushStack) {
                restoreContextStack();
            }
        }
    }
}
Use of org.apache.derby.iapi.services.io.LimitInputStream in the Apache Derby project.
From the class ImportFileInputStream, method openLobFile.
/**
 * Open the lob file and set up the streams required to read its data.
 * Initializes {@code lobInputStream} (random-access reads from any
 * offset, since columns may be imported in any order or skipped) and
 * {@code lobLimitIn} (caps each read at a single column's length).
 *
 * @param lobFile the file that contains lob data.
 * @exception Exception if an error occurs.
 */
private void openLobFile(final File lobFile) throws Exception {
    RandomAccessFile raf;
    try {
        // Opening the file touches the file system, so it must run
        // inside a privileged block.
        raf = AccessController.doPrivileged(new java.security.PrivilegedExceptionAction<RandomAccessFile>() {
            public RandomAccessFile run() throws IOException {
                return new RandomAccessFile(lobFile, "r");
            }
        });
    } catch (PrivilegedActionException pae) {
        // Checked exceptions from run() arrive wrapped; report a missing
        // file as the dedicated Derby error, rethrow everything else.
        Exception cause = pae.getException();
        if (cause instanceof FileNotFoundException) {
            throw PublicAPI.wrapStandardException(StandardException.newException(SQLState.LOB_DATA_FILE_NOT_FOUND, lobFile.getPath()));
        }
        throw cause;
    }
    // Stream that can start reading from any offset in the file.
    lobInputStream = new ImportFileInputStream(raf);
    // Wrap with a LimitInputStream so that only the requested amount of
    // data (e.g. one column's worth) is read from the file at a time.
    lobLimitIn = new LimitInputStream(lobInputStream);
}
Aggregations