Use of org.apache.sis.internal.geotiff.Compression in project sis by apache.
The class ImageMetadataBuilder, method finish.
/**
 * Completes the metadata with the information stored in the fields of the IFD.
 * This method is invoked only if the user requested the ISO 19115 metadata.
 * It should be invoked last, after all other metadata have been set.
 *
 * @throws DataStoreException if an error occurred while reading metadata from the data store.
 */
void finish(final ImageFileDirectory image) throws DataStoreException {
    image.getIdentifier().ifPresent((id) -> addTitle(id.toString()));
    /*
     * Add information about the file format.
     *
     * Destination: metadata/identificationInfo/resourceFormat
     */
    final GeoTiffStore store = image.reader.store;
    if (store.hidden) {
        // Should be before `addCompression(…)`.
        store.setFormatInfo(this);
    }
    final Compression compression = image.getCompression();
    if (compression != null) {
        addCompression(CharSequences.upperCaseToSentence(compression.name()));
    }
    /*
     * Add the resolution into the metadata. Our current ISO 19115 implementation restricts
     * the resolution unit to metres, but it may be relaxed in a future SIS version.
     *
     * Destination: metadata/identificationInfo/spatialResolution/distance
     */
    if (!Double.isNaN(resolution) && resolutionUnit != null) {
        addResolution(resolutionUnit.getConverterTo(Units.METRE).convert(resolution));
    }
    /*
     * Cell size is relevant only if the Threshholding TIFF tag value is 2. By convention in
     * this implementation class, other Threshholding values are stored as negative cell sizes:
     *
     *   -1 means that Threshholding is 1 or unspecified.
     *   -2 means that Threshholding is 2 but the matrix size has not yet been specified.
     *   -3 means that Threshholding is 3 (randomized process such as error diffusion).
     *
     * Destination: metadata/resourceLineage/processStep/description
     */
    final int cellWidth  = this.cellWidth;
    final int cellHeight = this.cellHeight;
    switch (Math.min(cellWidth, cellHeight)) {
        case -1: {
            // Nothing to report.
            break;
        }
        case -3: {
            addProcessDescription(Resources.formatInternational(Resources.Keys.RandomizedProcessApplied));
            break;
        }
        default: {
            addProcessDescription(Resources.formatInternational(
                    Resources.Keys.DitheringOrHalftoningApplied_2,
                    (cellWidth  >= 0) ? cellWidth  : '?',
                    (cellHeight >= 0) ? cellHeight : '?'));
            break;
        }
    }
    /*
     * If there is XML metadata, append them last in order
     * to allow them to be merged with existing metadata.
     */
    while (complement != null) try {
        complement = complement.appendTo(this);
    } catch (Exception ex) {
        image.warning(image.reader.errors().getString(Errors.Keys.CanNotSetPropertyValue_1, complement.tag()), ex);
    }
}
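Both helper calls used above are part of the public Apache SIS API: CharSequences.upperCaseToSentence(…) turns an enum name such as PACK_BITS into a more readable sentence, and Unit.getConverterTo(Units.METRE) expresses the resolution in metres. A minimal standalone sketch of the same two conversions; the literal values (300, Units.CENTIMETRE) are invented for the example and stand in for the resolution and resolutionUnit fields of the builder:

import javax.measure.Unit;
import javax.measure.quantity.Length;
import org.apache.sis.measure.Units;
import org.apache.sis.util.CharSequences;

public final class ResolutionAndCompressionLabel {
    public static void main(String[] args) {
        // Hypothetical IFD values standing in for the `resolution` and `resolutionUnit` fields.
        double resolution = 300;
        Unit<Length> resolutionUnit = Units.CENTIMETRE;

        // Same conversion as in finish(…): express the spatial resolution in metres.
        double metres = resolutionUnit.getConverterTo(Units.METRE).convert(resolution);
        System.out.println("Resolution in metres: " + metres);

        // Same transformation as before addCompression(…): make an enum name human readable.
        System.out.println(CharSequences.upperCaseToSentence("PACK_BITS"));
    }
}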
Use of org.apache.sis.internal.geotiff.Compression in project sis by apache.
The class NativeMetadata, method read.
/**
 * Reads the tree table content. This method assumes that the caller already verified that the
 * file is a GeoTIFF file. Tags and keys are added in the order they are declared in the file.
 */
final DefaultTreeTable read(final Reader reader) throws IOException {
    input     = reader.input;
    isClassic = reader.intSizeExpansion == 0;
    final int offsetSize = Integer.BYTES << reader.intSizeExpansion;
    final DefaultTreeTable table = new DefaultTreeTable(CODE, NAME, VALUE);
    final TreeTable.Node root = table.getRoot();
    root.setValue(NAME, "TIFF");
    input.mark();
    try {
        input.seek(addExact(reader.origin, isClassic ? 2 * Short.BYTES : 4 * Short.BYTES));
        final Set<Long> doneIFD = new HashSet<>();
        long nextIFD;
        /*
         * Following loop is a simplified copy of `Reader.getImageFileDirectory(int)` method,
         * without the "deferred entries" mechanism. Instead we seek immediately.
         */
        int imageNumber = 0;
        while ((nextIFD = readInt(false)) != 0) {
            if (!doneIFD.add(nextIFD)) {
                // Safety against an infinite loop if the chain of IFDs contains a cycle.
                break;
            }
            final TreeTable.Node image = root.newChild();
            image.setValue(NAME, vocabulary.getString(Vocabulary.Keys.Image_1, imageNumber));
            input.seek(Math.addExact(reader.origin, nextIFD));
            for (long remaining = readInt(true); --remaining >= 0;) {
                final short tag  = (short) input.readUnsignedShort();
                final Type type  = Type.valueOf(input.readShort());        // May be null.
                final long count = readInt(false);
                final long size  = (type != null) ? Math.multiplyExact(type.size, count) : 0;
                final long next  = addExact(input.getStreamPosition(), offsetSize);
                boolean visible;
                /*
                 * Exclude the tags about location of tiles in the GeoTIFF files.
                 * Values of those tags are potentially large and rarely useful for human reading.
                 * This switch is only about tags to skip; special handling of some tags is done later.
                 */
                switch (tag) {
                    case Tags.TileOffsets:
                    case Tags.StripOffsets:
                    case Tags.TileByteCounts:
                    case Tags.StripByteCounts:
                        visible = false;
                        break;
                    default:
                        visible = (size != 0);
                        break;
                }
                if (visible) {
                    if (size > offsetSize) {
                        final long offset = readInt(false);
                        input.seek(Math.addExact(reader.origin, offset));
                    }
                    /*
                     * Some tags need to be handled in a special way. The main cases are GeoTIFF keys.
                     * But other cases exist (e.g. GEO_METADATA and GDAL_METADATA).
                     */
                    Object value = null;
                    XMLMetadata children = null;
                    switch (tag) {
                        case Tags.GeoKeyDirectory: {
                            // Flush previous keys if any (should never happen).
                            writeGeoKeys();
                            keyDirectory = type.readVector(input, count);
                            value = "GeoTIFF";
                            break;
                        }
                        case Tags.GeoDoubleParams: {
                            numericParameters = type.readVector(input, count);
                            visible = false;
                            break;
                        }
                        case Tags.GeoAsciiParams: {
                            setAsciiParameters(type.readString(input, count, reader.store.encoding));
                            visible = false;
                            break;
                        }
                        case Tags.GDAL_METADATA:
                        case Tags.GEO_METADATA: {
                            children = new XMLMetadata(reader, type, count, tag);
                            if (children.isEmpty()) {
                                // Fall back on showing the array of numerical values.
                                value = type.readVector(input, count);
                            }
                            break;
                        }
                        default: {
                            value = type.readObject(input, count);
                            if (value instanceof Vector) {
                                final Vector v = (Vector) value;
                                switch (v.size()) {
                                    case 0: value = null;     break;
                                    case 1: value = v.get(0); break;
                                }
                            }
                            /*
                             * Replace a few numerical values by a more readable string when available.
                             * We currently perform this replacement only for tags for which we defined
                             * an enumeration.
                             */
                            switch (tag) {
                                case Tags.Compression:
                                    value = toString(value, Compression::valueOf, Compression.UNKNOWN);
                                    break;
                                case Tags.Predictor:
                                    value = toString(value, Predictor::valueOf, Predictor.UNKNOWN);
                                    break;
                            }
                        }
                    }
                    if (visible) {
                        final String name = Tags.name(tag);
                        final TreeTable.Node node;
                        if (children != null) {
                            node = new XMLMetadata.Root(children, (DefaultTreeTable.Node) image, name);
                        } else {
                            node = image.newChild();
                            node.setValue(NAME, name);
                            node.setValue(VALUE, value);
                        }
                        node.setValue(CODE, Short.toUnsignedInt(tag));
                        if (tag == Tags.GeoKeyDirectory) {
                            geoNode = node;
                        }
                    }
                }
                input.seek(next);
            }
            imageNumber++;
        }
    } catch (ArithmeticException e) {
        // Can not seek that far.
        throw new IOException(e);
    } finally {
        input.reset();
    }
    writeGeoKeys();
    return table;
}
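The toString(value, Compression::valueOf, Compression.UNKNOWN) call above relies on a private helper of NativeMetadata. The sketch below is not the SIS implementation; it uses a hypothetical enum and mapping only to illustrate the pattern: replace a numeric TIFF code by an enum name when the code is recognized, otherwise keep the raw number.

import java.util.function.LongFunction;

final class EnumLabelSketch {
    // Hypothetical stand-in for org.apache.sis.internal.geotiff.Compression.
    enum Compression { NONE, LZW, DEFLATE, UNKNOWN }

    /** Replaces a numeric tag value by the matching enum name, or returns the value unchanged. */
    static Object toLabel(Object value, LongFunction<Compression> valueOf, Compression unknown) {
        if (value instanceof Number) {
            Compression c = valueOf.apply(((Number) value).longValue());
            if (c != null && c != unknown) {
                return c.name();
            }
        }
        return value;
    }

    public static void main(String[] args) {
        // Simplified mapping of a few well-known TIFF Compression codes (1 = none, 5 = LZW, 8 = Deflate).
        LongFunction<Compression> valueOf = (code) -> {
            switch ((int) code) {
                case 1:  return Compression.NONE;
                case 5:  return Compression.LZW;
                case 8:  return Compression.DEFLATE;
                default: return Compression.UNKNOWN;
            }
        };
        System.out.println(toLabel(5L,  valueOf, Compression.UNKNOWN));     // LZW
        System.out.println(toLabel(99L, valueOf, Compression.UNKNOWN));     // 99
    }
}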
Use of org.apache.sis.internal.geotiff.Compression in project sis by apache.
The class DataCube, method read.
/**
 * Creates a {@link GridCoverage} which will load pixel data in the given domain.
 *
 * @param  domain  desired grid extent and resolution, or {@code null} for reading the whole domain.
 * @param  range   0-based indices of sample dimensions to read, or an empty sequence for reading all ranges.
 * @return the grid coverage for the specified domain and range.
 * @throws DataStoreException if an error occurred while reading the grid coverage data.
 */
@Override
public final GridCoverage read(final GridGeometry domain, final int... range) throws DataStoreException {
    final long startTime = System.nanoTime();
    GridCoverage coverage;
    try {
        synchronized (getSynchronizationLock()) {
            final Subset subset = new Subset(domain, range);
            final Compression compression = getCompression();
            if (compression == null) {
                throw new DataStoreContentException(reader.resources().getString(
                        Resources.Keys.MissingValue_2, Tags.name(Tags.Compression)));
            }
            /*
             * The `DataSubset` parent class is the most efficient but has many limitations
             * documented in the javadoc of its `readSlice(…)` method. If any pre-condition
             * is not met, we need to fall back on the less direct `CompressedSubset` class.
             */
            if (compression == Compression.NONE && getPredictor() == Predictor.NONE && canReadDirect(subset)) {
                coverage = new DataSubset(this, subset);
            } else {
                coverage = new CompressedSubset(this, subset);
            }
            coverage = preload(coverage);
        }
    } catch (RuntimeException e) {
        throw canNotRead(reader.input.filename, domain, e);
    }
    logReadOperation(reader.store.path, coverage.getGridGeometry(), startTime);
    return coverage;
}
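From a user's point of view, this read(…) method is reached through the org.apache.sis.storage.GridCoverageResource interface after opening the file with DataStores. A minimal sketch, assuming a GeoTIFF file at the hypothetical path image.tif and that the store exposes its images as GridCoverageResource components:

import java.nio.file.Paths;
import org.apache.sis.coverage.grid.GridCoverage;
import org.apache.sis.storage.Aggregate;
import org.apache.sis.storage.DataStore;
import org.apache.sis.storage.DataStores;
import org.apache.sis.storage.GridCoverageResource;
import org.apache.sis.storage.Resource;

public final class ReadGeoTiffCoverage {
    public static void main(String[] args) throws Exception {
        // "image.tif" is a placeholder path for this example.
        try (DataStore store = DataStores.open(Paths.get("image.tif"))) {
            for (Resource r : ((Aggregate) store).components()) {
                if (r instanceof GridCoverageResource) {
                    // A null domain and an empty range request the whole image,
                    // which is served by the read(…) method shown above.
                    GridCoverage coverage = ((GridCoverageResource) r).read(null);
                    System.out.println(coverage.getGridGeometry());
                    break;
                }
            }
        }
    }
}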