Use of com.ibm.j9ddr.vm29.j9.DataType in project VERDICT by ge-high-assurance.
The class Aadl2Vdm, method defineDataImplementationType.
/**
* @author Vidhya Tekken Valapil
* Populates information related to data implementation types in the VDM model
*/
private void defineDataImplementationType(DataImplementation dataImplementation, Model model, HashSet<String> dataTypeDecl) {
// DEFINE DATA TYPE IN DECLARATIONS IF NOT ALREADY DEFINED
String dataImplementationName = dataImplementation.getName();
if (!dataTypeDecl.contains(dataImplementationName)) {
dataTypeDecl.add(dataImplementationName);
// vdm data type declaration
TypeDeclaration dataTypeVdm = new TypeDeclaration();
dataTypeVdm.setName(dataImplementationName);
verdict.vdm.vdm_data.DataType dtype = new verdict.vdm.vdm_data.DataType();
// GET DETAILS OF THE DATA IMPLEMENTATION AND CREATE CORRESPONDING VDM DATATYPE
EList<DataSubcomponent> subcomponents = dataImplementation.getOwnedDataSubcomponents();
if (!(subcomponents.isEmpty())) {
// if the dataType definition has subcomponents
RecordType recType = new RecordType();
for (DataSubcomponent dataSubComp : subcomponents) {
RecordField recField = new RecordField();
recField.setName(dataSubComp.getName());
DataSubcomponentType dataSubCompType = dataSubComp.getDataSubcomponentType();
if (dataSubCompType instanceof org.osate.aadl2.DataType) {
org.osate.aadl2.DataType aadlDataType = (org.osate.aadl2.DataType) dataSubCompType;
Agree2Vdm agree2vdm = new Agree2Vdm();
verdict.vdm.vdm_data.DataType recFieldDtype = agree2vdm.getVdmTypeFromAADLType(aadlDataType);
recField.setType(recFieldDtype);
resolveAADLDataType(aadlDataType, model, dataTypeDecl);
recType.getRecordField().add(recField);
} else if (dataSubCompType instanceof DataImplementation) {
DataImplementation dataSubCompDataImplementation = (DataImplementation) dataSubCompType;
verdict.vdm.vdm_data.DataType recFieldDtype = new verdict.vdm.vdm_data.DataType();
recFieldDtype.setUserDefinedType(dataSubCompDataImplementation.getName());
recField.setType(recFieldDtype);
defineDataImplementationType(dataSubCompDataImplementation, model, dataTypeDecl);
recType.getRecordField().add(recField);
} else {
System.out.println("Unexpected Data Subcomponent that is not a DataTypeImpl or DataImplementatioImpl.");
}
}
if (recType.getRecordField().size() != 0) {
dtype.setRecordType(recType);
dataTypeVdm.setDefinition(dtype);
}
} else {
// the dataType is a base type (boolean, integer, char, or string) with no subcomponents
System.out.println("Data implementation type has no subcomponents");
}
// add the typeDeclaration to the model
model.getTypeDeclaration().add(dataTypeVdm);
}
}
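The dataTypeDecl guard is what keeps this recursion from re-declaring a type that appears as a field in several places (or looping on a self-referential model). Below is a minimal, self-contained sketch of the same declare-once pattern, using hypothetical stand-ins for the OSATE and VDM classes:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Hypothetical stand-ins for DataImplementation and the VDM model.
public class DeclGuardSketch {
    // type name -> names of nested record-field types (empty list = leaf type)
    static Map<String, List<String>> nested = Map.of(
            "Position", List.of("Coordinate", "Coordinate"),
            "Coordinate", List.of());

    static Set<String> declared = new HashSet<>();
    static List<String> declarationOrder = new ArrayList<>();

    // Mirrors defineDataImplementationType: declare each type once,
    // recursing into field types before recording the declaration.
    static void define(String typeName) {
        if (!declared.add(typeName)) {
            return; // already declared, stop the recursion
        }
        for (String field : nested.getOrDefault(typeName, List.of())) {
            define(field);
        }
        declarationOrder.add(typeName);
    }

    public static void main(String[] args) {
        define("Position");
        System.out.println(declarationOrder); // prints [Coordinate, Position]
    }
}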
Use of com.ibm.j9ddr.vm29.j9.DataType in project chipster-web-server by chipster.
The class XmlSession, method fixModificationParents.
/**
* Fix dataset relations of user-modified datasets
*
* When the user creates a new dataset from the selected rows of an old dataset,
* the Java client creates a MODIFICATION type of link between the datasets and
* creates a dummy Operation object, which will be converted to a Job object for
* the web app. The new client doesn't use links, but gets dataset relations
* from the Job inputs. However, the Java client doesn't create the input
* definitions for the dummy job, so the web app wouldn't be able to show the
* relation. This method creates the Input objects for those Jobs so that the
* dataset relationships are shown correctly in the web app.
*
* @param sessionType
* @param session
* @param datasetMap
* @param jobMap
* @throws RestException
*/
private static void fixModificationParents(SessionType sessionType, Session session, Map<UUID, Dataset> datasetMap, Map<UUID, Job> jobMap) throws RestException {
for (DataType dataType : sessionType.getData()) {
UUID datasetId = UUID.fromString(dataType.getDataId());
Dataset dataset = datasetMap.get(datasetId);
Job job = jobMap.get(dataset.getSourceJob());
if (job != null && "operation-definition-id-user-modification".equals(job.getToolId())) {
List<DataType> parents = getLinked(sessionType, dataType, Link.MODIFICATION);
if (parents.size() == 1) {
Input input = new Input();
String dataId = parents.get(0).getDataId();
input.setDatasetId(dataId);
List<Input> inputs = new ArrayList<>();
inputs.add(input);
job.setInputs(inputs);
}
}
}
}
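Reduced to its core, the repair synthesizes the one Input record the Java client omitted, and only when exactly one MODIFICATION parent exists. A sketch of that step in isolation, with hypothetical minimal classes standing in for the real chipster model types:

import java.util.ArrayList;
import java.util.List;

// Hypothetical minimal doubles for the chipster Job/Input model.
public class ModificationFixSketch {
    static final String USER_MODIFICATION_TOOL = "operation-definition-id-user-modification";

    static class Input { String datasetId; }
    static class Job {
        String toolId;
        List<Input> inputs = new ArrayList<>();
    }

    // Mirrors the body of the loop in fixModificationParents.
    static void attachParent(Job sourceJob, List<String> modificationParentIds) {
        if (sourceJob == null || !USER_MODIFICATION_TOOL.equals(sourceJob.toolId)) {
            return; // only dummy user-modification jobs are repaired
        }
        if (modificationParentIds.size() == 1) { // ambiguous cases are left untouched
            Input input = new Input();
            input.datasetId = modificationParentIds.get(0);
            List<Input> inputs = new ArrayList<>();
            inputs.add(input);
            sourceJob.inputs = inputs;
        }
    }
}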
Use of com.ibm.j9ddr.vm29.j9.DataType in project chipster-web-server by chipster.
The class XmlSession, method getEntryToDatasetIdMap.
/**
* There are two variants of version 2 xml sessions: in the older variant the
* zip entries of the data files are named "file-0", "file-1" and so on, while
* the newer variant uses the dataId for the entry name. Map entry names to
* dataIds to support the older variant. In the newer variant the key and value
* will be the same.
*
* @param sessionType
* @return map from zip entry name to dataId
*/
private static HashMap<String, String> getEntryToDatasetIdMap(SessionType sessionType) {
HashMap<String, String> entryToDatasetIdMap = new HashMap<>();
for (DataType dataType : sessionType.getData()) {
boolean found = false;
for (LocationType locationType : dataType.getLocation()) {
if (StorageMethod.LOCAL_SESSION_ZIP.toString().equals(locationType.getMethod())) {
found = true;
String url = locationType.getUrl();
String entryName = url.substring(url.indexOf("#") + 1);
entryToDatasetIdMap.put(entryName, dataType.getDataId());
}
}
if (!found) {
throw new BadRequestException("file content of " + dataType.getName() + " not found");
}
}
return entryToDatasetIdMap;
}
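Both naming variants resolve through the same fragment extraction: the entry name is whatever follows the '#' in the dataset's LOCAL_SESSION_ZIP URL. A self-contained illustration (the URLs below are invented for the example):

public class EntryNameSketch {
    // Same extraction as in getEntryToDatasetIdMap.
    static String entryName(String url) {
        return url.substring(url.indexOf("#") + 1);
    }

    public static void main(String[] args) {
        // older variant: sequentially numbered entries
        System.out.println(entryName("session.zip#file-0"));
        // newer variant: the dataId itself is the entry name,
        // so map key and value end up identical
        System.out.println(entryName("session.zip#8e7a1a2b-4c1d-4f7e-9b1a-2f3c4d5e6f70"));
    }
}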
Use of com.ibm.j9ddr.vm29.j9.DataType in project AGREE by loonwerks.
The class AgreeTypeSystem, method typeDefFromClassifier.
public static TypeDef typeDefFromClassifier(Classifier c) {
if (c instanceof DataType || (c instanceof DataImplementation && ((DataImplementation) c).getAllSubcomponents().isEmpty() && ((DataImplementation) c).getType() != null)) {
// Includes special case for data implementations implementing extensions of primitive types
Classifier classifierType = c instanceof DataImplementation ? ((DataImplementation) c).getType() : c;
List<PropertyAssociation> pas = classifierType.getAllPropertyAssociations();
for (Classifier classType : classifierType.getSelfPlusAllExtended()) {
if (classType != null && hasIntegerDataRepresentation(classType)) {
for (PropertyAssociation choice : pas) {
Property p = choice.getProperty();
PropertyExpression v = choice.getOwnedValues().get(0).getOwnedValue();
String key = p.getQualifiedName();
if (key.equals("Data_Model::Integer_Range")) {
if (v instanceof RangeValue) {
try {
RangeValue rangeValue = (RangeValue) v;
long min = intFromPropExp(rangeValue.getMinimum()).get();
long max = intFromPropExp(rangeValue.getMaximum()).get();
return new RangeIntTypeDef(min, max);
} catch (Exception e) {
return Prim.ErrorTypeDef;
}
}
}
}
return Prim.IntTypeDef;
} else if (classType != null && hasFloatDataRepresentation(classType)) {
for (PropertyAssociation choice : pas) {
Property p = choice.getProperty();
PropertyExpression v = choice.getOwnedValues().get(0).getOwnedValue();
String key = p.getQualifiedName();
if (key.equals("Data_Model::Real_Range")) {
if (v instanceof RangeValue) {
try {
RangeValue rangeValue = (RangeValue) v;
double min = realFromPropExp(rangeValue.getMinimum()).get();
double max = realFromPropExp(rangeValue.getMaximum()).get();
return new RangeRealTypeDef(min, max);
} catch (Exception e) {
return Prim.ErrorTypeDef;
}
}
}
}
return Prim.RealTypeDef;
} else if (classType != null && hasBooleanDataRepresentation(classType)) {
return Prim.BoolTypeDef;
}
}
boolean prop_isArray = false;
int prop_arraySize = 0;
TypeDef prop_arrayBaseType = null;
boolean prop_isEnum = false;
List<String> prop_enumValues = null;
for (PropertyAssociation choice : pas) {
Property p = choice.getProperty();
PropertyExpression v = choice.getOwnedValues().get(0).getOwnedValue();
String key = p.getQualifiedName();
key = key == null ? p.getName() : key;
if (key == null) {
return Prim.ErrorTypeDef;
}
if (key.equalsIgnoreCase("Data_Model::Data_Representation")) {
if (v instanceof NamedValue) {
AbstractNamedValue anv = ((NamedValue) v).getNamedValue();
if (anv instanceof EnumerationLiteral) {
EnumerationLiteral el = (EnumerationLiteral) anv;
prop_isArray = el.getName().equals("Array");
prop_isEnum = el.getName().equals("Enum");
}
}
} else if (key.equalsIgnoreCase("Data_Model::Enumerators")) {
if (v instanceof ListValue) {
EList<PropertyExpression> peList = ((ListValue) v).getOwnedListElements();
String prefix = c.getQualifiedName() + "_";
prop_enumValues = new ArrayList<>();
for (PropertyExpression pe : peList) {
if (pe instanceof StringLiteral) {
String enumString = prefix + ((StringLiteral) pe).getValue();
prop_enumValues.add(enumString);
}
}
}
} else if (key.equalsIgnoreCase("Data_Model::Base_Type")) {
if (v instanceof ListValue) {
ListValue l = (ListValue) v;
PropertyExpression pe = l.getOwnedListElements().get(0);
if (pe instanceof ClassifierValue) {
prop_arrayBaseType = typeDefFromClassifier(((ClassifierValue) pe).getClassifier());
}
}
} else if (key.equalsIgnoreCase("Data_Model::Dimension")) {
if (v instanceof ListValue) {
ListValue l = (ListValue) v;
PropertyExpression pe = l.getOwnedListElements().get(0);
prop_arraySize = Math.toIntExact(intFromPropExp(pe).orElse((long) -1).longValue());
}
}
}
if (prop_isArray && prop_arraySize > 0 && prop_arrayBaseType != null) {
return new ArrayTypeDef(prop_arrayBaseType, prop_arraySize, Optional.of(c));
} else if (prop_isEnum && prop_enumValues != null) {
String name = c.getQualifiedName();
return new EnumTypeDef(name, prop_enumValues, c);
}
} else if (c instanceof ComponentClassifier) {
Map<String, TypeDef> fields = new HashMap<>();
Classifier currClsfr = c;
while (currClsfr != null) {
ComponentType ct = null;
if (currClsfr instanceof ComponentImplementation) {
EList<Subcomponent> subcomps = ((ComponentImplementation) currClsfr).getAllSubcomponents();
for (Subcomponent sub : subcomps) {
String fieldName = sub.getName();
if (sub.getClassifier() != null) {
boolean prop_isArray = false;
int prop_arraySize = 0;
boolean prop_isEnum = false;
List<String> prop_enumValues = null;
for (PropertyAssociation pa : sub.getOwnedPropertyAssociations()) {
Property p = pa.getProperty();
String key = p.getQualifiedName();
key = key == null ? p.getName() : key;
PropertyExpression v = null;
if (!pa.getOwnedValues().isEmpty()) {
v = pa.getOwnedValues().get(0).getOwnedValue();
} else {
continue;
}
if (key.equals("Data_Model::Data_Representation")) {
if (v instanceof NamedValue) {
AbstractNamedValue anv = ((NamedValue) v).getNamedValue();
if (anv instanceof EnumerationLiteral) {
EnumerationLiteral el = (EnumerationLiteral) anv;
prop_isArray = el.getName().equals("Array");
prop_isEnum = el.getName().equals("Enum");
}
}
} else if (key.equals("Data_Model::Dimension")) {
if (v instanceof ListValue) {
ListValue l = (ListValue) v;
PropertyExpression pe = l.getOwnedListElements().get(0);
prop_arraySize = Math.toIntExact(intFromPropExp(pe).orElse((long) -1).longValue());
}
} else if (key.equals("Data_Model::Enumerators")) {
if (v instanceof ListValue) {
EList<PropertyExpression> peList = ((ListValue) v).getOwnedListElements();
String prefix = c.getQualifiedName() + "_";
prop_enumValues = new ArrayList<>();
for (PropertyExpression pe : peList) {
if (pe instanceof StringLiteral) {
String enumString = prefix + ((StringLiteral) pe).getValue();
prop_enumValues.add(enumString);
}
}
}
}
}
if (prop_isArray && prop_arraySize > 0) {
TypeDef typeDef = new ArrayTypeDef(typeDefFromClassifier(sub.getClassifier()), prop_arraySize, Optional.empty());
fields.putIfAbsent(fieldName, typeDef);
} else if (prop_isEnum && prop_enumValues != null) {
String name = c.getQualifiedName();
TypeDef typeDef = new EnumTypeDef(name, prop_enumValues, c);
fields.putIfAbsent(fieldName, typeDef);
} else if (sub.getArrayDimensions().size() == 0) {
TypeDef typeDef = typeDefFromClassifier(sub.getClassifier());
fields.putIfAbsent(fieldName, typeDef);
} else if (sub.getArrayDimensions().size() == 1) {
ArrayDimension ad = sub.getArrayDimensions().get(0);
int size = Math.toIntExact(getArraySize(ad));
TypeDef stem = typeDefFromClassifier(sub.getClassifier());
TypeDef typeDef = new ArrayTypeDef(stem, size, Optional.empty());
fields.putIfAbsent(fieldName, typeDef);
}
}
}
ct = ((ComponentImplementation) currClsfr).getType();
} else if (currClsfr instanceof ComponentType) {
ct = (ComponentType) currClsfr;
}
if (ct != null) {
EList<Feature> features = ct.getAllFeatures();
for (Feature feature : features) {
String fieldName = feature.getName();
if (feature.getClassifier() != null) {
if (feature.getArrayDimensions().size() == 0) {
TypeDef typeDef = typeDefFromClassifier(feature.getClassifier());
fields.putIfAbsent(fieldName, typeDef);
} else if (feature.getArrayDimensions().size() == 1) {
ArrayDimension ad = feature.getArrayDimensions().get(0);
int size = Math.toIntExact(getArraySize(ad));
TypeDef stem = typeDefFromClassifier(feature.getClassifier());
TypeDef typeDef = new ArrayTypeDef(stem, size, Optional.empty());
fields.putIfAbsent(fieldName, typeDef);
}
}
}
for (AnnexSubclause annex : AnnexUtil.getAllAnnexSubclauses(currClsfr, AgreePackage.eINSTANCE.getAgreeContractSubclause())) {
AgreeContract contract = (AgreeContract) ((AgreeContractSubclause) annex).getContract();
for (SpecStatement spec : contract.getSpecs()) {
List<Arg> args = new ArrayList<>();
if (spec instanceof EqStatement) {
args = ((EqStatement) spec).getLhs();
} else if (spec instanceof InputStatement) {
args = ((InputStatement) spec).getLhs();
}
for (Arg arg : args) {
String fieldName = arg.getName();
TypeDef typeDef = typeDefFromNE(arg);
fields.putIfAbsent(fieldName, typeDef);
}
if (spec instanceof ConstStatement) {
String fieldName = ((ConstStatement) spec).getName();
TypeDef typeDef = AgreeTypeSystem.typeDefFromType(((ConstStatement) spec).getType());
fields.putIfAbsent(fieldName, typeDef);
}
}
}
}
currClsfr = currClsfr.getExtended();
}
String name = c.getQualifiedName();
return new RecordTypeDef(name, fields, c);
}
return Prim.ErrorTypeDef;
}
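Stripped of the AADL plumbing, the method above is a decision table: Data_Model::Data_Representation selects the branch, and the range, enumerator, base-type, and dimension properties refine it. A simplified mirror of that dispatch follows; the enum and parameters are hypothetical, since the real method reads these facts from property associations on the classifier:

import java.util.List;
import java.util.Optional;

// Simplified mirror of the dispatch order in typeDefFromClassifier.
public class TypeDispatchSketch {
    enum Rep { INTEGER, FLOAT, BOOLEAN, ARRAY, ENUM, RECORD }

    static String resolve(Rep rep,
                          Optional<long[]> integerRange,   // Data_Model::Integer_Range
                          Optional<double[]> realRange,    // Data_Model::Real_Range
                          List<String> enumerators,        // Data_Model::Enumerators
                          int dimension) {                 // Data_Model::Dimension
        switch (rep) {
            case INTEGER:
                // a declared Integer_Range narrows the plain integer type
                return integerRange
                        .map(r -> "RangeInt[" + r[0] + ".." + r[1] + "]")
                        .orElse("Int");
            case FLOAT:
                return realRange
                        .map(r -> "RangeReal[" + r[0] + ".." + r[1] + "]")
                        .orElse("Real");
            case BOOLEAN:
                return "Bool";
            case ARRAY:
                // the real method also requires a Base_Type for the elements
                return dimension > 0 ? "Array[" + dimension + "]" : "Error";
            case ENUM:
                return enumerators.isEmpty() ? "Error" : "Enum" + enumerators;
            default:
                // component classifiers become records of their subcomponents,
                // features, and AGREE-declared variables
                return "Record{...}";
        }
    }
}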
Use of com.ibm.j9ddr.vm29.j9.DataType in project gridfour by gwlucastrig.
The class PackageData, method process.
void process(PrintStream ps, TestOptions options, String[] args) throws IOException {
// The packaging of data in a Gvrs file can be thought of in terms of
// the steps shown below.
//
// 0. Obtain descriptive parameters about source data. In this
// case, the application is packing data from a NetCDF source
// and most of the descriptive parameters follow the pattern
// established in the earlier ExtractData.java demonstration
//
// 1. Define the fixed metadata about the file (its dimensions,
// data type, tile organization, etc.) using a GvrsFileSpecification
// object.
//
// 2. Open a new GvrsFile object using the settings created in step 1.
// Adjust any run-time parameters (such as the tile-cache size)
// according to the needs of the application.
//
// 3. Extract the data from its source and store in the Gvrs file.
//
ps.format("%nGvrs Packaging Application for NetCDF-format Global DEM files%n");
Locale locale = Locale.getDefault();
Date date = new Date();
SimpleDateFormat sdFormat = new SimpleDateFormat("dd MMM yyyy HH:mm z", locale);
ps.format("Date of Execution: %s%n", sdFormat.format(date));
String inputPath = options.getInputFile().getPath();
File outputFile = options.getOutputFile();
if (outputFile == null) {
ps.format("Missing specification for output file%n");
ps.format("Packaging application terminated%n");
return;
}
ps.format("Input file: %s%n", inputPath);
ps.format("Output file: %s%n", outputFile.getPath());
boolean[] matched = new boolean[args.length];
boolean useLsop = options.scanBooleanOption(args, "-lsop", matched, false);
// Open the NetCDF file -----------------------------------
ps.println("Opening NetCDF input file");
NetcdfFile ncfile = NetcdfFile.open(inputPath);
// Identify which Variable instances carry information about the
// geographic (latitude/longitude) coordinate system and also which
// carry information for elevation and bathymetry.
// the Variable that carries row-latitude information
Variable lat;
// the Variable that carries column-longitude information
Variable lon;
// the variable that carries elevation and bathymetry
Variable z;
lat = ncfile.findVariable("lat");
lon = ncfile.findVariable("lon");
z = ncfile.findVariable("elevation");
int[] tileSize;
// Use the input file name to format a product label
File inputFile = new File(inputPath);
String productLabel = inputFile.getName();
if (productLabel.toLowerCase().endsWith(".nc")) {
productLabel = productLabel.substring(0, productLabel.length() - 3);
}
if (lat == null) {
// ETOPO1 specification
tileSize = options.getTileSize(90, 120);
lat = ncfile.findVariable("y");
lon = ncfile.findVariable("x");
z = ncfile.findVariable("z");
} else {
tileSize = options.getTileSize(90, 120);
}
if (lat == null || lon == null || z == null) {
throw new IllegalArgumentException("Input does not contain valid lat,lon, and elevation Variables");
}
// using the variables from above, extract coordinate system
// information for the product and print it to the output.
ExtractionCoordinates extractionCoords = new ExtractionCoordinates(lat, lon);
extractionCoords.summarizeCoordinates(ps);
// Get the dimensions of the raster (grid) elevation/bathymetry data.
// the rank should be 2 for a two-dimensional grid.
int rank = z.getRank();
int[] shape = z.getShape();
int nRows = shape[0];
int nCols = shape[1];
ps.format("Rows: %8d%n", nRows);
ps.format("Columns: %8d%n", nCols);
int nRowsInTile = tileSize[0];
int nColsInTile = tileSize[1];
// Initialize the specification used to initialize the Gvrs file -------
GvrsFileSpecification spec = new GvrsFileSpecification(nRows, nCols, nRowsInTile, nColsInTile);
spec.setLabel(productLabel);
// Initialize the data type. If a zScale option was specified,
// use integer-coded floats. Otherwise, pick the data type
// based on whether the NetCDF file gives integral or floating point
// data.
boolean isZScaleSpecified = options.isZScaleSpecified();
float zScale = (float) options.getZScale();
float zOffset = (float) options.getZOffset();
// data type from NetCDF file
DataType sourceDataType = z.getDataType();
GvrsElementSpecification elementSpec = null;
GvrsElementType gvrsDataType;
if (isZScaleSpecified) {
// the options define our data type
int encodedLimitDepth = (int) ((LIMIT_DEPTH - zOffset) * zScale);
int encodedLimitElev = (int) ((LIMIT_ELEVATION - zOffset) * zScale);
elementSpec = new GvrsElementSpecificationIntCodedFloat("z", zScale, zOffset, encodedLimitDepth, encodedLimitElev, Integer.MIN_VALUE, true);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.INT_CODED_FLOAT;
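// For reference: an int-coded float stores (z - zOffset) * zScale rounded
// to an integer; decoding is i / zScale + zOffset. The verification pass
// near the end of this method applies exactly this round trip.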
} else if (sourceDataType.isIntegral()) {
elementSpec = new GvrsElementSpecificationShort("z", LIMIT_DEPTH, LIMIT_ELEVATION, FILL_VALUE);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.SHORT;
} else {
elementSpec = new GvrsElementSpecificationFloat("z", LIMIT_DEPTH, LIMIT_ELEVATION, Float.NaN);
spec.addElementSpecification(elementSpec);
gvrsDataType = GvrsElementType.FLOAT;
}
elementSpec.setDescription("Elevation (positive values) or depth (negative), in meters");
elementSpec.setUnitOfMeasure("m");
// Example with special character
elementSpec.setLabel("die H\u00f6henlage");
ps.println("Source date type " + sourceDataType + ", stored as " + gvrsDataType);
ps.println("");
// Determine whether data compression is used -------------------
boolean compressionEnabled = options.isCompressionEnabled();
spec.setDataCompressionEnabled(compressionEnabled);
boolean checksumsEnabled = options.isChecksumComputationEnabled();
spec.setChecksumEnabled(checksumsEnabled);
boolean bigAddressSpaceEnabled = options.isBigAddressSpaceEnabled();
spec.setExtendedFileSizeEnabled(bigAddressSpaceEnabled);
double[] geoCoords = extractionCoords.getGeographicCoordinateBounds();
spec.setGeographicCoordinates(geoCoords[0], geoCoords[1], geoCoords[2], geoCoords[3]);
// Check to verify that the geographic coordinates and grid coordinate
// are correctly implemented. This test is not truly part of the packaging
// process (since it should always work), but is included here as a
// diagnostic.
extractionCoords.checkSpecificationTransform(ps, spec);
// Add the LSOP codec to the specification if the -lsop option is enabled and the data type is integral.
if (useLsop) {
LsCodecUtility.addLsopToSpecification(spec, false);
}
// Create the output file and store the content from the input file.
if (outputFile.exists()) {
ps.println("Output file exists. Removing old file");
boolean status = outputFile.delete();
if (!status) {
ps.println("Removal attempt failed");
return;
}
}
ps.println("Begin processing");
double zMin = Double.POSITIVE_INFINITY;
double zMax = Double.NEGATIVE_INFINITY;
double zSum = 0;
long nSum = 0;
try (GvrsFile gvrs = new GvrsFile(outputFile, spec)) {
gvrs.writeMetadata(GvrsMnc.Copyright, "This data is in the public domain and may be used free of charge");
gvrs.writeMetadata(GvrsMnc.TermsOfUse, "This data should not be used for navigation");
GvrsElement zElement = gvrs.getElement("z");
gvrs.setTileCacheSize(GvrsCacheSize.Large);
storeGeoreferencingInformation(gvrs);
// Initialize data-statistics collection ---------------------------
// We happen to know the range of values for the global DEM a priori:
// it ranges from about -11000 to 8650. This allows us to tabulate counts
// of which values we find in the data source. We can use this information
// to estimate the entropy of the source data and make a realistic
// assessment of how many bytes would be needed to store them.
InputDataStatCollector stats = new InputDataStatCollector(-11000, 8650, zScale);
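// The collector tabulates a histogram of values over this fixed range;
// stats.getEntropy() below turns it into a bits-per-sample estimate.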
int[] readOrigin = new int[rank];
int[] readShape = new int[rank];
// -----------------------------------------------------------------
// Package the data
long time0 = System.currentTimeMillis();
for (int iRow = 0; iRow < nRows; iRow++) {
if (iRow % 1000 == 999) {
long time1 = System.currentTimeMillis();
double deltaT = time1 - time0;
// rows per millis
double rate = (iRow + 1) / deltaT;
int nRemaining = nRows - iRow;
long remainingT = (long) (nRemaining / rate);
Date d = new Date(time1 + remainingT);
ps.format("Completed %d rows, %4.1f%% of total, est completion at %s%n", iRow + 1, 100.0 * (double) iRow / (nRows - 1.0), d);
ps.flush();
}
int row0 = iRow;
int col0 = 0;
readOrigin[0] = row0;
readOrigin[1] = col0;
readShape[0] = 1;
readShape[1] = nCols;
// The read operation can throw an InvalidRangeException, which should not happen in this application unless the input file is corrupt.
try {
Array array = z.read(readOrigin, readShape);
// transfer values from the row just read and store them in the Gvrs file.
switch(gvrsDataType) {
case INTEGER:
case SHORT:
for (int iCol = 0; iCol < nCols; iCol++) {
int sample = array.getInt(iCol);
zElement.writeValueInt(iRow, iCol, sample);
stats.addSample(sample);
if (sample < zMin) {
zMin = sample;
}
if (sample > zMax) {
zMax = sample;
}
zSum += sample;
nSum++;
}
break;
case INT_CODED_FLOAT:
case FLOAT:
default:
for (int iCol = 0; iCol < nCols; iCol++) {
float sample = array.getFloat(iCol);
zElement.writeValue(iRow, iCol, sample);
stats.addSample(sample);
if (sample < zMin) {
zMin = sample;
}
if (sample > zMax) {
zMax = sample;
}
zSum += sample;
nSum++;
}
}
} catch (InvalidRangeException irex) {
throw new IOException(irex.getMessage(), irex);
}
}
gvrs.flush();
long time1 = System.currentTimeMillis();
double timeToProcess = (time1 - time0) / 1000.0;
ps.format("Finished processing file in %4.1f seconds%n", timeToProcess);
ps.format("Entropy for input data %4.1f bits/sample%n", stats.getEntropy());
long outputSize = outputFile.length();
long nCells = (long) nRows * (long) nCols;
double bitsPerSymbol = 8.0 * (double) outputSize / (double) nCells;
ps.format("Storage used (including overhead) %6.4f bits/sample%n", bitsPerSymbol);
ps.format("%nSummary of file content and packaging actions------------%n");
gvrs.summarize(ps, true);
ps.format("Range of z values:%n");
ps.format(" Min z: %8.3f%n", zMin);
ps.format(" Max z: %8.3f%n", zMax);
ps.format(" Avg z: %8.3f%n", zSum / (nSum > 0 ? nSum : 1));
}
// If verification is enabled, open the output file and compare the values it stores to those of the source data.
if (options.isVerificationEnabled()) {
int[] readOrigin = new int[rank];
int[] readShape = new int[rank];
ps.println("\nTesting product for data consistency with source");
ps.println("Opening gvrs file for reading");
long time0 = System.currentTimeMillis();
try (GvrsFile gvrs = new GvrsFile(outputFile, "r")) {
long time1 = System.currentTimeMillis();
ps.println("Opening complete in " + (time1 - time0) + " ms");
GvrsFileSpecification testSpec = gvrs.getSpecification();
String testLabel = testSpec.getLabel();
ps.println("Label: " + testLabel);
GvrsMetadata m = gvrs.readMetadata("Copyright", 0);
if (m != null) {
ps.println("Copyright: " + m.getString());
}
GvrsElement zElement = gvrs.getElement("z");
ps.println("Element: " + zElement.getName() + ", " + zElement.getDescription());
gvrs.setTileCacheSize(GvrsCacheSize.Large);
for (int iRow = 0; iRow < nRows; iRow++) {
if (iRow % 10000 == 9999) {
time1 = System.currentTimeMillis();
double deltaT = time1 - time0;
// rows per millis
double rate = (iRow + 1) / deltaT;
int nRemaining = nRows - iRow;
long remainingT = (long) (nRemaining / rate);
Date d = new Date(time1 + remainingT);
ps.format("Completed %d rows, %4.1f%% of total, est completion at %s%n", iRow + 1, 100.0 * (double) iRow / (nRows - 1.0), d);
ps.flush();
}
int row0 = iRow;
int col0 = 0;
readOrigin[0] = row0;
readOrigin[1] = col0;
readShape[0] = 1;
readShape[1] = nCols;
try {
Array array = z.read(readOrigin, readShape);
switch(gvrsDataType) {
case INTEGER:
for (int iCol = 0; iCol < nCols; iCol++) {
int sample = array.getInt(iCol);
int test = zElement.readValueInt(iRow, iCol);
if (sample != test) {
ps.println("Failure at " + iRow + ", " + iCol);
test = zElement.readValueInt(iRow, iCol);
System.exit(-1);
}
}
break;
case INT_CODED_FLOAT:
for (int iCol = 0; iCol < nCols; iCol++) {
double sample = array.getDouble(iCol);
int iSample = (int) ((sample - zOffset) * zScale + 0.5);
float fSample = iSample / zScale + zOffset;
float test = zElement.readValue(iRow, iCol);
double delta = Math.abs(fSample - test);
if (delta > 1.01 / zScale) {
ps.println("Failure at " + iRow + ", " + iCol);
System.exit(-1);
}
}
break;
case FLOAT:
default:
for (int iCol = 0; iCol < nCols; iCol++) {
float sample = array.getFloat(iCol);
float test = zElement.readValue(iRow, iCol);
if (sample != test) {
ps.println("Failure at " + iRow + ", " + iCol);
test = zElement.readValueInt(iRow, iCol);
System.exit(-1);
}
}
}
} catch (InvalidRangeException irex) {
throw new IOException(irex.getMessage(), irex);
}
}
time1 = System.currentTimeMillis();
ps.println("Exhaustive cross check complete in " + (time1 - time0) + " ms");
gvrs.summarize(ps, false);
}
}
ncfile.close();
}
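Reading the packaged file back afterwards needs only GVRS calls that already appear above. A minimal sketch, assuming the org.gridfour.gvrs package layout and a hypothetical output file name:

import java.io.File;
import java.io.IOException;
import org.gridfour.gvrs.GvrsCacheSize;
import org.gridfour.gvrs.GvrsElement;
import org.gridfour.gvrs.GvrsFile;

// Minimal read-back sketch; every GVRS call here also appears in the
// packaging code above. The file name is hypothetical.
public class ReadBackSketch {
    public static void main(String[] args) throws IOException {
        File file = new File("etopo1.gvrs"); // hypothetical output of PackageData
        try (GvrsFile gvrs = new GvrsFile(file, "r")) {
            gvrs.setTileCacheSize(GvrsCacheSize.Large);
            GvrsElement z = gvrs.getElement("z");
            // read a single elevation/depth sample at grid row 0, column 0
            float value = z.readValue(0, 0);
            System.out.println("z(0,0) = " + value);
        }
    }
}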