Use of org.apache.commons.io.serialization.ValidatingObjectInputStream in project kylo by Teradata — class SparkLauncherSparkShellProcessTest, method testCluster.
/**
 * Verifies the lifecycle of a cluster process: deserialization, readiness
 * notification, and retrieval of the registered hostname and port.
 */
@Test
public void testCluster() throws Exception {
    // Counters bumped by the listener callbacks so we can assert how often each fired
    final AtomicInteger readyCount = new AtomicInteger(0);
    final AtomicInteger startedCount = new AtomicInteger(0);
    final AtomicInteger stoppedCount = new AtomicInteger(0);
    final SparkShellProcessListener listener = new SparkShellProcessListener() {

        @Override
        public void processReady(@Nonnull SparkShellProcess process) {
            readyCount.incrementAndGet();
        }

        @Override
        public void processStarted(@Nonnull SparkShellProcess process) {
            startedCount.incrementAndGet();
        }

        @Override
        public void processStopped(@Nonnull SparkShellProcess process) {
            stoppedCount.incrementAndGet();
        }
    };

    // Round-trip a process through Java serialization to obtain a deserialized copy
    final ByteArrayOutputStream serialized = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(serialized)) {
        out.writeObject(new SparkLauncherSparkShellProcess(Mockito.mock(SparkAppHandle.class), CLIENT_ID, CLIENT_SECRET, 0, TimeUnit.MILLISECONDS));
    }
    final ValidatingObjectInputStream in = new ValidatingObjectInputStream(new ByteArrayInputStream(serialized.toByteArray()));
    in.accept(SparkLauncherSparkShellProcess.class);
    final SparkLauncherSparkShellProcess process = (SparkLauncherSparkShellProcess) in.readObject();

    // A freshly deserialized process is not yet ready
    process.addListener(listener);
    Assert.assertFalse(process.waitForReady());

    // Registering host/port and flipping ready should fire the ready callback once
    process.setHostname(HOSTNAME);
    process.setPort(PORT);
    process.setReady(true);
    Assert.assertEquals(1, readyCount.get());

    // The registered endpoint must now be visible through the accessors
    Assert.assertTrue(process.waitForReady());
    Assert.assertEquals(HOSTNAME, process.getHostname());
    Assert.assertEquals(PORT, process.getPort());
}
Use of org.apache.commons.io.serialization.ValidatingObjectInputStream in project kylo by Teradata — class FeedEventStatistics, method loadBackup.
/**
 * Loads a previously persisted, GZIP-compressed feed event statistics backup and
 * merges it into this instance via {@link #load}.
 *
 * @param location path of the serialized backup file
 * @return {@code true} if a backup was read and loaded successfully, otherwise {@code false}
 */
public boolean loadBackup(String location) {
    FeedEventStatisticsData inStats = null;
    try (FileInputStream fin = new FileInputStream(location);
         GZIPInputStream gis = new GZIPInputStream(fin);
         ValidatingObjectInputStream ois = new ValidatingObjectInputStream(gis)) {
        // Whitelist the classes that may appear in the stream to guard against
        // deserialization of untrusted classes.
        ois.accept(FeedEventStatisticsDataV3.class, FeedEventStatisticsDataV2.class, FeedEventStatisticsData.class);
        ois.accept("java.lang.*", "java.util.*", "[Ljava.lang.*", "[Ljava.util.*");
        inStats = (FeedEventStatisticsData) ois.readObject();
        // NOTE: no explicit close() needed; try-with-resources closes the streams.
    } catch (FileNotFoundException ex) {
        // A missing backup is a normal condition (e.g. first startup); not an error.
        log.info("Kylo feed event statistics backup file not found. Not loading backup from {}. ", location);
    } catch (Exception ex) {
        log.error("Unable to load feed event statistics backup from {}. {} ", location, ex.getMessage(), ex);
    }
    if (inStats != null) {
        boolean success = this.load(inStats);
        // DELETE backup so stale statistics are not re-loaded on the next start.
        if (deleteBackupAfterLoad) {
            File f = new File(location);
            // Previously a RuntimeException was thrown here and immediately swallowed
            // by an empty catch block; log instead so operators know the file remains.
            if (f.exists() && !f.delete()) {
                log.warn("Error deleting feed event statistics backup file {}", f.getName());
            }
        }
        return success;
    }
    return false;
}
Use of org.apache.commons.io.serialization.ValidatingObjectInputStream in project incubator-rya by apache — class SchemaWritable, method readFields.
/**
 * Deserializes this schema from {@code in}: an int byte-count followed by a Java
 * serialization stream containing the property list and the class list, validated
 * against a whitelist of expected classes.
 *
 * @param in source of the serialized schema
 * @throws IOException if the count is invalid, the stream cannot be read, or a
 *         required class is missing
 */
@Override
public void readFields(DataInput in) throws IOException {
    final int size = in.readInt();
    if (size < 1) {
        // Was "throw new Error(...)": an I/O-level problem should surface as the
        // IOException this method already declares, not as a java.lang.Error.
        throw new IOException("De-serialization failed, count is less than one.");
    }
    final byte[] bytes = new byte[size];
    in.readFully(bytes);
    try (final ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
         final ValidatingObjectInputStream vois = new ValidatingObjectInputStream(bais))
    // Tip: to find classes missing from the accept() list below, override
    // invalidClassNameFound(String) and print the rejected class name.
    {
        // This is a (hopefully) complete list of classes involved in a Schema to be
        // serialized. If a useful class is missing, readObject() throws
        // InvalidClassException.
        vois.accept(//
                java.util.ArrayList.class, //
                org.apache.rya.reasoning.OwlProperty.class, //
                java.util.HashSet.class, //
                org.apache.rya.reasoning.OwlClass.class, //
                org.openrdf.model.impl.URIImpl.class, org.openrdf.model.impl.BNodeImpl.class);
        try {
            final Iterable<?> propList = (Iterable<?>) vois.readObject();
            final Iterable<?> classList = (Iterable<?>) vois.readObject();
            for (Object p : propList) {
                OwlProperty prop = (OwlProperty) p;
                properties.put(prop.getURI(), prop);
            }
            for (Object c : classList) {
                OwlClass owlClass = (OwlClass) c;
                classes.put(owlClass.getURI(), owlClass);
            }
        } catch (ClassNotFoundException e) {
            // Preserve the cause (previously dropped by "throw new Error(msg)").
            throw new IOException("While reading a schema object.", e);
        }
    }
}
Use of org.apache.commons.io.serialization.ValidatingObjectInputStream in project incubator-rya by apache — class FluoQueryMetadataDAO, method readAggregationMetadataBuilder.
/**
 * Reads the {@link AggregationMetadata} stored for {@code nodeId} from the Fluo
 * table and returns a builder populated with it.
 *
 * @param sx snapshot used to read from the Fluo table (not null)
 * @param nodeId identifier of the aggregation node (not null)
 * @return a builder holding the metadata read for the node
 */
private AggregationMetadata.Builder readAggregationMetadataBuilder(final SnapshotBase sx, final String nodeId) {
    requireNonNull(sx);
    requireNonNull(nodeId);
    // Fetch the values from the Fluo table.
    final String rowId = nodeId;
    final Map<Column, String> values = sx.gets(rowId, FluoQueryColumns.AGGREGATION_VARIABLE_ORDER, FluoQueryColumns.AGGREGATION_PARENT_NODE_ID, FluoQueryColumns.AGGREGATION_CHILD_NODE_ID, FluoQueryColumns.AGGREGATION_GROUP_BY_BINDING_NAMES);
    // Return an object holding them.
    final String varOrderString = values.get(FluoQueryColumns.AGGREGATION_VARIABLE_ORDER);
    final VariableOrder varOrder = new VariableOrder(varOrderString);
    final String parentNodeId = values.get(FluoQueryColumns.AGGREGATION_PARENT_NODE_ID);
    final String childNodeId = values.get(FluoQueryColumns.AGGREGATION_CHILD_NODE_ID);
    // Read the Group By variable order if one was present. Guard against a missing
    // (null) column as well as an empty one; the old code NPE'd when the column was absent.
    final String groupByString = values.get(FluoQueryColumns.AGGREGATION_GROUP_BY_BINDING_NAMES);
    final VariableOrder groupByVars = (groupByString == null || groupByString.isEmpty()) ? new VariableOrder() : new VariableOrder(groupByString.split(";"));
    // Deserialize the collection of AggregationElements.
    final Bytes aggBytes = sx.get(Bytes.of(nodeId.getBytes(Charsets.UTF_8)), FluoQueryColumns.AGGREGATION_AGGREGATIONS);
    final Collection<AggregationElement> aggregations;
    try (final ValidatingObjectInputStream vois = new ValidatingObjectInputStream(aggBytes.toInputStream()))
    // Tip: to find classes missing from the accept() list below, override
    // invalidClassNameFound(String) and print the rejected class name.
    {
        // These classes are allowed to be deserialized. Others throw InvalidClassException.
        vois.accept(java.util.ArrayList.class, java.lang.Enum.class, AggregationElement.class, AggregationType.class);
        final Object object = vois.readObject();
        if (!(object instanceof Collection<?>)) {
            throw new InvalidClassException("Object read was not of type Collection. It was: " + object.getClass());
        }
        @SuppressWarnings("unchecked")
        final Collection<AggregationElement> readAggregations = (Collection<AggregationElement>) object;
        aggregations = readAggregations;
    } catch (final IOException | ClassNotFoundException e) {
        throw new RuntimeException("Problem encountered while reading AggregationMetadata from the Fluo table. Unable " + "to deserialize the AggregationElements from a byte[].", e);
    }
    final AggregationMetadata.Builder builder = AggregationMetadata.builder(nodeId).setVarOrder(varOrder).setParentNodeId(parentNodeId).setChildNodeId(childNodeId).setGroupByVariableOrder(groupByVars);
    for (final AggregationElement aggregation : aggregations) {
        builder.addAggregation(aggregation);
    }
    return builder;
}
Use of org.apache.commons.io.serialization.ValidatingObjectInputStream in project jackrabbit-oak by apache — class DataStoreCacheUpgradeUtils, method deSerializeUploadMap.
/**
 * De-serialize the pending uploads map from {@link org.apache.jackrabbit.core.data.AsyncUploadCache}.
 *
 * @param homeDir the directory where the serialized file is maintained
 * @return the de-serialized map, or an empty map if the file is absent or unreadable
 */
private static Map<String, Long> deSerializeUploadMap(File homeDir) {
    Map<String, Long> asyncUploadMap = Maps.newHashMap();
    File asyncUploadMapFile = new File(homeDir, UPLOAD_MAP);
    if (asyncUploadMapFile.exists()) {
        String path = asyncUploadMapFile.getAbsolutePath();
        // try-with-resources replaces the old closeQuietly() finally block and also
        // closes the ValidatingObjectInputStream, which was previously leaked.
        try (InputStream fis = new FileInputStream(path);
             ValidatingObjectInputStream input = new ValidatingObjectInputStream(fis)) {
            // Whitelist the classes expected in the serialized map to guard against
            // deserialization of untrusted classes.
            input.accept(HashMap.class, Map.class, Number.class, Long.class, String.class);
            asyncUploadMap = (Map<String, Long>) input.readObject();
        } catch (Exception e) {
            LOG.error("Error in reading pending uploads map [{}] from location [{}]", UPLOAD_MAP, homeDir, e);
        }
        LOG.debug("AsyncUploadMap read [{}]", asyncUploadMap);
    }
    return asyncUploadMap;
}
Aggregations