Use of org.apache.geode.pdx.internal.PdxType in the Apache Geode project.
Excerpt from class GetPDXTypeById, method cmdExecute.
/**
 * Serves a client "get PDX type by id" request: looks up the {@link PdxType}
 * registered under the id carried in part 0 of the message and sends it back
 * as a single-part RESPONSE. Any lookup failure is reported to the client as
 * an exception response instead of being propagated.
 */
@Override
public void cmdExecute(Message clientMessage, ServerConnection serverConnection, long start)
    throws IOException, ClassNotFoundException {
  serverConnection.setAsTrue(REQUIRES_RESPONSE);
  if (logger.isDebugEnabled()) {
    logger.debug("{}: Received get pdx type by id request ({} parts) from {}",
        serverConnection.getName(), clientMessage.getNumberOfParts(),
        serverConnection.getSocketString());
  }

  // The requested type id is the sole payload of the request.
  final int typeId = clientMessage.getPart(0).getInt();

  final PdxType pdxType;
  try {
    InternalCache cache = serverConnection.getCache();
    pdxType = cache.getPdxRegistry().getType(typeId);
  } catch (Exception e) {
    // Report the failure to the client and mark the request as answered.
    writeException(clientMessage, e, false, serverConnection);
    serverConnection.setAsTrue(RESPONDED);
    return;
  }

  // Success path: single-part response carrying the (possibly null) PdxType.
  Message response = serverConnection.getResponseMessage();
  response.setMessageType(MessageType.RESPONSE);
  response.setNumberOfParts(1);
  response.setTransactionId(clientMessage.getTransactionId());
  response.addObjPart(pdxType);
  response.send(serverConnection);
  serverConnection.setAsTrue(RESPONDED);
}
Use of org.apache.geode.pdx.internal.PdxType in the Apache Geode project.
Excerpt from class DiskStoreImpl, method getPdxTypes.
/**
 * Recovers the PDX type registry region persisted in this disk store (if any)
 * and returns all {@link PdxType} values it contains, sorted by class name.
 *
 * @return the recovered types sorted by class name, or an empty list when this
 *         disk store holds no PDX type region
 * @throws IOException if recovery of the region data fails
 */
private Collection<PdxType> getPdxTypes() throws IOException {
  // Since we are recovering a disk store, the cast from DiskRegionView -->
  // PlaceHolderDiskRegion and from RegionEntry --> DiskEntry should be ok.
  // In offline mode, we need to schedule the regions to be recovered explicitly.
  DiskRegionView foundPdx = null;
  for (DiskRegionView drv : getKnown()) {
    if (drv.getName().equals(PeerTypeRegistration.REGION_FULL_PATH)) {
      foundPdx = drv;
      scheduleForRecovery((PlaceHolderDiskRegion) drv);
    }
  }
  if (foundPdx == null) {
    // A store without a PDX region simply has no types; this is not an error.
    return Collections.emptyList();
  }
  recoverRegionsThatAreReady();
  ArrayList<PdxType> result = new ArrayList<>();
  for (RegionEntry re : foundPdx.getRecoveredEntryMap().regionEntries()) {
    // NOTE(review): _getValueRetain may bump a ref count with no matching
    // release here — confirm whether offline recovery requires one.
    Object value = re._getValueRetain(foundPdx, true);
    if (Token.isRemoved(value)) {
      continue; // skip removed/tombstone entries
    }
    if (value instanceof CachedDeserializable) {
      value = ((CachedDeserializable) value).getDeserializedForReading();
    }
    if (value instanceof PdxType) {
      result.add((PdxType) value);
    }
  }
  // Sort by class name for deterministic output, matching dumpPdxTypes().
  result.sort(Comparator.comparing(PdxType::getClassName));
  return result;
}
Use of org.apache.geode.pdx.internal.PdxType in the Apache Geode project.
Excerpt from class DiskStoreImpl, method dumpPdxTypes.
/**
 * Writes a human-readable dump of all PDX types and enums persisted in this
 * disk store to the given stream, each group sorted for stable output.
 *
 * @param printStream destination for the dump; never closed by this method
 */
private void dumpPdxTypes(PrintStream printStream) {
  try {
    ArrayList<PdxType> types = new ArrayList<>();
    ArrayList<EnumInfo> enums = new ArrayList<>();
    // getPdxTypesAndEnums() mixes both kinds of registry entries;
    // split them so each group can be sorted by its own ordering.
    for (Object i : getPdxTypesAndEnums()) {
      if (i instanceof PdxType) {
        types.add((PdxType) i);
      } else {
        enums.add((EnumInfo) i);
      }
    }
    types.sort(Comparator.comparing(PdxType::getClassName));
    enums.sort(EnumInfo::compareTo);
    printStream.println("PDX Types:");
    for (PdxType type : types) {
      type.toStream(printStream, true);
    }
    printStream.println("PDX Enums:");
    for (EnumInfo e : enums) {
      e.toStream(printStream);
    }
  } catch (IOException e) {
    // Best-effort diagnostic dump: surface the failure in the output instead
    // of silently swallowing it, but don't propagate from a dump utility.
    printStream.println("Failed to dump PDX metadata: " + e);
  }
}
Use of org.apache.geode.pdx.internal.PdxType in the Apache Geode project.
Excerpt from class DataType, method getDataType.
/**
 * Returns a human-readable description of the type of the object encoded in
 * the given DataSerializer-serialized byte array, determined from the leading
 * DSCODE header byte (and, for some codes, a few following bytes).
 *
 * <p>Read errors on the header are reported in the returned string; deeper
 * decode failures are considered programming errors and rethrown.
 *
 * @param bytes a serialized value produced by Geode's data serialization
 * @return a descriptive type name, or a diagnostic string on failure
 */
public static String getDataType(byte[] bytes) {
  final DataInput in = getDataInput(bytes);
  byte header = 0;
  try {
    header = in.readByte();
  } catch (IOException e) {
    // Can't even read the header byte; report rather than throw.
    return "IOException: " + e.getMessage();
  }
  try {
    switch (header) {
      case DS_FIXED_ID_BYTE: {
        return "org.apache.geode.internal.DataSerializableFixedID:" + DSFIDFactory.create(in.readByte(), in).getClass().getName();
      }
      case DS_FIXED_ID_SHORT: {
        return "org.apache.geode.internal.DataSerializableFixedID:" + DSFIDFactory.create(in.readShort(), in).getClass().getName();
      }
      case DS_FIXED_ID_INT: {
        return "org.apache.geode.internal.DataSerializableFixedID:" + DSFIDFactory.create(in.readInt(), in).getClass().getName();
      }
      case DS_NO_FIXED_ID:
        return "org.apache.geode.internal.DataSerializableFixedID:" + DataSerializer.readClass(in).getName();
      case NULL:
        return "null";
      case NULL_STRING:
      case STRING:
      case HUGE_STRING:
      case STRING_BYTES:
      case HUGE_STRING_BYTES:
        return "java.lang.String";
      case CLASS:
        return "java.lang.Class";
      case DATE:
        return "java.util.Date";
      case FILE:
        return "java.io.File";
      case INET_ADDRESS:
        return "java.net.InetAddress";
      case BOOLEAN:
        return "java.lang.Boolean";
      case CHARACTER:
        return "java.lang.Character";
      case BYTE:
        return "java.lang.Byte";
      case SHORT:
        return "java.lang.Short";
      case INTEGER:
        return "java.lang.Integer";
      case LONG:
        return "java.lang.Long";
      case FLOAT:
        return "java.lang.Float";
      case DOUBLE:
        return "java.lang.Double";
      case BYTE_ARRAY:
        return "byte[]";
      case ARRAY_OF_BYTE_ARRAYS:
        return "byte[][]";
      case SHORT_ARRAY:
        return "short[]";
      case STRING_ARRAY:
        return "java.lang.String[]";
      case INT_ARRAY:
        return "int[]";
      case LONG_ARRAY:
        return "long[]";
      case FLOAT_ARRAY:
        return "float[]";
      case DOUBLE_ARRAY:
        return "double[]";
      case BOOLEAN_ARRAY:
        return "boolean[]";
      case CHAR_ARRAY:
        return "char[]";
      case OBJECT_ARRAY:
        return "java.lang.Object[]";
      case ARRAY_LIST:
        return "java.util.ArrayList";
      case LINKED_LIST:
        return "java.util.LinkedList";
      case HASH_SET:
        return "java.util.HashSet";
      case LINKED_HASH_SET:
        return "java.util.LinkedHashSet";
      case HASH_MAP:
        return "java.util.HashMap";
      case IDENTITY_HASH_MAP:
        return "java.util.IdentityHashMap";
      case HASH_TABLE:
        return "java.util.Hashtable";
      // NOTE(review): leftover from an apparently missing CONCURRENT_HASH_MAP case:
      // return "java.util.concurrent.ConcurrentHashMap";
      case PROPERTIES:
        return "java.util.Properties";
      case TIME_UNIT:
        return "java.util.concurrent.TimeUnit";
      case USER_CLASS:
        byte userClassDSId = in.readByte();
        return "DataSerializer: with Id:" + userClassDSId;
      case USER_CLASS_2:
        short userClass2DSId = in.readShort();
        return "DataSerializer: with Id:" + userClass2DSId;
      case USER_CLASS_4:
        int userClass4DSId = in.readInt();
        return "DataSerializer: with Id:" + userClass4DSId;
      case VECTOR:
        return "java.util.Vector";
      case STACK:
        return "java.util.Stack";
      case TREE_MAP:
        return "java.util.TreeMap";
      case TREE_SET:
        return "java.util.TreeSet";
      case BOOLEAN_TYPE:
        return "java.lang.Boolean.class";
      case CHARACTER_TYPE:
        return "java.lang.Character.class";
      case BYTE_TYPE:
        return "java.lang.Byte.class";
      case SHORT_TYPE:
        return "java.lang.Short.class";
      case INTEGER_TYPE:
        return "java.lang.Integer.class";
      case LONG_TYPE:
        return "java.lang.Long.class";
      case FLOAT_TYPE:
        return "java.lang.Float.class";
      case DOUBLE_TYPE:
        return "java.lang.Double.class";
      case VOID_TYPE:
        return "java.lang.Void.class";
      case USER_DATA_SERIALIZABLE: {
        Instantiator instantiator = InternalInstantiator.getInstantiator(in.readByte());
        return "org.apache.geode.Instantiator:" + instantiator.getInstantiatedClass().getName();
      }
      case USER_DATA_SERIALIZABLE_2: {
        Instantiator instantiator = InternalInstantiator.getInstantiator(in.readShort());
        return "org.apache.geode.Instantiator:" + instantiator.getInstantiatedClass().getName();
      }
      case USER_DATA_SERIALIZABLE_4: {
        Instantiator instantiator = InternalInstantiator.getInstantiator(in.readInt());
        return "org.apache.geode.Instantiator:" + instantiator.getInstantiatedClass().getName();
      }
      case DATA_SERIALIZABLE:
        return "org.apache.geode.DataSerializable:" + DataSerializer.readClass(in).getName();
      case SERIALIZABLE: {
        // Re-read from the start of the buffer so basicReadObject sees the header.
        String name = null;
        try {
          Object obj = InternalDataSerializer.basicReadObject(getDataInput(bytes));
          name = obj.getClass().getName();
        } catch (ClassNotFoundException e) {
          name = e.getMessage();
        }
        return "java.io.Serializable:" + name;
      }
      case PDX: {
        int typeId = in.readInt();
        try {
          InternalCache gfc = GemFireCacheImpl.getForPdx("PDX registry is unavailable because the Cache has been closed.");
          PdxType pdxType = gfc.getPdxRegistry().getType(typeId);
          if (pdxType == null) {
            // fix 52164
            return "org.apache.geode.pdx.PdxInstance: unknown id=" + typeId;
          }
          return "org.apache.geode.pdx.PdxInstance:" + pdxType.getClassName();
        } catch (CacheClosedException e) {
          return "org.apache.geode.pdx.PdxInstance:PdxRegistryClosed";
        }
      }
      case PDX_ENUM: {
        // Enum id packs the distributed-system id into the top byte.
        int dsId = in.readByte();
        int tmp = InternalDataSerializer.readArrayLength(in);
        int enumId = (dsId << 24) | (tmp & 0xFFFFFF);
        try {
          InternalCache gfc = GemFireCacheImpl.getForPdx("PDX registry is unavailable because the Cache has been closed.");
          EnumInfo enumInfo = gfc.getPdxRegistry().getEnumInfoById(enumId);
          return "PdxRegistry/java.lang.Enum:" + enumInfo.getClassName();
        } catch (CacheClosedException e) {
          return "PdxRegistry/java.lang.Enum:PdxRegistryClosed";
        }
      }
      case GEMFIRE_ENUM: {
        String name = DataSerializer.readString(in);
        return "java.lang.Enum:" + name;
      }
      case PDX_INLINE_ENUM: {
        String name = DataSerializer.readString(in);
        return "java.lang.Enum:" + name;
      }
      case BIG_INTEGER:
        return "java.math.BigInteger";
      case BIG_DECIMAL:
        return "java.math.BigDecimal";
      case UUID:
        return "java.util.UUID";
      case TIMESTAMP:
        return "java.sql.Timestamp";
      default:
    }
    return "Unknown header byte: " + header;
  } catch (IOException | ClassNotFoundException e) {
    // A corrupt payload past a recognized header is a programming/data error.
    throw new Error(e);
  }
}
Use of org.apache.geode.pdx.internal.PdxType in the Apache Geode project.
Excerpt from class PdxDeleteFieldDUnitTest, method testPdxDeleteFieldVersioning.
/**
 * Verifies that DiskStoreImpl.pdxDeleteField removes a field from a persisted
 * PDX type while the cluster is offline, and that restarted members afterwards
 * read the value with the field absent. Uses two VMs, each with its own
 * locator, persistent disk store, and replicate-persistent region.
 */
@Test
public void testPdxDeleteFieldVersioning() throws Exception {
final String DS_NAME = "PdxDeleteFieldDUnitTestDiskStore";
final String DS_NAME2 = "PdxDeleteFieldDUnitTestDiskStore2";
final Properties props = new Properties();
final int[] locatorPorts = AvailablePortHelper.getRandomAvailableTCPPorts(2);
props.setProperty(MCAST_PORT, "0");
props.setProperty(LOCATORS, "localhost[" + locatorPorts[0] + "],localhost[" + locatorPorts[1] + "]");
props.setProperty(ENABLE_CLUSTER_CONFIGURATION, "false");
// Disk store directories are created up front and registered for cleanup.
final File f = new File(DS_NAME);
f.mkdir();
final File f2 = new File(DS_NAME2);
f2.mkdir();
this.filesToBeDeleted.add(DS_NAME);
this.filesToBeDeleted.add(DS_NAME2);
Host host = Host.getHost(0);
VM vm1 = host.getVM(0);
VM vm2 = host.getVM(1);
// Phase 1: vm1 starts with PDX persistence and writes a PdxValue, which
// registers its PdxType (including "fieldToDelete") in the on-disk registry.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
disconnectFromDS();
props.setProperty(START_LOCATOR, "localhost[" + locatorPorts[0] + "]");
final Cache cache = (new CacheFactory(props)).setPdxPersistent(true).setPdxDiskStore(DS_NAME).create();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
dsf.setDiskDirs(new File[] { f });
dsf.create(DS_NAME);
RegionFactory<String, PdxValue> rf1 = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
rf1.setDiskStoreName(DS_NAME);
Region<String, PdxValue> region1 = rf1.create("region1");
region1.put("key1", new PdxValue(1, 2L));
return null;
}
});
// Phase 2: vm2 (read-serialized) joins, replicates the value to its own
// disk store, then shuts down.
vm2.invoke(new SerializableCallable() {
public Object call() throws Exception {
disconnectFromDS();
props.setProperty(START_LOCATOR, "localhost[" + locatorPorts[1] + "]");
final Cache cache = (new CacheFactory(props)).setPdxReadSerialized(true).setPdxPersistent(true).setPdxDiskStore(DS_NAME2).create();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
dsf.setDiskDirs(new File[] { f2 });
dsf.create(DS_NAME2);
RegionFactory rf1 = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
rf1.setDiskStoreName(DS_NAME2);
Region region1 = rf1.create("region1");
Object v = region1.get("key1");
assertNotNull(v);
cache.close();
return null;
}
});
// Phase 3: close vm1's cache so both disk stores are offline.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
Cache cache = CacheFactory.getAnyInstance();
if (cache != null && !cache.isClosed()) {
cache.close();
}
return null;
}
});
// Phase 4 (system under test): offline deletion of "fieldToDelete" from the
// PdxValue type in vm1's disk store; verify the field is gone from the type.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
Collection<PdxType> types = DiskStoreImpl.pdxDeleteField(DS_NAME, new File[] { f }, PdxValue.class.getName(), "fieldToDelete");
assertEquals(1, types.size());
PdxType pt = types.iterator().next();
assertEquals(PdxValue.class.getName(), pt.getClassName());
assertEquals(null, pt.getPdxField("fieldToDelete"));
return null;
}
});
// Phase 5: restart vm1 from the modified disk store.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
props.setProperty(START_LOCATOR, "localhost[" + locatorPorts[0] + "]");
final Cache cache = (new CacheFactory(props)).setPdxPersistent(true).setPdxDiskStore(DS_NAME).create();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
dsf.setDiskDirs(new File[] { f });
dsf.create(DS_NAME);
RegionFactory<String, PdxValue> rf1 = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
rf1.setDiskStoreName(DS_NAME);
Region<String, PdxValue> region1 = rf1.create("region1");
return null;
}
});
// Phase 6: restart vm2 and verify the recovered PdxInstance still has
// "value" but no longer exposes the deleted field.
vm2.invoke(new SerializableCallable() {
public Object call() throws Exception {
props.setProperty(START_LOCATOR, "localhost[" + locatorPorts[1] + "]");
final Cache cache = (new CacheFactory(props)).setPdxReadSerialized(true).setPdxPersistent(true).setPdxDiskStore(DS_NAME2).create();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
dsf.setDiskDirs(new File[] { f2 });
dsf.create(DS_NAME2);
RegionFactory rf1 = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
rf1.setDiskStoreName(DS_NAME2);
Region region1 = rf1.create("region1");
PdxInstance v = (PdxInstance) region1.get("key1");
assertNotNull(v);
assertEquals(1, v.getField("value"));
assertEquals(null, v.getField("fieldToDelete"));
cache.close();
return null;
}
});
// Final cleanup: make sure vm1's cache is closed.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
Cache cache = CacheFactory.getAnyInstance();
if (cache != null && !cache.isClosed()) {
cache.close();
}
return null;
}
});
}
Aggregations