Use of org.apache.geode.pdx.internal.EnumInfo in project geode by Apache.
In class PDXCommands, the method pdxRename:
/**
 * gfsh command that renames a PDX class in an offline disk store's type registry.
 * Delegates to {@link DiskStoreImpl#pdxRename} and echoes each rewritten
 * PdxType/EnumInfo back to the user.
 *
 * @param oldClassName fully qualified class name (or prefix) to rename
 * @param newClassName replacement class name
 * @param diskStore name of the offline disk store holding the PDX registry
 * @param diskDirs directories that contain the disk store files
 * @return an info result listing the rewritten types, or an error result
 */
@CliCommand(value = CliStrings.PDX_RENAME, help = CliStrings.PDX_RENAME__HELP)
@CliMetaData(shellOnly = true, relatedTopic = { CliStrings.TOPIC_GEODE_DISKSTORE })
@ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
public Result pdxRename(
    @CliOption(key = CliStrings.PDX_RENAME_OLD, mandatory = true, help = CliStrings.PDX_RENAME_OLD__HELP) String oldClassName,
    @CliOption(key = CliStrings.PDX_RENAME_NEW, mandatory = true, help = CliStrings.PDX_RENAME_NEW__HELP) String newClassName,
    @CliOption(key = CliStrings.PDX_DISKSTORE, mandatory = true, help = CliStrings.PDX_DISKSTORE__HELP) String diskStore,
    @CliOption(key = CliStrings.PDX_DISKDIR, mandatory = true, help = CliStrings.PDX_DISKDIR__HELP) String[] diskDirs) {
  try {
    // Convert the directory names supplied on the command line into File handles.
    final File[] dirs = new File[diskDirs.length];
    for (int i = 0; i < diskDirs.length; i++) {
      dirs[i] = new File(diskDirs[i]);
    }
    Collection<Object> results =
        DiskStoreImpl.pdxRename(diskStore, dirs, oldClassName, newClassName);
    if (results.isEmpty()) {
      // Nothing in the registry matched the old class name.
      return ResultBuilder
          .createGemFireErrorResult(CliStrings.format(CliStrings.PDX_RENAME__EMPTY));
    }
    // Render each rewritten type/enum into an in-memory buffer for the result text.
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    PrintStream printStream = new PrintStream(outputStream);
    for (Object p : results) {
      if (p instanceof PdxType) {
        ((PdxType) p).toStream(printStream, false);
      } else {
        ((EnumInfo) p).toStream(printStream);
      }
    }
    // Flush the PrintStream's internal character buffer before reading the bytes;
    // without this the tail of the output could be missing from the result text.
    printStream.flush();
    String resultString =
        CliStrings.format(CliStrings.PDX_RENAME__SUCCESS, outputStream.toString());
    return ResultBuilder.createInfoResult(resultString);
  } catch (Exception e) {
    return ResultBuilder.createGemFireErrorResult(
        CliStrings.format(CliStrings.PDX_RENAME__ERROR, e.getMessage()));
  }
}
Use of org.apache.geode.pdx.internal.EnumInfo in project geode by Apache.
In class PdxRenameDUnitTest, the method testPdxRenameVersioning:
// Verifies that DiskStoreImpl.pdxRename() rewrites the class names persisted in an
// offline disk store's PDX type registry, and that a peer that re-reads the data
// afterwards observes the renamed class name. Uses two VMs, each hosting a locator
// and its own persistent PDX disk store.
@Test
public void testPdxRenameVersioning() throws Exception {
final String DS_NAME = "PdxRenameDUnitTestDiskStore";
final String DS_NAME2 = "PdxRenameDUnitTestDiskStore2";
final int[] locatorPorts = AvailablePortHelper.getRandomAvailableTCPPorts(2);
// One disk-store directory per member; both are registered for cleanup at teardown.
final File f = new File(DS_NAME);
f.mkdir();
final File f2 = new File(DS_NAME2);
f2.mkdir();
this.filesToBeDeleted.add(DS_NAME);
this.filesToBeDeleted.add(DS_NAME2);
final Properties props = new Properties();
props.setProperty(MCAST_PORT, "0");
props.setProperty(LOCATORS, "localhost[" + locatorPorts[0] + "],localhost[" + locatorPorts[1] + "]");
props.setProperty(ENABLE_CLUSTER_CONFIGURATION, "false");
Host host = Host.getHost(0);
VM vm1 = host.getVM(0);
VM vm2 = host.getVM(1);
// vm1: start a locator plus a cache with a persistent PDX registry, and write one
// PdxValue so its PDX type gets recorded in the DS_NAME disk store.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
disconnectFromDS();
props.setProperty(START_LOCATOR, "localhost[" + locatorPorts[0] + "]");
final Cache cache = (new CacheFactory(props)).setPdxPersistent(true).setPdxDiskStore(DS_NAME).create();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
dsf.setDiskDirs(new File[] { f });
dsf.create(DS_NAME);
RegionFactory<String, PdxValue> rf1 = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
rf1.setDiskStoreName(DS_NAME);
Region<String, PdxValue> region1 = rf1.create("region1");
region1.put("key1", new PdxValue(1));
return null;
}
});
// vm2: join the cluster (read-serialized PDX) and confirm the value replicated
// before any rename takes place; then shut down again.
vm2.invoke(new SerializableCallable() {
public Object call() throws Exception {
disconnectFromDS();
props.setProperty(START_LOCATOR, "localhost[" + locatorPorts[1] + "]");
final Cache cache = (new CacheFactory(props)).setPdxReadSerialized(true).setPdxPersistent(true).setPdxDiskStore(DS_NAME2).create();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
dsf.setDiskDirs(new File[] { f2 });
dsf.create(DS_NAME2);
RegionFactory rf1 = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
rf1.setDiskStoreName(DS_NAME2);
Region region1 = rf1.create("region1");
Object v = region1.get("key1");
assertNotNull(v);
cache.close();
return null;
}
});
// vm1: close the cache so DS_NAME is offline for the rename below.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
Cache cache = CacheFactory.getAnyInstance();
if (cache != null && !cache.isClosed()) {
cache.close();
}
return null;
}
});
// vm1: perform the offline rename "apache" -> "pivotal"; exactly one PdxType and
// one EnumInfo are expected to be rewritten, and both must report the new package.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
Collection<Object> renameResults = DiskStoreImpl.pdxRename(DS_NAME, new File[] { f }, "apache", "pivotal");
assertEquals(2, renameResults.size());
for (Object o : renameResults) {
if (o instanceof PdxType) {
PdxType t = (PdxType) o;
assertEquals("org.pivotal.geode.internal.PdxRenameDUnitTest$PdxValue", t.getClassName());
} else {
EnumInfo ei = (EnumInfo) o;
assertEquals("org.pivotal.geode.internal.PdxRenameDUnitTest$Day", ei.getClassName());
}
}
return null;
}
});
// vm1: restart the member on top of the renamed registry.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
props.setProperty(START_LOCATOR, "localhost[" + locatorPorts[0] + "]");
final Cache cache = (new CacheFactory(props)).setPdxPersistent(true).setPdxDiskStore(DS_NAME).create();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
dsf.setDiskDirs(new File[] { f });
dsf.create(DS_NAME);
RegionFactory<String, PdxValue> rf1 = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
rf1.setDiskStoreName(DS_NAME);
Region<String, PdxValue> region1 = rf1.create("region1");
return null;
}
});
// vm2: re-read key1 as a PdxInstance and verify it now carries the renamed
// (org.pivotal...) class name.
vm2.invoke(new SerializableCallable() {
public Object call() throws Exception {
disconnectFromDS();
props.setProperty(START_LOCATOR, "localhost[" + locatorPorts[1] + "]");
final Cache cache = (new CacheFactory(props)).setPdxReadSerialized(true).setPdxPersistent(true).setPdxDiskStore(DS_NAME2).create();
DiskStoreFactory dsf = cache.createDiskStoreFactory();
dsf.setDiskDirs(new File[] { f2 });
dsf.create(DS_NAME2);
RegionFactory rf1 = cache.createRegionFactory(RegionShortcut.REPLICATE_PERSISTENT);
rf1.setDiskStoreName(DS_NAME2);
Region region1 = rf1.create("region1");
PdxInstance v = (PdxInstance) region1.get("key1");
assertNotNull(v);
assertEquals("org.pivotal.geode.internal.PdxRenameDUnitTest$PdxValue", ((PdxInstanceImpl) v).getClassName());
cache.close();
return null;
}
});
// vm1: final cleanup of the restarted cache.
vm1.invoke(new SerializableCallable() {
public Object call() throws Exception {
Cache cache = CacheFactory.getAnyInstance();
if (cache != null && !cache.isClosed()) {
cache.close();
}
return null;
}
});
}
Use of org.apache.geode.pdx.internal.EnumInfo in project geode by Apache.
In class DataTypeJUnitTest, the method getDataTypeShouldReturnPDXEnumType:
// Verifies that DataType.getDataType() renders a serialized PDX enum as
// "PdxRegistry/java.lang.Enum:<className>" by resolving the enum id through the
// cache's PDX type registry.
@Test
public void getDataTypeShouldReturnPDXEnumType() throws IOException {
  int somePdxEnumId = 1;
  EnumInfo somePdxEnumInfo = mock(EnumInfo.class);
  doReturn("PDXENUM").when(somePdxEnumInfo).getClassName();
  TypeRegistry mockTypeRegistry = mock(TypeRegistry.class);
  // Stub the lookup for the id that is actually written into the stream below.
  // The original stubbed getEnumInfoById(0) while writing id 1, which would make
  // the registry return null for the id being decoded — TODO confirm against
  // DataType's PDX_ENUM decoding path.
  when(mockTypeRegistry.getEnumInfoById(somePdxEnumId)).thenReturn(somePdxEnumInfo);
  GemFireCacheImpl pdxInstance = mock(GemFireCacheImpl.class);
  when(pdxInstance.getPdxRegistry()).thenReturn(mockTypeRegistry);
  // Static-mock the cache accessor so DataType can reach the mocked registry.
  PowerMockito.mockStatic(GemFireCacheImpl.class);
  when(GemFireCacheImpl.getForPdx("PDX registry is unavailable because the Cache has been closed.")).thenReturn(pdxInstance);
  // Serialize a PDX enum header: the DSCODE marker byte followed by the enum id.
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream out = new DataOutputStream(baos);
  out.writeByte(DSCODE.PDX_ENUM);
  out.writeInt(somePdxEnumId);
  byte[] bytes = baos.toByteArray();
  String type = DataType.getDataType(bytes);
  assertThat(type).isEqualTo("PdxRegistry/java.lang.Enum:PDXENUM");
}
Use of org.apache.geode.pdx.internal.EnumInfo in project geode by Apache.
In class PdxAttributesJUnitTest, the method testDuplicatePdxTypeId:
// Verifies that the PDX registry region rejects re-registration of an id that is
// already in use: putting a fresh PdxType under an existing type id, or a fresh
// EnumInfo under an existing enum id, must raise CacheWriterException.
@Test
public void testDuplicatePdxTypeId() throws Exception {
  final int dsId = 5;
  CacheFactory factory = new CacheFactory();
  factory.set(MCAST_PORT, "0");
  factory.set(ConfigurationProperties.DISTRIBUTED_SYSTEM_ID, String.valueOf(dsId));
  Cache cache = factory.create();
  // Register a PDX type so the registry region has entries to collide with.
  defineAType();
  Region pdxRegion = cache.getRegion(PeerTypeRegistration.REGION_NAME);
  boolean sawTypeConflict = false;
  boolean sawEnumConflict = false;
  for (Object element : pdxRegion.entrySet()) {
    Map.Entry entry = (Map.Entry) element;
    Object key = entry.getKey();
    if (key instanceof Integer) {
      // Integer keys identify PdxTypes; re-inserting one must be rejected.
      int existingTypeId = (int) key;
      try {
        pdxRegion.put(existingTypeId, new PdxType());
      } catch (CacheWriterException expected) {
        sawTypeConflict = true;
      }
    } else {
      // Non-Integer keys are EnumIds; re-inserting one must likewise be rejected.
      EnumId existingEnumId = (EnumId) key;
      EnumInfo replacement = new EnumInfo(SimpleEnum.ONE);
      try {
        pdxRegion.put(existingEnumId, replacement);
      } catch (CacheWriterException expected) {
        sawEnumConflict = true;
      }
    }
  }
  assertEquals(true, sawTypeConflict);
  assertEquals(true, sawEnumConflict);
  cache.close();
}
Use of org.apache.geode.pdx.internal.EnumInfo in project geode by Apache.
In class PdxAttributesJUnitTest, the method testPdxTypeIdWithNegativeDsId:
// Verifies that when the configured distributed system id is negative, geode falls
// back to dsId 0, so the generated type/enum ids are simply the
// PLACE_HOLDER_FOR_TYPE_ID mask applied to the corresponding hash code.
@Test
public void testPdxTypeIdWithNegativeDsId() throws Exception {
  // in this case geode will use 0 as dsId
  int dsId = -1;
  CacheFactory cf = new CacheFactory();
  cf.set(MCAST_PORT, "0");
  cf.set(ConfigurationProperties.DISTRIBUTED_SYSTEM_ID, String.valueOf(dsId));
  Cache cache = cf.create();
  // define a type.
  defineAType();
  Region pdxRegion = cache.getRegion(PeerTypeRegistration.REGION_NAME);
  Iterator itr = pdxRegion.entrySet().iterator();
  boolean found = false;
  boolean foundEnum = false;
  while (itr.hasNext()) {
    Map.Entry ent = (Map.Entry) itr.next();
    if (ent.getKey() instanceof Integer) {
      int pdxTypeId = (int) ent.getKey();
      PdxType pdxType = (PdxType) ent.getValue();
      int pdxTypeHashcode = pdxType.hashCode();
      System.out.println("pdx hashcode " + pdxTypeHashcode);
      // With dsId 0, the registered id is just the masked hash code.
      int expectedPdxTypeId = PeerTypeRegistration.PLACE_HOLDER_FOR_TYPE_ID & pdxTypeHashcode;
      assertEquals(expectedPdxTypeId, pdxTypeId);
      found = true;
    } else {
      EnumId enumId = (EnumId) ent.getKey();
      // Cast checks the value really is an EnumInfo, even though only the key's
      // numeric value is compared below.
      EnumInfo enumInfo = (EnumInfo) ent.getValue();
      EnumInfo expectedEnumInfo = new EnumInfo(SimpleEnum.TWO);
      int expectKey = PeerTypeRegistration.PLACE_HOLDER_FOR_TYPE_ID & expectedEnumInfo.hashCode();
      assertEquals(expectKey, enumId.intValue());
      foundEnum = true;
    }
  }
  assertEquals(true, found);
  // Also assert the enum entry was actually visited, matching the sibling
  // testDuplicatePdxTypeId test; previously foundEnum was set but never checked.
  assertEquals(true, foundEnum);
  cache.close();
}
Aggregations