Use of java.util.IdentityHashMap in the Apache Hive project — class TestJdbcDriver2, method getTablesTest.
/**
 * Tests the table types reported by {@code DatabaseMetaData.getTables()} for the
 * pre-created tables and view, across several schema-pattern / table-pattern /
 * type-filter combinations.
 *
 * @param tableTypeNames expected table types (for the non-view test tables)
 * @param viewTypeName expected view type (for the test view)
 * @throws SQLException if metadata access fails
 */
private void getTablesTest(Set<String> tableTypeNames, String viewTypeName) throws SQLException {
    // Type filters for the last argument of getTables(); null means "all types".
    String[] ALL = null;
    String[] VIEW_ONLY = { viewTypeName };
    String[] TABLE_ONLY = tableTypeNames.toArray(new String[tableTypeNames.size()]);
    String testTblWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, tableName);
    String testPartTblWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, partitionedTableName);
    String testDataTypeTblWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, dataTypeTableName);
    String testViewWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, viewName);
    String testExtTblWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, externalTableName);
    // IdentityHashMap is deliberate: the Object[] keys have no value-based
    // equals/hashCode, so identity semantics keep every test case distinct
    // even when two cases use equal-looking patterns.
    Map<Object[], String[]> tests = new IdentityHashMap<Object[], String[]>();
    tests.put(new Object[] { null, "testjdbc%", ALL }, new String[] { testTblWithDb, testPartTblWithDb, testViewWithDb, testExtTblWithDb, testDataTypeTblWithDb });
    tests.put(new Object[] { "test%", "testjdbc%", ALL }, new String[] { testTblWithDb, testPartTblWithDb, testViewWithDb, testExtTblWithDb, testDataTypeTblWithDb });
    tests.put(new Object[] { "test%", "testjdbc%", VIEW_ONLY }, new String[] { testViewWithDb });
    tests.put(new Object[] { null, "testjdbcdrivertbl", ALL }, new String[] { testTblWithDb });
    tests.put(new Object[] { "%jdbc%", "testjdbcdrivertbl", ALL }, new String[] { testTblWithDb });
    tests.put(new Object[] { "%jdbc%", "testjdbc%", ALL }, new String[] { testTblWithDb, testPartTblWithDb, testViewWithDb, testExtTblWithDb, testDataTypeTblWithDb });
    tests.put(new Object[] { "%jdbc%", "testjdbcdrivertbl", TABLE_ONLY }, new String[] { testTblWithDb });
    tests.put(new Object[] { null, "test_dbcdri_ertbl", ALL }, new String[] { testTblWithDb });
    tests.put(new Object[] { null, "%jdbc%", ALL }, new String[] { testTblWithDb, testPartTblWithDb, testViewWithDb, testDataTypeTblWithDb, testExtTblWithDb });
    tests.put(new Object[] { "%", "%jdbc%", VIEW_ONLY }, new String[] { testViewWithDb });
    tests.put(new Object[] { null, "%jdbc%", TABLE_ONLY }, new String[] { testTblWithDb, testPartTblWithDb, testExtTblWithDb, testDataTypeTblWithDb });
    for (Map.Entry<Object[], String[]> entry : tests.entrySet()) {
        Object[] checkPattern = entry.getKey();
        String debugString = checkPattern[0] + ", " + checkPattern[1] + ", " + Arrays.toString((String[]) checkPattern[2]);
        Set<String> expectedTables = new HashSet<String>(Arrays.asList(entry.getValue()));
        int cnt = 0;
        // try-with-resources: the original leaked the ResultSet whenever an
        // assertion failed before the explicit rs.close() was reached.
        try (ResultSet rs = con.getMetaData().getTables(null, (String) checkPattern[0], (String) checkPattern[1], (String[]) checkPattern[2])) {
            ResultSetMetaData resMeta = rs.getMetaData();
            // JDBC mandates the first five getTables() columns and their order.
            assertEquals(10, resMeta.getColumnCount());
            assertEquals("TABLE_CAT", resMeta.getColumnName(1));
            assertEquals("TABLE_SCHEM", resMeta.getColumnName(2));
            assertEquals("TABLE_NAME", resMeta.getColumnName(3));
            assertEquals("TABLE_TYPE", resMeta.getColumnName(4));
            assertEquals("REMARKS", resMeta.getColumnName(5));
            while (rs.next()) {
                String resultDbName = rs.getString("TABLE_SCHEM");
                String resultTableName = rs.getString("TABLE_NAME");
                String fullTableName = StatsUtils.getFullyQualifiedTableName(resultDbName, resultTableName);
                assertTrue("Invalid table " + fullTableName + " for test " + debugString, expectedTables.contains(fullTableName));
                String resultTableComment = rs.getString("REMARKS");
                assertTrue("Missing comment on the table.", resultTableComment.length() > 0);
                String tableType = rs.getString("TABLE_TYPE");
                // Test objects are named so that views end with "view".
                if (resultTableName.endsWith("view")) {
                    assertEquals("Expected a tabletype view but got something else.", viewTypeName, tableType);
                } else {
                    assertTrue("Expected one of " + tableTypeNames + " table but got something else: " + tableType, tableTypeNames.contains(tableType));
                }
                cnt++;
            }
        }
        assertEquals("Received an incorrect number of tables for test " + debugString, expectedTables.size(), cnt);
    }
}
Use of java.util.IdentityHashMap in the Apache CXF project — class SchemaUtil, method getSchemas.
/**
 * Extracts schemas from the given WSDL definition and from every definition it
 * (transitively) imports, adding the results to the supplied collections.
 *
 * @param def root WSDL definition
 * @param schemaCol collection receiving extracted schemas
 * @param schemas list receiving schema metadata
 */
public void getSchemas(final Definition def, final SchemaCollection schemaCol, List<SchemaInfo> schemas) {
    List<Definition> imported = new ArrayList<>();
    parseImports(def, imported);
    extractSchema(def, schemaCol, schemas);
    // added
    getSchemaList(def);
    // Identity map used as a reference-based "already processed" set, so a
    // definition reached via multiple import paths is handled only once.
    Map<Definition, Definition> processed = new IdentityHashMap<Definition, Definition>();
    processed.put(def, def);
    for (Definition imp : imported) {
        if (processed.containsKey(imp)) {
            continue;
        }
        extractSchema(imp, schemaCol, schemas);
        // added
        getSchemaList(imp);
        processed.put(imp, imp);
    }
}
Use of java.util.IdentityHashMap in the Apache CXF project — class WadlGenerator, method generateWADL.
/**
 * Assembles the complete WADL document for the given root resources.
 * The document is built from three buffers — main/application envelope,
 * grammars, and resources — which are concatenated at the end.
 *
 * @param baseURI base URI written into the resources element
 * @param cris root resource classes to describe
 * @param isJson true to emit the JSON representation (suppresses XML-only parts)
 * @param m current message, may be null; used to look up the JAXB writer
 * @param ui URI info used when building the schema writer
 * @return the serialized WADL document
 */
public StringBuilder generateWADL(String baseURI, List<ClassResourceInfo> cris, boolean isJson, Message m, UriInfo ui) {
StringBuilder sbMain = new StringBuilder();
// Optional xml-stylesheet processing instruction (XML output only).
if (!isJson && stylesheetReference != null && !applyStylesheetLocally) {
sbMain.append("<?xml-stylesheet ").append(getStylesheetInstructionData(baseURI)).append("?>");
}
// The <application> tag is left open here; its ">" is appended only after
// the grammars have been generated, since clsMap is filled as a side effect
// of handleResource() below.
sbMain.append("<application");
if (!isJson) {
sbMain.append(" xmlns=\"").append(getNamespace()).append("\" xmlns:xs=\"").append(Constants.URI_2001_SCHEMA_XSD).append("\"");
}
StringBuilder sbGrammars = new StringBuilder();
sbGrammars.append("<grammars>");
StringBuilder sbResources = new StringBuilder();
sbResources.append("<resources base=\"").append(baseURI).append("\">");
// JAXB writer is only needed when QNames are resolved via a JAXB context.
MessageBodyWriter<?> jaxbWriter = (m != null && useJaxbContextForQnames) ? ServerProviderFactory.getInstance(m).getDefaultJaxbWriter() : null;
ResourceTypes resourceTypes = ResourceUtils.getAllRequestResponseTypes(cris, useJaxbContextForQnames, jaxbWriter);
checkXmlSeeAlso(resourceTypes);
Set<Class<?>> allTypes = resourceTypes.getAllTypes().keySet();
JAXBContext jaxbContext = null;
if (useJaxbContextForQnames && !allTypes.isEmpty()) {
jaxbContext = ResourceUtils.createJaxbContext(new HashSet<>(allTypes), null, jaxbContextProperties);
if (jaxbContext == null) {
LOG.warning("JAXB Context is null: possibly due to one of input classes being not accepted");
}
}
SchemaWriter schemaWriter = createSchemaWriter(resourceTypes, jaxbContext, ui);
ElementQNameResolver qnameResolver = schemaWriter == null ? null : createElementQNameResolver(jaxbContext);
// Identity map: classes are keyed by reference. Populated by handleResource()
// and later consumed by handleGrammars().
Map<Class<?>, QName> clsMap = new IdentityHashMap<>();
// LinkedHashSet keeps resource traversal order deterministic.
Set<ClassResourceInfo> visitedResources = new LinkedHashSet<>();
for (ClassResourceInfo cri : cris) {
startResourceTag(sbResources, cri, cri.getURITemplate().getValue());
// Prefer an explicit @Description/@Descriptions annotation; fall back to javadocs.
Annotation description = AnnotationUtils.getClassAnnotation(cri.getServiceClass(), Description.class);
if (description == null) {
description = AnnotationUtils.getClassAnnotation(cri.getServiceClass(), Descriptions.class);
}
if (description != null) {
handleDocs(new Annotation[] { description }, sbResources, DocTarget.RESOURCE, true, isJson);
} else {
handleClassJavaDocs(cri, sbResources);
}
handleResource(sbResources, allTypes, qnameResolver, clsMap, cri, visitedResources, isJson);
sbResources.append("</resource>");
}
sbResources.append("</resources>");
// Grammars must be written after all resources were visited (clsMap is complete now).
handleGrammars(sbMain, sbGrammars, schemaWriter, clsMap);
sbGrammars.append("</grammars>");
// Close the <application ...> start tag opened above.
sbMain.append(">");
handleApplicationDocs(sbMain);
sbMain.append(sbGrammars.toString());
sbMain.append(sbResources.toString());
sbMain.append("</application>");
return sbMain;
}
Use of java.util.IdentityHashMap in the Apache Ignite project — class IgniteHadoopWeightedMapReducePlanner, method assignReducersToSplits.
/**
 * Distribute reducers between splits as evenly as possible.
 * Every split gets {@code reducerCnt / splits.size()} reducers; the first
 * {@code reducerCnt % splits.size()} splits get one extra, so the assigned
 * counts sum to exactly {@code reducerCnt}.
 *
 * @param splits Splits.
 * @param reducerCnt Reducer count.
 * @return Map from input split to reducer count.
 */
private Map<HadoopInputSplit, Integer> assignReducersToSplits(Collection<HadoopInputSplit> splits, int reducerCnt) {
    // Identity map: splits are matched by reference, not by equals().
    Map<HadoopInputSplit, Integer> res = new IdentityHashMap<>(splits.size());
    int splitCnt = splits.size();
    int perSplit = reducerCnt / splitCnt;
    int extra = reducerCnt % splitCnt;
    int idx = 0;
    for (HadoopInputSplit split : splits) {
        res.put(split, idx < extra ? perSplit + 1 : perSplit);
        idx++;
    }
    return res;
}
Use of java.util.IdentityHashMap in the VCell (Virtual Cell) project — class SimulationWarning, method analyzeDiffusion.
/**
 * Verifies that every diffusion expression is constant (a Smoldyn requirement),
 * recording an ERROR issue for each non-constant one, and collects the constant
 * diffusion values of membrane subdomains for later use. Also emits a WARNING
 * issue when the time step looks too large for the mesh spacing relative to the
 * fastest diffusion rate found.
 *
 * @param simulation simulation whose math description is analyzed
 * @param timeStep solver time step to sanity-check against the mesh
 * @param issueContext context attached to generated issues
 * @param issueList receives ERROR/WARNING issues produced during analysis
 * @return map from membrane subdomain to its constant diffusion values
 * @throws ExpressionException if an expression cannot be flattened
 */
private static Map<MembraneSubDomain, List<DiffusionValue>> analyzeDiffusion(Simulation simulation, double timeStep, IssueContext issueContext, List<Issue> issueList) throws ExpressionException {
    // Identity map: subdomains are keyed by reference.
    Map<MembraneSubDomain, List<DiffusionValue>> diffusionValuesMap = new IdentityHashMap<>();
    MutableDouble value = new MutableDouble();
    MathDescription cm = simulation.getMathDescription();
    Objects.requireNonNull(cm);
    MathDescription localMath = new MathDescription(cm);
    SimulationSymbolTable symTable = new SimulationSymbolTable(simulation, 0);
    double maxDiffValue = Double.MIN_VALUE;
    for (SubDomain sd : localMath.getSubDomainCollection()) {
        final boolean isMembrane = sd instanceof MembraneSubDomain;
        // BUG FIX: allocate a fresh list per subdomain. The original reused a
        // single list, stored the same reference for every membrane subdomain,
        // then cleared it on the next iteration — so all map entries aliased
        // one list and earlier entries lost their contents.
        List<DiffusionValue> diffusionList = new ArrayList<>();
        for (ParticleProperties pp : sd.getParticleProperties()) {
            String name = pp.getVariable().getName();
            Expression diffExp = pp.getDiffusion();
            Expression flattened = MathUtilities.substituteFunctions(diffExp, symTable).flatten();
            if (isConstant(flattened, value)) {
                if (isMembrane) {
                    DiffusionValue dv = new DiffusionValue(name, value.doubleValue());
                    maxDiffValue = Math.max(maxDiffValue, dv.value);
                    diffusionList.add(dv);
                }
            } else {
                String s = "Smoldyn only supports constant diffusion, " + name + " is variable";
                Issue i = new Issue(simulation, issueContext, IssueCategory.SMOLYDN_DIFFUSION, s, s, Severity.ERROR);
                issueList.add(i);
            }
        }
        if (isMembrane && !diffusionList.isEmpty()) {
            diffusionValuesMap.put((MembraneSubDomain) sd, diffusionList);
        }
    }
    MeshSpecification ms = simulation.getMeshSpecification();
    Geometry g = ms.getGeometry();
    int dim = g.getDimension();
    // Smallest mesh spacing across all present dimensions; the case fall-through
    // is intentional (3D checks dz, dy and dx; 2D checks dy and dx; 1D only dx).
    double minDelta = Double.MAX_VALUE;
    switch(dim) {
    case 3:
        minDelta = Math.min(minDelta, ms.getDz(true));
        // fall-through
    case 2:
        minDelta = Math.min(minDelta, ms.getDy(true));
        // fall-through
    case 1:
        minDelta = Math.min(minDelta, ms.getDx(true));
        break;
    default:
        throw new RuntimeException("Invalid dimension " + dim + " for smoldyn solver");
    }
    double minArea = minDelta * minDelta / 2;
    // Heuristic pre-check threshold; PRECHECK_LIMIT_ADJUST is a class constant.
    double limit = PRECHECK_LIMIT_ADJUST * minArea / maxDiffValue;
    boolean warn = (timeStep > limit);
    if (lg.isDebugEnabled()) {
        lg.debug("Min delta " + minDelta + ", min area " + minArea + " time limit " + limit + " timeStep " + timeStep + " -> warn = " + warn);
    }
    if (warn) {
        String s = "Time step " + timeStep + " may be too large, performing further analysis ...";
        Issue i = new Issue(simulation, issueContext, IssueCategory.SMOLYDN_DIFFUSION, s, s, Severity.WARNING);
        issueList.add(i);
    }
    lg.debug("end of diffusion analysis");
    return diffusionValuesMap;
}
Aggregations