Example usage of org.testng.annotations.BeforeSuite from the openstack4j project (by ContainX): the setup method of the ServiceTypeTest class.
/**
 * Populates the lookup tables used by the service-type tests.
 *
 * <p>{@code types} maps each {@link ServiceType} to a list of alias strings
 * (deliberately in mixed case) that are expected to resolve to that type,
 * while {@code unknownTypes} holds aliases that must NOT resolve to the
 * associated service type.
 */
@BeforeSuite
public void setup() {
    types = new HashMap<>();
    types.put(ServiceType.IDENTITY, Arrays.asList("identity", "identityV2", "identityv3"));
    types.put(ServiceType.APP_CATALOG, Arrays.asList("application-catalog", "Application-catalogv2", "application-Catalogv3"));
    types.put(ServiceType.COMPUTE, Arrays.asList("compute", "COMPUTEv2", "computeV3"));
    types.put(ServiceType.IMAGE, Arrays.asList("image", "imageV2", "imagev3"));
    types.put(ServiceType.BLOCK_STORAGE, Arrays.asList("volume", "volumev1", "volumev2"));
    types.put(ServiceType.OBJECT_STORAGE, Arrays.asList("obJect-stOre", "object-storev2", "object-storev3"));
    types.put(ServiceType.NETWORK, Arrays.asList("network", "networkv2.0", "networkV3"));
    types.put(ServiceType.EC2, Arrays.asList("EC2", "ec2V2", "ec2v3"));
    types.put(ServiceType.TELEMETRY, Arrays.asList("metering", "meteringV2.0", "meteringv3"));
    types.put(ServiceType.TELEMETRY_AODH, Arrays.asList("alarming", "alarmingV2", "alarmingV3"));
    types.put(ServiceType.ORCHESTRATION, Arrays.asList("orchestration", "orchestrationv2", "orchestrationv3"));
    types.put(ServiceType.CLUSTERING, Arrays.asList("clustering", "clusteringV2", "clustering3"));
    types.put(ServiceType.SAHARA, Arrays.asList("data_processing", "data_processingV1", "data_processingv3"));
    types.put(ServiceType.SHARE, Arrays.asList("share", "sharev2", "shareV3"));
    types.put(ServiceType.DATABASE, Arrays.asList("database", "databaseV2", "Databasev3"));
    types.put(ServiceType.BARBICAN, Arrays.asList("key-manager", "key-managerv2", "key-managerv3"));
    types.put(ServiceType.TACKER, Arrays.asList("nfv-orchestration", "nfv-orchestrationv2", "nfv-orchestration3"));
    types.put(ServiceType.ARTIFACT, Arrays.asList("artifact", "artifactv2", "artifactv3"));
    types.put(ServiceType.MAGNUM, Arrays.asList("container", "ContainerV3", "containerv1"));
    types.put(ServiceType.DNS, Arrays.asList("dns", "dnsv2", "dnsV3"));
    types.put(ServiceType.WORKFLOW, Arrays.asList("workflow", "workflowv3", "workflowv2"));
    // Fix: use the diamond operator instead of a raw HashMap so this map is
    // type-checked the same way as `types` above (no raw-type warning).
    unknownTypes = new HashMap<>();
    unknownTypes.put(ServiceType.ORCHESTRATION, Arrays.asList("heat-cfg", "heatother", "heatvm", "heat-cfg4"));
}
Example usage of org.testng.annotations.BeforeSuite from the rest.li project (by LinkedIn): the doOneTimeSetUp method of the ZooKeeperServerTest class.
/**
 * Starts an embedded ZooKeeper server on {@code PORT} once for the whole suite.
 *
 * @throws InterruptedException if server startup is interrupted
 */
@BeforeSuite
public void doOneTimeSetUp() throws InterruptedException {
    try {
        _zkServer = new ZKServer(PORT);
        _zkServer.startup();
    } catch (IOException e) {
        // Fix: pass the exception as the cause instead of dropping it, so the
        // suite-failure report shows WHY the server could not start
        // (TestNG provides Assert.fail(String, Throwable)).
        fail("unable to instantiate real zk server on port " + PORT, e);
    }
}
Example usage of org.testng.annotations.BeforeSuite from the pinot project (by LinkedIn): the init method of the DefaultAggregationExecutorTest class.
/**
 * Initializations prior to the test:
 * - Builds a segment with metric columns (that will be aggregated) containing
 *   randomly generated data.
 * - Creates one {@code AggregationInfo} per aggregation function / column pair.
 *
 * @throws Exception if segment creation fails
 */
@BeforeSuite
void init() throws Exception {
    // Time-seeded so each run exercises different random data; the generated
    // values are captured in _inputData for verification by the tests.
    _random = new Random(System.currentTimeMillis());
    _docIdSet = new int[NUM_ROWS];
    int numColumns = AGGREGATION_FUNCTIONS.length;
    _inputData = new double[numColumns][NUM_ROWS];
    _columns = new String[numColumns];
    setupSegment();
    // Presize: one AggregationInfo per column is created below.
    _aggregationInfoList = new ArrayList<>(numColumns);
    for (int i = 0; i < _columns.length; i++) {
        AggregationInfo aggregationInfo = new AggregationInfo();
        aggregationInfo.setAggregationType(AGGREGATION_FUNCTIONS[i]);
        // Fix: diamond operator for consistency with the rest of the file.
        Map<String, String> params = new HashMap<>();
        params.put("column", _columns[i]);
        aggregationInfo.setAggregationParams(params);
        _aggregationInfoList.add(aggregationInfo);
    }
}
Example usage of org.testng.annotations.BeforeSuite from the pinot project (by LinkedIn): the setup method of the SearchableByteBufferUtilTest class.
/**
 * Builds the fixture for the binary-search tests: generates DISTINCT_VALUES
 * sorted, distinct values of each primitive type (including boundary values),
 * multiplexes them row-by-row into a temp file, and wraps that file in a
 * {@code SearchableByteBufferUtil}.
 *
 * @throws Exception if the temp file cannot be created or written
 */
@BeforeSuite
public void setup() throws Exception {
    // Generate DISTINCT_VALUES shorts, ints, longs, floats and doubles.
    // TreeSets give us sorted, duplicate-free values, which the searchable
    // buffer's binary search requires.
    Random random = new Random();
    SortedSet<Short> shortValues = new TreeSet<Short>();
    SortedSet<Integer> intValues = new TreeSet<Integer>();
    SortedSet<Long> longValues = new TreeSet<Long>();
    SortedSet<Float> floatValues = new TreeSet<Float>();
    SortedSet<Double> doubleValues = new TreeSet<Double>();
    // Add boundary conditions. Note: Float.MIN_VALUE / Double.MIN_VALUE are
    // the smallest POSITIVE values, not the most negative ones.
    shortValues.add(Short.MIN_VALUE);
    shortValues.add(Short.MAX_VALUE);
    intValues.add(Integer.MIN_VALUE);
    intValues.add(Integer.MAX_VALUE);
    longValues.add(Long.MIN_VALUE);
    longValues.add(Long.MAX_VALUE);
    floatValues.add(Float.MIN_VALUE);
    floatValues.add(Float.MAX_VALUE);
    // NaN deliberately excluded: NaN breaks total ordering in a TreeSet's
    // comparisons and in binary search.
    floatValues.add(Float.NEGATIVE_INFINITY);
    floatValues.add(Float.POSITIVE_INFINITY);
    doubleValues.add(Double.MIN_VALUE);
    doubleValues.add(Double.MAX_VALUE);
    doubleValues.add(Double.NEGATIVE_INFINITY);
    doubleValues.add(Double.POSITIVE_INFINITY);
    // Fill up to DISTINCT_VALUES with random values (sets drop duplicates).
    while (shortValues.size() < DISTINCT_VALUES) {
        shortValues.add((short) random.nextInt());
    }
    while (intValues.size() < DISTINCT_VALUES) {
        intValues.add(random.nextInt());
    }
    while (longValues.size() < DISTINCT_VALUES) {
        longValues.add(random.nextLong());
    }
    while (floatValues.size() < DISTINCT_VALUES) {
        floatValues.add(random.nextFloat());
    }
    while (doubleValues.size() < DISTINCT_VALUES) {
        doubleValues.add(random.nextDouble());
    }
    // Copy the sorted values into the instance arrays used by the tests.
    int i = 0;
    for (Short shortValue : shortValues) {
        _shorts[i] = shortValue;
        ++i;
    }
    i = 0;
    for (Integer intValue : intValues) {
        _ints[i] = intValue;
        ++i;
    }
    i = 0;
    for (Long longValue : longValues) {
        _longs[i] = longValue;
        ++i;
    }
    i = 0;
    for (Float floatValue : floatValues) {
        _floats[i] = floatValue;
        ++i;
    }
    i = 0;
    for (Double doubleValue : doubleValues) {
        _doubles[i] = doubleValue;
        ++i;
    }
    // Write a file that multiplexes all these values: each row holds one
    // short, int, long, float and double at fixed offsets.
    final int SHORTS_OFFSET = 0;
    final int INTS_OFFSET = SHORTS_OFFSET + Short.SIZE / Byte.SIZE;
    final int LONGS_OFFSET = INTS_OFFSET + Integer.SIZE / Byte.SIZE;
    final int FLOATS_OFFSET = LONGS_OFFSET + Long.SIZE / Byte.SIZE;
    final int DOUBLES_OFFSET = FLOATS_OFFSET + Float.SIZE / Byte.SIZE;
    final int ROW_WIDTH = DOUBLES_OFFSET + Double.SIZE / Byte.SIZE;
    ByteBuffer buffer = ByteBuffer.allocate(ROW_WIDTH * DISTINCT_VALUES);
    for (i = 0; i < DISTINCT_VALUES; ++i) {
        final int baseOffset = i * ROW_WIDTH;
        buffer.putShort(baseOffset + SHORTS_OFFSET, _shorts[i]);
        buffer.putInt(baseOffset + INTS_OFFSET, _ints[i]);
        buffer.putLong(baseOffset + LONGS_OFFSET, _longs[i]);
        buffer.putFloat(baseOffset + FLOATS_OFFSET, _floats[i]);
        buffer.putDouble(baseOffset + DOUBLES_OFFSET, _doubles[i]);
    }
    File tempFile = File.createTempFile("pinot-test", ".tmp");
    tempFile.deleteOnExit();
    // Fix: try-with-resources so the stream/channel are closed even if the
    // write throws (the original leaked them on the exception path).
    // No flip() needed: the absolute put(index, value) calls above do not
    // advance the position, so it is still 0 with limit == capacity.
    try (FileOutputStream outputStream = new FileOutputStream(tempFile);
         FileChannel channel = outputStream.getChannel()) {
        channel.write(buffer);
    }
    _genericRowColumnDataFileReader = GenericRowColumnDataFileReader.forHeap(tempFile, DISTINCT_VALUES, 5, new int[] { Short.SIZE / Byte.SIZE, Integer.SIZE / Byte.SIZE, Long.SIZE / Byte.SIZE, Float.SIZE / Byte.SIZE, Double.SIZE / Byte.SIZE });
    _searchableByteBufferUtil = new SearchableByteBufferUtil(_genericRowColumnDataFileReader);
}
Example usage of org.testng.annotations.BeforeSuite from the OpenAM project (by OpenRock): the suiteSetup method of the AMIdentityTestBase class.
/**
 * Creates realms before the test suites are executed. The {@code parent-realms}
 * parameter is a comma-separated list of realm names; each entry is trimmed
 * and created as a sub-realm under the administrator's token.
 *
 * @throws SMSException if a realm cannot be created.
 * @throws SSOException if the super administrator Single Sign On token is
 *         invalid.
 */
@Parameters({ "parent-realms" })
@BeforeSuite(groups = { "api" })
public void suiteSetup(String realms) throws SSOException, SMSException {
    Object[] params = { realms };
    entering("suiteSetup", params);
    StringTokenizer st = new StringTokenizer(realms, ",");
    // Fix: pair hasMoreTokens() with nextToken() (hasMoreElements() is the
    // Enumeration-compat alias; the token-based pair is the idiomatic one).
    while (st.hasMoreTokens()) {
        String realm = st.nextToken().trim();
        createSubRealm(getAdminSSOToken(), realm);
    }
    exiting("suiteSetup");
}
Aggregations