Use of java.math.BigInteger in the Apache Hadoop project.
Class TestModular, method squareBenchmarks.
/**
 * Benchmarks four ways of computing r*r mod n over the same random table:
 * square_slow, square (with precomputed 2^64 mod n), BigInteger
 * multiply+mod, and BigInteger modPow.  Each result is checked against the
 * precomputed answer from generateRN.
 */
static void squareBenchmarks() {
  final Timer t = new Timer(false);
  t.tick("squareBenchmarks(), MAX_SQRT=" + Modular.MAX_SQRT_LONG);
  final long[][][] rn = generateRN(1000, 1000);
  t.tick("generateRN");

  // Pass 1: square_slow.
  for (final long[][] cases : rn) {
    final long n = cases[0][0];
    for (int k = 1; k < cases.length; k++) {
      final long r = cases[k][0];
      final long expected = cases[k][1];
      final long s = square_slow(r, n);
      if (s != expected)
        assertEquals("r=" + r + ", n=" + n + ", answer=" + expected + " but s=" + s, expected, s);
    }
  }
  t.tick("square_slow");

  // Pass 2: square, which needs 2^64 mod n precomputed per modulus.
  for (final long[][] cases : rn) {
    final long n = cases[0][0];
    // 2^64 mod n = ((2^62 mod n) << 1) reduced once more if needed.
    long r2p64 = (0x4000000000000000L % n) << 1;
    if (r2p64 >= n)
      r2p64 -= n;
    for (int k = 1; k < cases.length; k++) {
      final long r = cases[k][0];
      final long expected = cases[k][1];
      final long s = square(r, n, r2p64);
      if (s != expected)
        assertEquals("r=" + r + ", n=" + n + ", answer=" + expected + " but s=" + s, expected, s);
    }
  }
  t.tick("square");

  // Pass 3: BigInteger multiply followed by mod.
  for (final long[][] cases : rn) {
    final long n = cases[0][0];
    final BigInteger N = BigInteger.valueOf(n);
    for (int k = 1; k < cases.length; k++) {
      final long r = cases[k][0];
      final long expected = cases[k][1];
      final BigInteger R = BigInteger.valueOf(r);
      final long s = R.multiply(R).mod(N).longValue();
      if (s != expected)
        assertEquals("r=" + r + ", n=" + n + ", answer=" + expected + " but s=" + s, expected, s);
    }
  }
  t.tick("R.multiply(R).mod(N)");

  // Pass 4: BigInteger modPow with exponent 2.
  for (final long[][] cases : rn) {
    final long n = cases[0][0];
    final BigInteger N = BigInteger.valueOf(n);
    for (int k = 1; k < cases.length; k++) {
      final long r = cases[k][0];
      final long expected = cases[k][1];
      final BigInteger R = BigInteger.valueOf(r);
      final long s = R.modPow(TWO, N).longValue();
      if (s != expected)
        assertEquals("r=" + r + ", n=" + n + ", answer=" + expected + " but s=" + s, expected, s);
    }
  }
  t.tick("R.modPow(TWO, N)");
}
Use of java.math.BigInteger in the Apache Hadoop project.
Class TestModular, method generateEN.
/**
 * Generates a table of modular-exponentiation test cases.
 *
 * For each of {@code nsize} random odd moduli n (> 1), produces
 * {@code esize} pairs {e, 2^e mod n} computed with BigInteger as the
 * reference implementation.  Layout: en[i][0] = {n}; en[i][j] = {e, answer}
 * for j >= 1.
 *
 * @param nsize number of moduli to generate
 * @param esize number of exponents per modulus
 * @return the generated test table
 */
static long[][][] generateEN(int nsize, int esize) {
  final long[][][] en = new long[nsize][][];
  for (int i = 0; i < en.length; i++) {
    en[i] = new long[esize + 1][];
    // 60-bit random modulus, forced odd; bump 1 to 3 so n > 1.
    long n = (RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL) | 1L;
    if (n == 1)
      n = 3;
    en[i][0] = new long[] { n };
    final BigInteger N = BigInteger.valueOf(n);
    for (int j = 1; j < en[i].length; j++) {
      // Clear the sign bit to get a non-negative exponent.  The previous
      // "if (e < 0) e = -e" failed for Long.MIN_VALUE (negation overflows
      // back to MIN_VALUE), which would have fed a negative exponent to
      // modPow and stored an invalid entry.
      final long e = RANDOM.nextLong() & Long.MAX_VALUE;
      final BigInteger E = BigInteger.valueOf(e);
      en[i][j] = new long[] { e, TWO.modPow(E, N).longValue() };
    }
  }
  return en;
}
Use of java.math.BigInteger in the Apache Hadoop project.
Class GenSort, method generateAsciiRecord.
/**
 * Writes one base-95 (printable ASCII) digit of {@code value}, treated as
 * an unsigned 64-bit quantity, into {@code buf[pos]} and returns the
 * remaining quotient.  A negative {@code long} means the top bit is set, so
 * the division is routed through BigInteger to get unsigned semantics.
 */
private static long firstBase95Digit(byte[] buf, int pos, long value) {
  if (value < 0) {
    // use biginteger to avoid the negative sign problem
    final BigInteger big = makeBigInteger(value);
    buf[pos] = (byte) (' ' + (big.mod(NINETY_FIVE).longValue()));
    return big.divide(NINETY_FIVE).longValue();
  }
  buf[pos] = (byte) (' ' + (value % 95));
  return value / 95;
}

/**
 * Generate an ascii record suitable for all sort benchmarks including
 * PennySort.
 */
static void generateAsciiRecord(byte[] recBuf, Unsigned16 rand, Unsigned16 recordNumber) {
  /* generate the 10-byte ascii key using mostly the high 64 bits. */
  long quotient = firstBase95Digit(recBuf, 0, rand.getHigh8());
  for (int pos = 1; pos <= 7; ++pos) {
    recBuf[pos] = (byte) (' ' + (quotient % 95));
    quotient /= 95;
  }
  // Last two key bytes come from the low 64 bits.
  quotient = firstBase95Digit(recBuf, 8, rand.getLow8());
  recBuf[9] = (byte) (' ' + (quotient % 95));
  /* add 2 bytes of "break" */
  recBuf[10] = ' ';
  recBuf[11] = ' ';
  /* convert the 128-bit record number to 32 bytes of ascii hexadecimal
   * as the next 32 bytes of the record.
   */
  for (int pos = 0; pos < 32; pos++) {
    recBuf[12 + pos] = (byte) recordNumber.getHexDigit(pos);
  }
  /* add 2 bytes of "break" data */
  recBuf[44] = ' ';
  recBuf[45] = ' ';
  /* add 52 bytes of filler based on low 48 bits of random number:
   * 13 groups of 4 identical hex digits. */
  for (int grp = 0; grp < 13; ++grp) {
    final byte digit = (byte) rand.getHexDigit(19 + grp);
    final int base = 46 + grp * 4;
    recBuf[base] = digit;
    recBuf[base + 1] = digit;
    recBuf[base + 2] = digit;
    recBuf[base + 3] = digit;
  }
  /* terminate the 100-byte record with CRLF (nice for Windows) */
  recBuf[98] = '\r';
  recBuf[99] = '\n';
}
Use of java.math.BigInteger in the Apache Hadoop project.
Class TestModular, method generateRN.
/**
 * Generates a table of modular-squaring test cases.
 *
 * For each of {@code nsize} random 60-bit moduli n (> 1), produces
 * {@code rsize} pairs {r, r*r mod n} with 0 <= r < n, computed with
 * BigInteger as the reference implementation.  Layout: rn[i][0] = {n};
 * rn[i][j] = {r, answer} for j >= 1.
 *
 * @param nsize number of moduli to generate
 * @param rsize number of residues per modulus
 * @return the generated test table
 */
static long[][][] generateRN(int nsize, int rsize) {
  final long[][][] rn = new long[nsize][][];
  for (int i = 0; i < rn.length; i++) {
    rn[i] = new long[rsize + 1][];
    // 60-bit random modulus; reflect 0 and 1 away so n > 1.
    long n = RANDOM.nextLong() & 0xFFFFFFFFFFFFFFFL;
    if (n <= 1)
      n = 0xFFFFFFFFFFFFFFFL - n;
    rn[i][0] = new long[] { n };
    final BigInteger N = BigInteger.valueOf(n);
    for (int j = 1; j < rn[i].length; j++) {
      // Clear the sign bit to get a non-negative residue.  The previous
      // "if (r < 0) r = -r" failed for Long.MIN_VALUE (negation overflows
      // back to MIN_VALUE): the still-negative r skipped the "r %= n"
      // reduction below and a value outside [0, n) was stored.
      long r = RANDOM.nextLong() & Long.MAX_VALUE;
      if (r >= n)
        r %= n;
      final BigInteger R = BigInteger.valueOf(r);
      rn[i][j] = new long[] { r, R.multiply(R).mod(N).longValue() };
    }
  }
  return rn;
}
Use of java.math.BigInteger in the Apache Hadoop project.
Class ProcfsBasedProcessTree, method constructProcessInfo.
/**
 * Construct the ProcessInfo using the process' PID and procfs rooted at the
 * specified directory and return the same. It is provided mainly to assist
 * testing purposes.
 *
 * Returns null on failing to read from procfs,
 *
 * @param pinfo ProcessInfo that needs to be updated
 * @param procfsDir root of the proc file system
 * @return updated ProcessInfo, null on errors.
 */
private static ProcessInfo constructProcessInfo(ProcessInfo pinfo, String procfsDir) {
  // Read "procfsDir/<pid>/stat" file - typically /proc/<pid>/stat
  final File pidDir = new File(procfsDir, pinfo.getPid());
  // try-with-resources closes both streams; closing the BufferedReader
  // also closes the wrapped InputStreamReader/FileInputStream.
  try (BufferedReader in = new BufferedReader(new InputStreamReader(
      new FileInputStream(new File(pidDir, PROCFS_STAT_FILE)),
      Charset.forName("UTF-8")))) {
    // The stat file is a single line.
    String str = in.readLine();
    if (str == null) {
      // Guard against an empty/truncated stat file: the previous code
      // passed null straight to matcher(), raising an uncaught NPE.
      LOG.warn("Unexpected: procfs stat file is empty" + " for process with pid " + pinfo.getPid());
      return null;
    }
    Matcher m = PROCFS_STAT_FILE_FORMAT.matcher(str);
    if (!m.find()) {
      LOG.warn("Unexpected: procfs stat file is not in the expected format" + " for process with pid " + pinfo.getPid());
      return null;
    }
    String processName = "(" + m.group(2) + ")";
    // Set (name) (ppid) (pgrpId) (session) (utime) (stime) (vsize) (rss)
    pinfo.updateProcessInfo(processName, m.group(3), Integer.parseInt(m.group(4)), Integer.parseInt(m.group(5)), Long.parseLong(m.group(7)), new BigInteger(m.group(8)), Long.parseLong(m.group(10)), Long.parseLong(m.group(11)));
    return pinfo;
  } catch (FileNotFoundException f) {
    // The process vanished in the interim!
    return null;
  } catch (IOException io) {
    LOG.warn("Error reading the stream " + io);
    return null;
  }
}
Aggregations