Use of org.apache.spark.sql.types.Decimal in project carbondata by apache.
From the class LessThanExpressionUnitTest, method testEvaluateForLessThanExpressionWithDecimalDataType.
@Test
public void testEvaluateForLessThanExpressionWithDecimalDataType()
    throws FilterUnsupportedException, FilterIllegalMemberException {
  ColumnExpression right = new ColumnExpression("contact", DataType.DECIMAL);
  right.setColIndex(0);
  ColumnExpression left = new ColumnExpression("contact", DataType.DECIMAL);
  left.setColIndex(1);
  lessThanExpression = new LessThanExpression(left, right);

  RowImpl value = new RowImpl();
  Decimal[] row = new Decimal[] { Decimal.apply(256324.0) };
  Decimal[] row1 = new Decimal[] { Decimal.apply(123451245.0) };
  Object[] objectRow = { row1, row };
  value.setValues(objectRow);

  // stub ExpressionResult.getDecimal() so the two operand values are returned in sequence
  new MockUp<ExpressionResult>() {
    Boolean returnMockFlag = true;

    @Mock public BigDecimal getDecimal() {
      if (returnMockFlag) {
        returnMockFlag = false;
        return new BigDecimal(256324.0);
      } else {
        return new BigDecimal(123451245.0);
      }
    }
  };

  ExpressionResult result = lessThanExpression.evaluate(value);
  assertTrue(result.getBoolean());
}
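For reference, the values that the mocked getDecimal() feeds into the expression boil down to a plain BigDecimal comparison. The standalone sketch below (the class name DecimalLessThanSketch is illustrative and not part of carbondata) shows the same Decimal-to-BigDecimal conversion and less-than check on the two test values, using only Spark's Decimal API:

import java.math.BigDecimal;
import org.apache.spark.sql.types.Decimal;

public class DecimalLessThanSketch {
  public static void main(String[] args) {
    // Wrap the raw doubles the same way the test does.
    Decimal smaller = Decimal.apply(256324.0);
    Decimal larger = Decimal.apply(123451245.0);

    // The comparison ultimately happens on java.math.BigDecimal values,
    // obtained from Spark's Decimal via toJavaBigDecimal().
    BigDecimal left = smaller.toJavaBigDecimal();
    BigDecimal right = larger.toJavaBigDecimal();

    boolean lessThan = left.compareTo(right) < 0;
    System.out.println("left < right ? " + lessThan); // prints: left < right ? true
  }
}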
Use of org.apache.spark.sql.types.Decimal in project carbondata by apache.
From the class TablePage, method addRow.
/**
 * Add one row to the internal store; it will be converted into columnar layout.
 *
 * @param rowId id of the input row
 * @param row   row object
 */
void addRow(int rowId, CarbonRow row) throws KeyGenException {
  // convert each column category

  // 1. convert dictionary columns
  byte[] mdk = WriteStepRowUtil.getMdk(row, model.getMDKeyGenerator());
  byte[][] keys = model.getSegmentProperties().getFixedLengthKeySplitter().splitKey(mdk);
  keyColumnPage.putKey(rowId, keys);

  // 2. convert noDictionary columns and complex columns
  int noDictionaryCount = noDictDimensionPage.length;
  int complexColumnCount = complexDimensionPage.length;
  if (noDictionaryCount > 0 || complexColumnCount > 0) {
    byte[][] noDictAndComplex = WriteStepRowUtil.getNoDictAndComplexDimension(row);
    for (int i = 0; i < noDictAndComplex.length; i++) {
      if (i < noDictionaryCount) {
        // noDictionary columns are variable length, so each element is prepared
        // as an LV encoded byte array (first two bytes are the length of the array)
        byte[] valueWithLength = addLengthToByteArray(noDictAndComplex[i]);
        noDictDimensionPage[i].putByteArray(rowId, valueWithLength);
      } else {
        // complex columns
        addComplexColumn(i - noDictionaryCount, rowId, noDictAndComplex[i]);
      }
    }
  }

  // 3. convert measure columns
  Object[] measureColumns = WriteStepRowUtil.getMeasure(row);
  for (int i = 0; i < measurePage.length; i++) {
    Object value = measureColumns[i];
    // in the compaction flow a decimal measure arrives as a Spark Decimal and
    // needs to be converted to a byte array before being written to the page
    if (measurePage[i].getDataType() == DataType.DECIMAL && model.isCompactionFlow()) {
      BigDecimal bigDecimal = ((Decimal) value).toJavaBigDecimal();
      value = DataTypeUtil.bigDecimalToByte(bigDecimal);
    }
    measurePage[i].putData(rowId, value);
  }
}
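The comment in step 2 describes the LV (length-value) layout used for no-dictionary values. As a rough illustration, the sketch below prepends a 2-byte length prefix to a value, which is the layout that comment describes; the helper name lvEncode is made up for this example and is not carbondata's addLengthToByteArray:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class LvEncodingSketch {
  // Prefix a variable-length value with its length stored in the first two bytes,
  // mirroring the layout described in the comment of step 2 above.
  static byte[] lvEncode(byte[] value) {
    ByteBuffer buffer = ByteBuffer.allocate(2 + value.length);
    buffer.putShort((short) value.length);
    buffer.put(value);
    return buffer.array();
  }

  public static void main(String[] args) {
    byte[] raw = "carbondata".getBytes(StandardCharsets.UTF_8);
    byte[] encoded = lvEncode(raw);
    // 2 length bytes + 10 payload bytes
    System.out.println("encoded length = " + encoded.length); // prints: encoded length = 12
  }
}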
Use of org.apache.spark.sql.types.Decimal in project carbondata by apache.
From the class NotInExpressionUnitTest, method testEvaluateForNotInExpressionWithDecimalDataType.
@Test
public void testEvaluateForNotInExpressionWithDecimalDataType()
    throws FilterUnsupportedException, FilterIllegalMemberException {
  ColumnExpression left = new ColumnExpression("left_contact", DataType.DECIMAL);
  left.setColIndex(0);
  ColumnExpression right = new ColumnExpression("right_contact", DataType.DECIMAL);
  right.setColIndex(1);
  notInExpression = new NotInExpression(left, right);

  RowImpl value = new RowImpl();
  Decimal row = Decimal.apply(123452154.0);
  Decimal row1 = Decimal.apply(1234521215454.0);
  Object[] objectRow = { row, row1 };
  value.setValues(objectRow);

  new MockUp<ExpressionResult>() {
    @Mock public BigDecimal getDecimal() {
      return new BigDecimal(1234521215454.0);
    }
  };

  ExpressionResult result = notInExpression.evaluate(value);
  assertTrue(result.getBoolean());
}
Use of org.apache.spark.sql.types.Decimal in project carbondata by apache.
From the class NotEqualsExpressionUnitTest, method testEvaluateWithForNotEqualsExpressionDecimalDataType.
@Test
public void testEvaluateWithForNotEqualsExpressionDecimalDataType()
    throws FilterUnsupportedException, FilterIllegalMemberException {
  ColumnExpression right = new ColumnExpression("contact", DataType.DECIMAL);
  right.setColIndex(1);
  ColumnExpression left = new ColumnExpression("contact", DataType.DECIMAL);
  left.setColIndex(0);
  notEqualsExpression = new NotEqualsExpression(left, right);

  RowImpl value = new RowImpl();
  Decimal[] row = new Decimal[] { Decimal.apply(12345.0) };
  Decimal[] row1 = new Decimal[] { Decimal.apply(1235445.0) };
  Object[] objectRow = { row, row1 };
  value.setValues(objectRow);

  new MockUp<ExpressionResult>() {
    Boolean returnMockFlag = true;

    @Mock public BigDecimal getDecimal() {
      if (returnMockFlag) {
        returnMockFlag = false;
        return new BigDecimal(12345.0);
      } else {
        return new BigDecimal(1235445.0);
      }
    }
  };

  ExpressionResult result = notEqualsExpression.evaluate(value);
  assertTrue(result.getBoolean());
}
Use of org.apache.spark.sql.types.Decimal in project carbondata by apache.
From the class LessThanEqualToExpressionUnitTest, method testEvaluateForLessThanEqualToExpressionWithDecimalDataType.
@Test
public void testEvaluateForLessThanEqualToExpressionWithDecimalDataType()
    throws FilterUnsupportedException, FilterIllegalMemberException {
  ColumnExpression right = new ColumnExpression("right_contact", DataType.DECIMAL);
  right.setColIndex(0);
  ColumnExpression left = new ColumnExpression("left_contact", DataType.DECIMAL);
  left.setColIndex(1);
  lessThanEqualToExpression = new LessThanEqualToExpression(left, right);

  RowImpl value = new RowImpl();
  Decimal[] row = new Decimal[] { Decimal.apply(46851.2) };
  Decimal[] row1 = new Decimal[] { Decimal.apply(45821.02) };
  Object[] objectRow = { row1, row };
  value.setValues(objectRow);

  new MockUp<ExpressionResult>() {
    Boolean returnMockFlag = true;

    @Mock public BigDecimal getDecimal() {
      if (returnMockFlag) {
        returnMockFlag = false;
        return new BigDecimal(45821.02);
      } else {
        return new BigDecimal(46851.2);
      }
    }
  };

  ExpressionResult result = lessThanEqualToExpression.evaluate(value);
  assertTrue(result.getBoolean());
}