Use of org.apache.carbondata.core.index.IndexFilter in project carbondata by apache.
Class Hive2CarbonExpressionTest, method testEqualHiveFilter:
@Test
public void testEqualHiveFilter() throws IOException {
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, "1001");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFOPEqual(), children);
  Configuration configuration = new Configuration();
  configuration.set("mapreduce.input.carboninputformat.filter.predicate",
      SerializationUtilities.serializeExpression(node));
  CarbonInputFormat.setFilterPredicates(configuration,
      new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(node)));
  final Job job = new Job(new JobConf(configuration));
  final CarbonTableInputFormat format = new CarbonTableInputFormat();
  format.setTableInfo(job.getConfiguration(), table.getTableInfo());
  format.setTablePath(job.getConfiguration(), table.getTablePath());
  format.setTableName(job.getConfiguration(), table.getTableName());
  format.setDatabaseName(job.getConfiguration(), table.getDatabaseName());
  List<InputSplit> list = format.getSplits(job);
  Assert.assertEquals(0, list.size());
}
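The equality test above follows a pattern common to all of these examples: build a Hive ExprNodeGenericFuncDesc, convert it with Hive2CarbonExpression.convertExprHive2Carbon, and wrap the result in an IndexFilter. The sketch below is a hypothetical helper, not part of the test class; it assumes the surrounding test's imports and that table is a CarbonTable.

// Hypothetical helper (not in the project): builds a "column = value" filter
// for an int column and wraps the converted expression in an IndexFilter.
private IndexFilter intEqualsFilter(CarbonTable table, String columnName, String value) {
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, columnName, null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, value);
  List<ExprNodeDesc> children = Lists.newArrayList(column, constant);
  ExprNodeGenericFuncDesc equal =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFOPEqual(), children);
  return new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(equal));
}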
Class Hive2CarbonExpressionTest, method testAndHiveFilter:
@Test
public void testAndHiveFilter() throws IOException {
  ExprNodeDesc column1 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", null, false);
  ExprNodeDesc constant1 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, "500");
  List<ExprNodeDesc> children1 = Lists.newArrayList();
  children1.add(column1);
  children1.add(constant1);
  ExprNodeGenericFuncDesc node1 =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFOPEqual(), children1);
  ExprNodeDesc column2 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", null, false);
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, "4999999");
  List<ExprNodeDesc> children2 = Lists.newArrayList();
  children2.add(column2);
  children2.add(constant2);
  ExprNodeGenericFuncDesc node2 =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFOPEqual(), children2);
  List<ExprNodeDesc> children3 = Lists.newArrayList();
  children3.add(node1);
  children3.add(node2);
  ExprNodeGenericFuncDesc node3 =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFOPAnd(), children3);
  Configuration configuration = new Configuration();
  CarbonInputFormat.setFilterPredicates(configuration,
      new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(node3)));
  final Job job = new Job(new JobConf(configuration));
  final CarbonFileInputFormat format = new CarbonFileInputFormat();
  format.setTableInfo(job.getConfiguration(), table.getTableInfo());
  format.setTablePath(job.getConfiguration(), table.getTablePath());
  format.setTableName(job.getConfiguration(), table.getTableName());
  format.setDatabaseName(job.getConfiguration(), table.getDatabaseName());
  List<InputSplit> list = format.getSplits(job);
  Assert.assertEquals(0, list.size());
}
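An OR combination can be built the same way by swapping the UDF. The sketch below reuses node1, node2, configuration, and table from the test above; it assumes Hive2CarbonExpression also maps GenericUDFOPOr, so check the converter's supported UDFs before relying on it.

// Sketch: OR variant of the filter above, assuming GenericUDFOPOr is
// supported by Hive2CarbonExpression.
List<ExprNodeDesc> orChildren = Lists.newArrayList();
orChildren.add(node1);
orChildren.add(node2);
ExprNodeGenericFuncDesc orNode =
    new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPOr(), orChildren);
CarbonInputFormat.setFilterPredicates(configuration,
    new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(orNode)));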
Class Hive2CarbonExpressionTest, method testNotNullHiveFilter:
@Test
public void testNotNullHiveFilter() throws IOException {
  ExprNodeDesc column1 = new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "name", null, false);
  List<ExprNodeDesc> children1 = Lists.newArrayList();
  children1.add(column1);
  ExprNodeGenericFuncDesc node1 =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPNotNull(), children1);
  Configuration configuration = new Configuration();
  CarbonInputFormat.setFilterPredicates(configuration,
      new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(node1)));
  final Job job = new Job(new JobConf(configuration));
  final CarbonFileInputFormat format = new CarbonFileInputFormat();
  format.setTableInfo(job.getConfiguration(), table.getTableInfo());
  format.setTablePath(job.getConfiguration(), table.getTablePath());
  format.setTableName(job.getConfiguration(), table.getTableName());
  format.setDatabaseName(job.getConfiguration(), table.getDatabaseName());
  List<InputSplit> list = format.getSplits(job);
  Assert.assertEquals(1, list.size());
}
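The IS NULL counterpart only requires swapping the UDF. This sketch mirrors the test above and reuses its configuration and table; it assumes Hive2CarbonExpression also maps GenericUDFOPNull.

// Sketch: IS NULL filter on the same column, assuming GenericUDFOPNull is supported.
ExprNodeDesc nameColumn = new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "name", null, false);
List<ExprNodeDesc> isNullChildren = Lists.newArrayList();
isNullChildren.add(nameColumn);
ExprNodeGenericFuncDesc isNullNode =
    new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPNull(), isNullChildren);
CarbonInputFormat.setFilterPredicates(configuration,
    new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(isNullNode)));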
Class Hive2CarbonExpressionTest, method testNotEqualHiveFilter:
@Test
public void testNotEqualHiveFilter() throws IOException {
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", null, false);
  ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, "500");
  List<ExprNodeDesc> children = Lists.newArrayList();
  children.add(column);
  children.add(constant);
  ExprNodeGenericFuncDesc node =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFOPNotEqual(), children);
  Configuration configuration = new Configuration();
  CarbonInputFormat.setFilterPredicates(configuration,
      new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(node)));
  final Job job = new Job(new JobConf(configuration));
  final CarbonFileInputFormat format = new CarbonFileInputFormat();
  format.setTableInfo(job.getConfiguration(), table.getTableInfo());
  format.setTablePath(job.getConfiguration(), table.getTablePath());
  format.setTableName(job.getConfiguration(), table.getTableName());
  format.setDatabaseName(job.getConfiguration(), table.getDatabaseName());
  List<InputSplit> list = format.getSplits(job);
  Assert.assertEquals(1, list.size());
}
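Range predicates follow the same two-child shape (column, constant). The sketch below builds id < 500 in the test's context (configuration and table) and assumes Hive2CarbonExpression also maps GenericUDFOPLessThan; verify the converter's supported UDFs before using it.

// Sketch: id < 500, assuming GenericUDFOPLessThan is supported by the converter.
ExprNodeDesc idColumn = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", null, false);
ExprNodeDesc bound = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, "500");
List<ExprNodeDesc> ltChildren = Lists.newArrayList(idColumn, bound);
ExprNodeGenericFuncDesc ltNode =
    new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFOPLessThan(), ltChildren);
CarbonInputFormat.setFilterPredicates(configuration,
    new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(ltNode)));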
Class Hive2CarbonExpressionTest, method testInHiveFilter:
@Test
public void testInHiveFilter() throws IOException {
  ExprNodeDesc column1 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", null, false);
  ExprNodeDesc constant1 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, "500");
  ExprNodeDesc constant2 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, "600");
  ExprNodeDesc constant3 = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, "700");
  List<ExprNodeDesc> children1 = Lists.newArrayList();
  children1.add(column1);
  children1.add(constant1);
  children1.add(constant2);
  children1.add(constant3);
  ExprNodeGenericFuncDesc node1 =
      new ExprNodeGenericFuncDesc(TypeInfoFactory.intTypeInfo, new GenericUDFIn(), children1);
  Configuration configuration = new Configuration();
  CarbonInputFormat.setFilterPredicates(configuration,
      new IndexFilter(table, Hive2CarbonExpression.convertExprHive2Carbon(node1)));
  final Job job = new Job(new JobConf(configuration));
  final CarbonFileInputFormat format = new CarbonFileInputFormat();
  format.setTableInfo(job.getConfiguration(), table.getTableInfo());
  format.setTablePath(job.getConfiguration(), table.getTablePath());
  format.setTableName(job.getConfiguration(), table.getTableName());
  format.setDatabaseName(job.getConfiguration(), table.getDatabaseName());
  List<InputSplit> list = format.getSplits(job);
  Assert.assertEquals(1, list.size());
}
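Every example repeats the same job and input-format wiring after the filter is set on the configuration. A hypothetical helper like the one below could encapsulate that boilerplate; it is not part of the project and assumes the same table field as the tests above.

// Hypothetical helper: set up the job and CarbonFileInputFormat, then return the splits.
private List<InputSplit> getSplitsFor(Configuration configuration) throws IOException {
  final Job job = new Job(new JobConf(configuration));
  final CarbonFileInputFormat format = new CarbonFileInputFormat();
  format.setTableInfo(job.getConfiguration(), table.getTableInfo());
  format.setTablePath(job.getConfiguration(), table.getTablePath());
  format.setTableName(job.getConfiguration(), table.getTableName());
  format.setDatabaseName(job.getConfiguration(), table.getDatabaseName());
  return format.getSplits(job);
}

With such a helper, each test would reduce to building the Hive expression, calling CarbonInputFormat.setFilterPredicates, and asserting on the size of the returned split list.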