Example 6 with SQLField

Use of org.pentaho.di.core.sql.SQLField in the project pdi-dataservice-server-plugin by pentaho.

The class SqlTransGenerator, method generateTransMeta.

/**
 * Generate the transformation metadata for the SQL query of this data service.
 *
 * @return the generated transformation metadata
 */
public TransMeta generateTransMeta() throws KettleException {
    TransMeta transMeta = new TransMeta();
    StringBuilder sbsql = new StringBuilder(sql.getServiceName());
    sbsql.append(" - SQL - ").append(sql.getSqlString());
    // Strip newlines and carriage returns from the generated transformation name
    //
    for (int i = sbsql.length() - 1; i >= 0; i--) {
        if (sbsql.charAt(i) == '\n' || sbsql.charAt(i) == '\r') {
            sbsql.setCharAt(i, ' ');
        }
    }
    transMeta.setName(sbsql.toString());
    xLocation = 50;
    // Add an injector where we will pump in the rows from the service transformation.
    // 
    StepMeta firstStep = generateInjectorStep();
    transMeta.addStep(firstStep);
    injectorStepName = firstStep.getName();
    StepMeta lastStep = firstStep;
    if (serviceLimit > 0) {
        StepMeta serviceLimitStep = generateLimitStep("Limit input rows", 0, serviceLimit);
        lastStep = addToTrans(serviceLimitStep, transMeta, lastStep);
    }
    // Set conversion masks
    lastStep = addToTrans(generateConversionStep(), transMeta, lastStep);
    // Add possible constants to the rows...
    // 
    List<SQLField> constFields = sql.getSelectFields().getConstantFields();
    if (!constFields.isEmpty()) {
        StepMeta constStep = generateConstStep(constFields);
        lastStep = addToTrans(constStep, transMeta, lastStep);
    }
    // Add filters, constants, calculator steps to calculate possible IIF functions...
    // This block handles the IIF functions in the SELECT clause
    // 
    List<SQLField> iifFields = sql.getSelectFields().getIifFunctionFields();
    for (SQLField iifField : iifFields) {
        lastStep = generateIifStep(iifField, transMeta, lastStep);
    }
    // Add a filter step for the WHERE clause, if any
    //
    if (sql.getWhereCondition() != null && !sql.getWhereCondition().isEmpty()) {
        StepMeta filterStep = generateFilterStep(sql.getWhereCondition().getCondition(), false);
        lastStep = addToTrans(filterStep, transMeta, lastStep);
    }
    // We optionally need to aggregate the data
    // 
    List<SQLField> aggFields = sql.getSelectFields().getAggregateFields();
    if (sql.getHavingCondition() != null) {
        List<SQLField> havingFields = sql.getHavingCondition().extractHavingFields(sql.getSelectFields().getFields(), aggFields, transMeta.getStepFields(lastStep));
        aggFields.addAll(havingFields);
    }
    List<SQLField> groupFields = sql.getGroupFields().getFields();
    if (aggFields.size() > 0 || groupFields.size() > 0) {
        StepMeta groupStep = generateGroupByStep(aggFields, groupFields, transMeta.getStepFields(lastStep));
        lastStep = addToTrans(groupStep, transMeta, lastStep);
    }
    // Calculate any IIF functions used in the ORDER BY fields
    //
    if (sql.getOrderFields() != null) {
        iifFields = sql.getOrderFields().getIifFunctionFields();
        for (SQLField iifField : iifFields) {
            lastStep = generateIifStep(iifField, transMeta, lastStep);
        }
    }
    // Add a filter step for the HAVING clause, if any
    //
    if (sql.getHavingCondition() != null && !sql.getHavingCondition().isEmpty()) {
        StepMeta filterStep = generateFilterStep(sql.getHavingCondition().getCondition(), true);
        lastStep = addToTrans(filterStep, transMeta, lastStep);
    }
    // 
    if (sql.getSelectFields().isDistinct()) {
        // Add a step that keeps only the distinct rows (a memory group-by on all selected fields)
        // 
        StepMeta filterStep = generateUniqueStep(transMeta.getStepFields(lastStep));
        lastStep = addToTrans(filterStep, transMeta, lastStep);
    }
    // Sort the rows according to the ORDER BY clause
    //
    if (sql.getOrderFields() != null && !sql.getOrderFields().isEmpty()) {
        StepMeta sortStep = generateSortStep(transMeta.getStepFields(lastStep));
        lastStep = addToTrans(sortStep, transMeta, lastStep);
    }
    if (sql.getSelectFields().getFields().size() > 0) {
        lastStep = addToTrans(generateSelectStep(), transMeta, lastStep);
    }
    // Apply the LIMIT (and OFFSET) values from the query
    if (sql.getLimitClause() != null) {
        int limit = sql.getLimitValues().getLimit();
        int offset = sql.getLimitValues().getOffset();
        StepMeta limitStep = generateLimitStep("Limit rows", offset, limit);
        lastStep = addToTrans(limitStep, transMeta, lastStep);
    }
    // Apply the overall row limit, if one was set, with a sample step
    //
    if (rowLimit > 0) {
        StepMeta sampleStep = generateSampleStep();
        lastStep = addToTrans(sampleStep, transMeta, lastStep);
    }
    // Finally add a dummy step containing the result
    // 
    StepMeta resultStep = generateResultStep();
    resultStepName = resultStep.getName();
    lastStep = addToTrans(resultStep, transMeta, lastStep);
    return transMeta;
}
Also used : TransMeta (org.pentaho.di.trans.TransMeta), DummyTransMeta (org.pentaho.di.trans.steps.dummytrans.DummyTransMeta), SQLField (org.pentaho.di.core.sql.SQLField), StepMeta (org.pentaho.di.trans.step.StepMeta)
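
A minimal sketch of how this method might be driven from calling code. The SqlTransGenerator constructor signature used here (an SQL object plus a row limit) and the "sales" service name are assumptions for illustration, not taken from the example above.

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.sql.SQL;
import org.pentaho.di.trans.TransMeta;

public class SqlTransGeneratorSketch {
    public static void main(String[] args) throws Exception {
        // SqlTransGenerator itself is assumed to be imported from the plugin's package.
        KettleEnvironment.init();
        // Parse a query against a hypothetical data service called "sales".
        SQL sql = new SQL("SELECT region, SUM(amount) FROM sales GROUP BY region");
        // Assumed constructor: a row limit of 0 meaning "no sample limit".
        SqlTransGenerator generator = new SqlTransGenerator(sql, 0);
        TransMeta transMeta = generator.generateTransMeta();
        System.out.println("Generated transformation: " + transMeta.getName());
    }
}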

Example 7 with SQLField

Use of org.pentaho.di.core.sql.SQLField in the project pdi-dataservice-server-plugin by pentaho.

The class SqlTransGenerator, method generateConstStep.

private StepMeta generateConstStep(List<SQLField> fields) throws KettleException {
    ConstantMeta meta = new ConstantMeta();
    meta.allocate(fields.size());
    for (int i = 0; i < fields.size(); i++) {
        SQLField field = fields.get(i);
        ValueMetaInterface valueMeta = field.getValueMeta();
        meta.getFieldName()[i] = "Constant_" + field.getFieldIndex() + "_" + field.getField();
        meta.getFieldFormat()[i] = valueMeta.getConversionMask();
        meta.getFieldType()[i] = valueMeta.getTypeDesc();
        meta.getFieldLength()[i] = valueMeta.getLength();
        meta.getFieldPrecision()[i] = valueMeta.getPrecision();
        meta.getDecimal()[i] = valueMeta.getDecimalSymbol();
        meta.getGroup()[i] = valueMeta.getGroupingSymbol();
        meta.getValue()[i] = valueMeta.getString(field.getValueData());
    }
    StepMeta stepMeta = new StepMeta("Constants", meta);
    stepMeta.setLocation(xLocation, 50);
    xLocation += 100;
    stepMeta.setDraw(true);
    return stepMeta;
}
Also used : SQLField (org.pentaho.di.core.sql.SQLField), ConstantMeta (org.pentaho.di.trans.steps.constant.ConstantMeta), StepMeta (org.pentaho.di.trans.step.StepMeta), ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface)
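
A standalone sketch using the same ConstantMeta accessors shown above to declare a single String constant; the helper name, field name, and value are hypothetical.

import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.constant.ConstantMeta;

public class ConstantStepSketch {
    static StepMeta buildConstantStep() {
        ConstantMeta meta = new ConstantMeta();
        // Reserve room for one constant field.
        meta.allocate(1);
        meta.getFieldName()[0] = "Constant_0_status"; // hypothetical name, mirroring the naming scheme above
        meta.getFieldType()[0] = "String";
        meta.getFieldLength()[0] = -1;
        meta.getFieldPrecision()[0] = -1;
        meta.getValue()[0] = "ACTIVE";
        StepMeta stepMeta = new StepMeta("Constants", meta);
        stepMeta.setDraw(true);
        return stepMeta;
    }
}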

Example 8 with SQLField

Use of org.pentaho.di.core.sql.SQLField in the project pdi-dataservice-server-plugin by pentaho.

The class SqlTransGenerator, method generateUniqueStep.

private StepMeta generateUniqueStep(RowMetaInterface rowMeta) {
    SQLFields fields = sql.getSelectFields();
    MemoryGroupByMeta meta = new MemoryGroupByMeta();
    meta.allocate(fields.getFields().size(), 0);
    for (int i = 0; i < fields.getFields().size(); i++) {
        SQLField field = fields.getFields().get(i);
        if (!Utils.isEmpty(field.getAlias()) && rowMeta.searchValueMeta(field.getAlias()) != null) {
            meta.getGroupField()[i] = field.getAlias();
        } else {
            meta.getGroupField()[i] = field.getField();
        }
    }
    StepMeta stepMeta = new StepMeta("DISTINCT", meta);
    stepMeta.setLocation(xLocation, 50);
    xLocation += 100;
    stepMeta.setDraw(true);
    return stepMeta;
}
Also used : MemoryGroupByMeta (org.pentaho.di.trans.steps.memgroupby.MemoryGroupByMeta), SQLFields (org.pentaho.di.core.sql.SQLFields), SQLField (org.pentaho.di.core.sql.SQLField), StepMeta (org.pentaho.di.trans.step.StepMeta)
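
A sketch of the same DISTINCT idea outside the generator: group on every selected field and declare no aggregates. The helper name and the field names passed to it are hypothetical.

import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.memgroupby.MemoryGroupByMeta;

public class DistinctStepSketch {
    static StepMeta buildDistinctStep(String... fieldNames) {
        MemoryGroupByMeta meta = new MemoryGroupByMeta();
        // Group fields only, no aggregates: duplicate rows collapse into one.
        meta.allocate(fieldNames.length, 0);
        for (int i = 0; i < fieldNames.length; i++) {
            meta.getGroupField()[i] = fieldNames[i];
        }
        StepMeta stepMeta = new StepMeta("DISTINCT", meta);
        stepMeta.setDraw(true);
        return stepMeta;
    }
}

For example, buildDistinctStep("region", "product") would mimic SELECT DISTINCT region, product.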

Example 9 with SQLField

Use of org.pentaho.di.core.sql.SQLField in the project pdi-dataservice-server-plugin by pentaho.

The class SqlTransGenerator, method generateGroupByStep.

private StepMeta generateGroupByStep(List<SQLField> aggFields, List<SQLField> groupFields, RowMetaInterface inputFields) throws KettleException {
    MemoryGroupByMeta meta = new MemoryGroupByMeta();
    meta.allocate(groupFields.size(), aggFields.size());
    // See if we need to always return a row or not (0 rows counted scenario)
    // 
    boolean returnRow = false;
    // Set the group-by fields
    for (int i = 0; i < groupFields.size(); i++) {
        SQLField field = groupFields.get(i);
        meta.getGroupField()[i] = field.getField();
    }
    // Set the aggregate fields
    for (int i = 0; i < aggFields.size(); i++) {
        SQLField field = aggFields.get(i);
        ValueMetaInterface valueMeta = field.getValueMeta();
        meta.getAggregateField()[i] = Const.NVL(field.getAlias(), field.getField());
        String subjectField;
        if (field.getValueData() == null) {
            // Not a constant: take the subject from the value metadata, or fall back to the first input field
            if (valueMeta == null) {
                // No value metadata available: use the first input field as the subject
                if (inputFields.size() == 0) {
                    throw new KettleException("No input fields found to aggregate on.");
                }
                subjectField = inputFields.getValueMeta(0).getName();
            } else {
                subjectField = valueMeta.getName();
            }
        } else {
            // A constant field to aggregate.
            // 
            subjectField = "Constant_" + field.getFieldIndex() + "_" + field.getField();
        }
        meta.getSubjectField()[i] = subjectField;
        int agg = 0;
        switch(field.getAggregation()) {
            case SUM:
                agg = MemoryGroupByMeta.TYPE_GROUP_SUM;
                break;
            case MIN:
                agg = MemoryGroupByMeta.TYPE_GROUP_MIN;
                break;
            case MAX:
                agg = MemoryGroupByMeta.TYPE_GROUP_MAX;
                break;
            case COUNT:
                if (field.isCountStar()) {
                    agg = MemoryGroupByMeta.TYPE_GROUP_COUNT_ANY;
                } else if (field.isCountDistinct()) {
                    agg = MemoryGroupByMeta.TYPE_GROUP_COUNT_DISTINCT;
                } else {
                    // Count a particular field
                    agg = MemoryGroupByMeta.TYPE_GROUP_COUNT_ALL;
                }
                returnRow = true;
                break;
            case AVG:
                agg = MemoryGroupByMeta.TYPE_GROUP_AVERAGE;
                break;
            default:
                throw new KettleException("Unhandled aggregation method [" + field.getAggregation() + "]");
        }
        meta.getAggregateType()[i] = agg;
    }
    meta.setAlwaysGivingBackOneRow(returnRow);
    StepMeta stepMeta = new StepMeta("Group by", meta);
    stepMeta.setLocation(xLocation, 50);
    xLocation += 100;
    stepMeta.setDraw(true);
    return stepMeta;
}
Also used : MemoryGroupByMeta (org.pentaho.di.trans.steps.memgroupby.MemoryGroupByMeta), KettleException (org.pentaho.di.core.exception.KettleException), SQLField (org.pentaho.di.core.sql.SQLField), StepMeta (org.pentaho.di.trans.step.StepMeta), ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface)
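
A sketch wiring a single COUNT(*) aggregate with the same MemoryGroupByMeta constants used above; the helper name, group field, and output column name are hypothetical.

import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.memgroupby.MemoryGroupByMeta;

public class GroupByCountSketch {
    static StepMeta buildCountStarStep(String groupField) {
        MemoryGroupByMeta meta = new MemoryGroupByMeta();
        // One group field, one aggregate.
        meta.allocate(1, 1);
        meta.getGroupField()[0] = groupField;
        meta.getAggregateField()[0] = "row_count";
        meta.getSubjectField()[0] = groupField; // COUNT(*) does not inspect the subject value
        meta.getAggregateType()[0] = MemoryGroupByMeta.TYPE_GROUP_COUNT_ANY;
        // As in the generator above, COUNT should still produce a row when no input rows arrive.
        meta.setAlwaysGivingBackOneRow(true);
        StepMeta stepMeta = new StepMeta("Group by", meta);
        stepMeta.setDraw(true);
        return stepMeta;
    }
}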

Aggregations

SQLField (org.pentaho.di.core.sql.SQLField) 9
StepMeta (org.pentaho.di.trans.step.StepMeta) 6
RowMeta (org.pentaho.di.core.row.RowMeta) 3
ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface) 3
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString) 3
CoreMatchers.containsString (org.hamcrest.CoreMatchers.containsString) 2
Matchers.anyString (org.mockito.Matchers.anyString) 2
KettleException (org.pentaho.di.core.exception.KettleException) 2
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface) 2
SQLFields (org.pentaho.di.core.sql.SQLFields) 2
MemoryGroupByMeta (org.pentaho.di.trans.steps.memgroupby.MemoryGroupByMeta) 2
ImmutableMap (com.google.common.collect.ImmutableMap) 1
OutputStream (java.io.OutputStream) 1
ArrayList (java.util.ArrayList) 1
Map (java.util.Map) 1
Before (org.junit.Before) 1
Test (org.junit.Test) 1
Mockito.doAnswer (org.mockito.Mockito.doAnswer) 1
InvocationOnMock (org.mockito.invocation.InvocationOnMock) 1
Answer (org.mockito.stubbing.Answer) 1