Use of org.apache.drill.common.types.TypeProtos.MinorType in the Apache Drill project.
Class ParquetToDrillTypeConverter, method toMajorType.
public static TypeProtos.MajorType toMajorType(PrimitiveType.PrimitiveTypeName primitiveTypeName, int length,
    TypeProtos.DataMode mode, SchemaElement schemaElement, OptionManager options) {
  ConvertedType convertedType = schemaElement.getConverted_type();
  MinorType minorType = getMinorType(primitiveTypeName, length, convertedType, options);
  TypeProtos.MajorType.Builder typeBuilder = TypeProtos.MajorType.newBuilder()
      .setMinorType(minorType)
      .setMode(mode);
  if (Types.isDecimalType(minorType)) {
    int precision = schemaElement.getPrecision();
    int scale = schemaElement.getScale();
    typeBuilder.setPrecision(precision).setScale(scale);
  }
  return typeBuilder.build();
}
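As a rough sketch (not taken from the Drill source, and assuming the usual imports of TypeProtos and MinorType), the builder calls above could be exercised directly to describe a nullable DECIMAL(10, 2) column; the minor type below is an assumption, since getMinorType() selects the actual one from the Parquet type:
// Hypothetical illustration only: building an equivalent MajorType by hand
// using the same builder calls the method above uses.
TypeProtos.MajorType decimalType = TypeProtos.MajorType.newBuilder()
    .setMinorType(MinorType.VARDECIMAL)       // assumed decimal minor type; getMinorType() picks the real one
    .setMode(TypeProtos.DataMode.OPTIONAL)
    .setPrecision(10)
    .setScale(2)
    .build();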
Use of org.apache.drill.common.types.TypeProtos.MinorType in the Apache Drill project.
Class LogFormatPlugin, method makeColumn.
private void makeColumn(SchemaBuilder builder, String name, int patternIndex) {
  String typeName = formatConfig.getDataType(patternIndex);
  MinorType type;
  if (Strings.isNullOrEmpty(typeName)) {
    // No type name. VARCHAR is a safe guess
    type = MinorType.VARCHAR;
  } else {
    type = MinorType.valueOf(typeName.toUpperCase());
  }
  // Verify supported types
  switch (type) {
    case VARCHAR:
    case INT:
    case SMALLINT:
    case BIGINT:
    case FLOAT4:
    case FLOAT8:
    case DATE:
    case TIMESTAMP:
    case TIME:
      break;
    default:
      throw UserException.validationError()
          .message("Undefined column types")
          .addContext("Position", patternIndex)
          .addContext("Field name", name)
          .addContext("Type", typeName)
          .build(logger);
  }
  builder.addNullable(name, type);
}
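For illustration, the string-to-enum conversion above works as follows: MinorType is a generated enum, so valueOf() accepts only exact enum names and throws IllegalArgumentException for anything else (Drill's name for a double is FLOAT8, not DOUBLE).
// Illustrative sketch of the conversion used above (assumes MinorType is imported).
MinorType parsed = MinorType.valueOf("timestamp".toUpperCase());  // MinorType.TIMESTAMP
// MinorType.valueOf("DOUBLE") would throw IllegalArgumentException before the switch runs.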
Use of org.apache.drill.common.types.TypeProtos.MinorType in the Apache Drill project.
Class MockGroupScanPOP, method clone.
@Override
public GroupScan clone(List<SchemaPath> columns) {
  if (columns.isEmpty()) {
    throw new IllegalArgumentException("No columns for mock scan");
  }
  List<MockColumn> mockCols = new ArrayList<>();
  Pattern p = Pattern.compile("(\\w+)_([isdb])(\\d*)");
  for (SchemaPath path : columns) {
    String col = path.getLastSegment().getNameSegment().getPath();
    if (SchemaPath.DYNAMIC_STAR.equals(col)) {
      return this;
    }
    Matcher m = p.matcher(col);
    if (!m.matches()) {
      throw new IllegalArgumentException("Badly formatted mock column name: " + col);
    }
    @SuppressWarnings("unused")
    String name = m.group(1);
    String type = m.group(2);
    String length = m.group(3);
    int width = 10;
    if (!length.isEmpty()) {
      width = Integer.parseInt(length);
    }
    MinorType minorType;
    switch (type) {
      case "i":
        minorType = MinorType.INT;
        break;
      case "s":
        minorType = MinorType.VARCHAR;
        break;
      case "d":
        minorType = MinorType.FLOAT8;
        break;
      case "b":
        minorType = MinorType.BIT;
        break;
      default:
        throw new IllegalArgumentException("Unsupported field type " + type + " for mock column " + col);
    }
    MockTableDef.MockColumn mockCol = new MockColumn(col, minorType, DataMode.REQUIRED, width, 0, 0, null, 1, null);
    mockCols.add(mockCol);
  }
  MockScanEntry entry = readEntries.get(0);
  MockColumn[] types = new MockColumn[mockCols.size()];
  mockCols.toArray(types);
  MockScanEntry newEntry = new MockScanEntry(entry.records, true, 0, 1, types);
  List<MockScanEntry> newEntries = new ArrayList<>();
  newEntries.add(newEntry);
  return new MockGroupScanPOP(url, newEntries);
}
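As an aside, the regular expression above encodes the mock column naming convention name_typeLetter followed by an optional width. A minimal sketch with a made-up column name (assuming the java.util.regex imports the method above already uses):
// Hypothetical illustration of parsing a mock column name such as "comment_s20".
Pattern p = Pattern.compile("(\\w+)_([isdb])(\\d*)");
Matcher m = p.matcher("comment_s20");  // made-up column name
if (m.matches()) {
  String typeCode = m.group(2);  // "s"  -> MinorType.VARCHAR in the switch above
  String width = m.group(3);     // "20" -> VARCHAR width of 20
}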
Use of org.apache.drill.common.types.TypeProtos.MinorType in the Apache Drill project.
Class SasBatchReader, method buildSchema.
private TupleMetadata buildSchema() {
  SchemaBuilder builder = new SchemaBuilder();
  List<Column> columns = sasFileReader.getColumns();
  for (Column column : columns) {
    String columnName = column.getName();
    String columnType = column.getType().getSimpleName();
    ColumnFormat columnFormat = column.getFormat();
    try {
      MinorType type = null;
      if (DateTimeConstants.TIME_FORMAT_STRINGS.contains(columnFormat.getName())) {
        type = MinorType.TIME;
      } else if (DateTimeConstants.DATE_FORMAT_STRINGS.containsKey(columnFormat.getName())) {
        type = MinorType.DATE;
      } else if (DateTimeConstants.DATETIME_FORMAT_STRINGS.containsKey(columnFormat.getName())) {
        type = MinorType.TIMESTAMP;
      } else {
        type = getType(columnType);
      }
      builder.addNullable(columnName, type);
    } catch (Exception e) {
      throw UserException.dataReadError()
          .message("Error with type of column " + columnName + "; Type: " + columnType)
          .addContext(errorContext)
          .build(logger);
    }
  }
  return builder.buildSchema();
}
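A minimal sketch (with made-up column names) of the SchemaBuilder calls the loop above relies on, using the same addNullable() and buildSchema() methods shown:
// Hypothetical illustration only; column names are invented for the example.
SchemaBuilder builder = new SchemaBuilder();
builder.addNullable("admit_date", MinorType.DATE);       // column whose SAS format matched a date format
builder.addNullable("patient_name", MinorType.VARCHAR);  // fallback path through getType()
TupleMetadata schema = builder.buildSchema();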
Use of org.apache.drill.common.types.TypeProtos.MinorType in the Apache Drill project.
Class PdfBatchReader, method buildWriterListFromProvidedSchema.
private void buildWriterListFromProvidedSchema(TupleMetadata schema) {
  if (schema == null) {
    buildWriterList();
    return;
  }
  int counter = 0;
  for (MaterializedField field : schema.toFieldList()) {
    String fieldName = field.getName();
    MinorType type = field.getType().getMinorType();
    columnHeaders.add(fieldName);
    switch (type) {
      case VARCHAR:
        writers.add(new StringPdfColumnWriter(counter, fieldName, rowWriter));
        break;
      case SMALLINT:
      case TINYINT:
      case INT:
        writers.add(new IntPdfColumnWriter(counter, fieldName, rowWriter));
        break;
      case BIGINT:
        writers.add(new BigIntPdfColumnWriter(counter, fieldName, rowWriter));
        break;
      case FLOAT4:
      case FLOAT8:
        writers.add(new DoublePdfColumnWriter(counter, fieldName, rowWriter));
        break;
      case DATE:
        writers.add(new DatePdfColumnWriter(counter, fieldName, rowWriter, negotiator));
        break;
      case TIME:
        writers.add(new TimePdfColumnWriter(counter, fieldName, rowWriter, negotiator));
        break;
      case TIMESTAMP:
        writers.add(new TimestampPdfColumnWriter(counter, fieldName, rowWriter, negotiator));
        break;
      default:
        throw UserException.unsupportedError()
            .message("PDF Reader with provided schema does not support " + type.name() + " data type.")
            .addContext(errorContext)
            .build(logger);
    }
  }
}
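For completeness, a rough sketch (column name made up) of how a provided schema yields the MinorType that drives the writer selection above, using only the toFieldList() and getType().getMinorType() calls already shown:
// Hypothetical illustration only.
SchemaBuilder builder = new SchemaBuilder();
builder.addNullable("invoice_total", MinorType.FLOAT8);  // made-up column
TupleMetadata provided = builder.buildSchema();
for (MaterializedField field : provided.toFieldList()) {
  MinorType t = field.getType().getMinorType();  // FLOAT8 -> DoublePdfColumnWriter in the switch above
}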