Use of org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException in project carbondata by apache.
Source: class HiveCarbonUtil, method getCarbonLoadModel.
/**
 * Builds a {@link CarbonLoadModel} for the given Hive table.
 *
 * <p>If a schema file exists at the table location, the table is read from the store and
 * treated as transactional. Otherwise the schema is either inferred from an existing carbon
 * data file at the location or constructed from the supplied Hive column metadata, and the
 * table is treated as non-transactional.
 *
 * @param tableName         table name
 * @param databaseName      database name
 * @param location          table location (store path)
 * @param sortColumnsString comma-separated sort columns
 * @param columns           column names (becomes the load "fileheader" option)
 * @param columnTypes       column type strings, parallel to {@code columns}
 * @param configuration     Hadoop configuration used for file access
 * @return a fully initialized load model with parsers skipped and metrics attached
 * @throws RuntimeException if the schema cannot be fetched or the load model cannot be built
 */
public static CarbonLoadModel getCarbonLoadModel(String tableName, String databaseName, String location, String sortColumnsString, String[] columns, String[] columnTypes, Configuration configuration) {
    CarbonTable table;
    try {
        String schemaFilePath = CarbonTablePath.getSchemaFilePath(location, configuration);
        AbsoluteTableIdentifier identifier =
            AbsoluteTableIdentifier.from(location, databaseName, tableName, "");
        boolean schemaFileExists = FileFactory.getCarbonFile(schemaFilePath).exists();
        if (schemaFileExists) {
            // Schema file present in the store: transactional table.
            table = SchemaReader.readCarbonTableFromStore(identifier);
            table.setTransactionalTable(true);
        } else {
            // No schema file: non-transactional table. Prefer inferring the schema from an
            // existing carbon data file; fall back to the Hive column metadata.
            String externalDataFile = CarbonUtil.getFilePathExternalFilePath(location, configuration);
            if (externalDataFile != null) {
                table = CarbonTable.buildFromTableInfo(
                    SchemaReader.inferSchema(identifier, false, configuration));
            } else {
                table = CarbonTable.buildFromTableInfo(
                    getTableInfo(tableName, databaseName, location, sortColumnsString,
                        columns, columnTypes, new ArrayList<>()));
            }
            table.setTransactionalTable(false);
        }
    } catch (SQLException | IOException e) {
        throw new RuntimeException("Unable to fetch schema for the table: " + tableName, e);
    }
    // Build the load model; the column list drives the "fileheader" load option.
    Map<String, String> loadOptions = new HashMap<>();
    loadOptions.put("fileheader", Strings.mkString(columns, ","));
    CarbonLoadModel model;
    try {
        model = new CarbonLoadModelBuilder(table).build(loadOptions, System.currentTimeMillis(), "");
    } catch (InvalidLoadOptionException | IOException e) {
        throw new RuntimeException(e);
    }
    model.setSkipParsers();
    model.setMetrics(new DataLoadMetrics());
    return model;
}
Use of org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException in project carbondata by apache.
Source: class CarbonReaderTest, method testReadUserSchemaOfComplex.
// TODO: support get schema of complex data type
@Ignore
public void testReadUserSchemaOfComplex() throws IOException {
    String path = "./testWriteFiles";
    FileUtils.deleteDirectory(new File(path));
    // Avro schema: record with a string, an int, a nested record and an int array.
    String mySchema = "{" + " \"name\": \"address\", " + " \"type\": \"record\", " + " \"fields\": [ " + " { \"name\": \"name\", \"type\": \"string\"}, " + " { \"name\": \"age\", \"type\": \"int\"}, " + " { " + " \"name\": \"address\", " + " \"type\": { " + " \"type\" : \"record\", " + " \"name\" : \"my_address\", " + " \"fields\" : [ " + " {\"name\": \"street\", \"type\": \"string\"}, " + " {\"name\": \"city\", \"type\": \"string\"} " + " ]} " + " }, " + " {\"name\" :\"doorNum\", " + " \"type\" : { " + " \"type\" :\"array\", " + " \"items\":{ " + " \"name\" :\"EachdoorNums\", " + " \"type\" : \"int\", " + " \"default\":-1} " + " } " + " }] " + "}";
    String json = "{\"name\":\"bob\", \"age\":10, \"address\" : {\"street\":\"abc\", \"city\":\"bang\"}, " + " \"doorNum\" : [1,2,3,4]}";
    try {
        WriteAvroComplexData(mySchema, json, path);
    } catch (InvalidLoadOptionException e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
    File folder = new File(path);
    Assert.assertTrue(folder.exists());
    // Exactly one fact (data) file must have been written.
    File[] dataFiles = folder.listFiles(new FileFilter() {
        @Override
        public boolean accept(File pathname) {
            return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
        }
    });
    Assert.assertNotNull(dataFiles);
    Assert.assertEquals(1, dataFiles.length);
    // Read the user schema back from the index file.
    File[] indexFiles = new File(path).listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.endsWith("carbonindex");
        }
    });
    // Guard before indexing: listFiles may return null, and an empty result would
    // otherwise surface as an ArrayIndexOutOfBoundsException instead of a clear failure.
    Assert.assertNotNull(indexFiles);
    Assert.assertTrue(indexFiles.length > 0);
    Schema schema = CarbonSchemaReader.readSchema(indexFiles[0].getAbsolutePath()).asOriginOrder();
    // Hoist getFields() out of the loop instead of calling it three times per iteration.
    org.apache.carbondata.sdk.file.Field[] schemaFields = schema.getFields();
    for (int i = 0; i < schemaFields.length; i++) {
        System.out.println(schemaFields[i].getFieldName() + "\t" + schemaFields[i].getSchemaOrdinal());
    }
    FileUtils.deleteDirectory(new File(path));
}
Use of org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException in project carbondata by apache.
Source: class CSVCarbonWriterTest, method testWhenWriterthrowsError.
/**
 * Verifies that writing through a writer whose output directory has been deleted
 * surfaces an {@link IOException} to the caller.
 */
@Test(expected = IOException.class)
public void testWhenWriterthrowsError() throws IOException {
    CarbonWriter carbonWriter = null;
    String path = "./testWriteFiles";
    FileUtils.deleteDirectory(new File(path));
    Field[] fields = new Field[2];
    fields[0] = new Field("name", DataTypes.STRING);
    fields[1] = new Field("age", DataTypes.INT);
    try {
        carbonWriter = CarbonWriter.builder().outputPath(path).withCsvInput(new Schema(fields)).writtenBy("CSVCarbonWriterTest").build();
    } catch (InvalidLoadOptionException e) {
        e.printStackTrace();
        // Assert.fail preserves the failure message, unlike assertTrue(false),
        // and matches the style used by the sibling tests.
        Assert.fail(e.getMessage());
    }
    carbonWriter.write("babu,1");
    carbonWriter.close();
}
Use of org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException in project carbondata by apache.
Source: class ImageTest, method testWriteNonBase64WithBase64Decoder.
/**
 * Verifies that writing non-base64 data to a writer configured with the base64
 * binary decoder is rejected when the writer is closed.
 */
@Test
public void testWriteNonBase64WithBase64Decoder() throws IOException, InvalidLoadOptionException, InterruptedException {
    String imagePath = "./src/test/resources/image/carbondatalogo.jpg";
    int num = 1;
    int rows = 10;
    String path = "./target/binary";
    try {
        FileUtils.deleteDirectory(new File(path));
    } catch (IOException e) {
        e.printStackTrace();
    }
    Field[] fields = new Field[7];
    fields[0] = new Field("name", DataTypes.STRING);
    fields[1] = new Field("age", DataTypes.INT);
    fields[2] = new Field("image1", DataTypes.BINARY);
    fields[3] = new Field("image2", DataTypes.BINARY);
    fields[4] = new Field("image3", DataTypes.BINARY);
    fields[5] = new Field("decodeString", DataTypes.BINARY);
    fields[6] = new Field("decodeByte", DataTypes.BINARY);
    byte[] originBinary = null;
    // read and write image data
    for (int j = 0; j < num; j++) {
        CarbonWriter writer = CarbonWriter.builder().outputPath(path).withCsvInput(new Schema(fields)).writtenBy("SDKS3Example").withPageSizeInMb(1).withLoadOption("binary_decoder", "base64").build();
        for (int i = 0; i < rows; i++) {
            // try-with-resources: the original leaked the stream if writer.write threw.
            try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(imagePath))) {
                originBinary = new byte[bis.available()];
                while ((bis.read(originBinary)) != -1) {
                }
                // "^YWJj" is deliberately NOT valid base64 ('^' is outside the alphabet).
                writer.write(new Object[] { "robot" + (i % 10), i, originBinary, originBinary, originBinary, "^YWJj", "^YWJj".getBytes() });
            }
        }
        try {
            writer.close();
            // close() must reject the non-base64 payload; reaching here is a failure.
            Assert.fail("Expected close() to fail for non-base64 data with base64 decoder");
        } catch (Exception e) {
            Assert.assertTrue(e.getMessage().contains("Binary decoder is base64, but data is not base64"));
        }
    }
}
Use of org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException in project carbondata by apache.
Source: class AvroCarbonWriterTest, method testExceptionForInvalidDate.
/**
 * Verifies that an invalid DATE value with bad_records_action=fail aborts the load
 * with a "bad record" error.
 */
@Test
public void testExceptionForInvalidDate() throws IOException, InvalidLoadOptionException {
    Field[] field = new Field[2];
    field[0] = new Field("name", DataTypes.STRING);
    field[1] = new Field("date", DataTypes.DATE);
    CarbonWriterBuilder writer = CarbonWriter.builder().uniqueIdentifier(System.currentTimeMillis()).outputPath(path);
    try {
        Map<String, String> loadOptions = new HashMap<String, String>();
        // "fail" makes the load abort on the first bad record instead of skipping it.
        loadOptions.put("bad_records_action", "fail");
        CarbonWriter carbonWriter = writer.withLoadOptions(loadOptions).withCsvInput(new org.apache.carbondata.sdk.file.Schema(field)).writtenBy("AvroCarbonWriterTest").build();
        // Year 2233 with day-month-year ordering is not a valid value for the
        // default date format, so the load must fail.
        carbonWriter.write(new String[] { "k", "20-02-2233" });
        carbonWriter.close();
        Assert.fail();
    } catch (Exception e) {
        // Use Assert.assertTrue, not the bare `assert` keyword: `assert` is a no-op
        // unless the JVM runs with -ea, so the check could silently never execute.
        Assert.assertTrue(e.getMessage().contains("Data load failed due to bad record"));
    }
    FileUtils.deleteDirectory(new File(path));
}
Aggregations