Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc: class StandaloneExecutor, method readQueriesFile.
private static String readQueriesFile(final String queryFilePath) {
  final StringBuilder sb = new StringBuilder();
  try (final BufferedReader br = new BufferedReader(
      new InputStreamReader(new FileInputStream(queryFilePath), StandardCharsets.UTF_8))) {
    String line = br.readLine();
    while (line != null) {
      sb.append(line);
      sb.append(System.lineSeparator());
      line = br.readLine();
    }
  } catch (IOException e) {
    throw new KsqlException("Could not read the query file. Details: " + e.getMessage(), e);
  }
  return sb.toString();
}
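For illustration, a minimal self-contained sketch of the same pattern (wrap the checked IOException in an unchecked KsqlException that keeps the cause), using java.nio instead of a manual read loop. The class name and file path below are hypothetical, not part of ksql:

import io.confluent.ksql.util.KsqlException;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public final class QueriesFileExample {

  // Same contract as readQueriesFile above: return the file contents,
  // or throw an unchecked KsqlException carrying the original IOException as its cause.
  static String readQueries(final String queryFilePath) {
    try {
      return new String(Files.readAllBytes(Paths.get(queryFilePath)), StandardCharsets.UTF_8);
    } catch (final IOException e) {
      throw new KsqlException("Could not read the query file. Details: " + e.getMessage(), e);
    }
  }

  public static void main(final String[] args) {
    try {
      System.out.println(readQueries("queries.sql")); // hypothetical path
    } catch (final KsqlException e) {
      // Callers can rely on e.getCause() being the underlying IOException.
      System.err.println("Failed to load query file: " + e.getMessage());
    }
  }
}

Note that the original method also normalizes line endings by appending System.lineSeparator() per line, which this sketch omits for brevity.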
Use of io.confluent.ksql.util.KsqlException in project ksql by confluentinc: class KsqlDelimitedDeserializer, method deserialize.
@Override
public GenericRow deserialize(final String topic, final byte[] bytes) {
  if (bytes == null) {
    return null;
  }
  String recordCsvString = new String(bytes, StandardCharsets.UTF_8);
  try {
    List<CSVRecord> csvRecords = CSVParser.parse(recordCsvString, CSVFormat.DEFAULT).getRecords();
    if (csvRecords == null || csvRecords.isEmpty()) {
      throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
    }
    CSVRecord csvRecord = csvRecords.get(0);
    if (csvRecord == null || csvRecord.size() == 0) {
      throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
    }
    List<Object> columns = new ArrayList<>();
    if (csvRecord.size() != schema.fields().size()) {
      throw new KsqlException(String.format(
          "Unexpected field count, csvFields:%d schemaFields:%d line: %s",
          csvRecord.size(), schema.fields().size(), recordCsvString));
    }
    for (int i = 0; i < csvRecord.size(); i++) {
      if (csvRecord.get(i) == null) {
        columns.add(null);
      } else {
        columns.add(enforceFieldType(schema.fields().get(i).schema(), csvRecord.get(i)));
      }
    }
    return new GenericRow(columns);
  } catch (Exception e) {
    throw new SerializationException("Exception in deserializing the delimited row: " + recordCsvString, e);
  }
}
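A minimal, runnable sketch of the same validation-and-wrapping flow: a malformed delimited line is rejected with a KsqlException, which the caller then sees wrapped in a SerializationException, mirroring the catch block above. The class name, input line, and expected field count are hypothetical stand-ins (expectedFields stands in for schema.fields().size()); this is not the ksql deserializer itself:

import io.confluent.ksql.util.KsqlException;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.kafka.common.errors.SerializationException;

import java.nio.charset.StandardCharsets;
import java.util.List;

public final class DelimitedParsingExample {

  public static void main(final String[] args) {
    try {
      parseLine("1,ItemA".getBytes(StandardCharsets.UTF_8), 3); // only two of three expected fields
    } catch (final SerializationException e) {
      // Callers see the SerializationException; the KsqlException describing
      // the malformed line is preserved as its cause.
      System.err.println(e.getMessage());
      System.err.println("cause: " + e.getCause().getMessage());
    }
  }

  // Mirrors the field-count validation and exception wrapping of deserialize() above.
  private static void parseLine(final byte[] bytes, final int expectedFields) {
    final String recordCsvString = new String(bytes, StandardCharsets.UTF_8);
    try {
      final List<CSVRecord> csvRecords = CSVParser.parse(recordCsvString, CSVFormat.DEFAULT).getRecords();
      if (csvRecords.isEmpty()) {
        throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
      }
      final CSVRecord csvRecord = csvRecords.get(0);
      if (csvRecord.size() != expectedFields) {
        throw new KsqlException(String.format(
            "Unexpected field count, csvFields:%d schemaFields:%d line: %s",
            csvRecord.size(), expectedFields, recordCsvString));
      }
    } catch (final Exception e) {
      throw new SerializationException("Exception in deserializing the delimited row: " + recordCsvString, e);
    }
  }
}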