Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class TrailFilePositionSetter, method findTxnScn.
private FilePositionResult findTxnScn(ConcurrentAppendableCompositeFileInputStream stream, long expScn, TransactionSCNFinderCallback callback) throws IOException {
    FilePositionResult result = null;
    ScnTxnPos pos = null;
    callback.begin(expScn);
    byte[] bArr = new byte[4 * 1024];
    File prevFile = null;
    File currFile = null;
    long currPosition = -1;
    List<String> lines = new ArrayList<String>();
    List<Integer> lineEndPos = new ArrayList<Integer>();
    String prevLine = null;
    boolean done = false;
    while (!done) {
        prevFile = currFile;
        int numBytes = stream.read(bArr);
        if (numBytes <= 0)
            break;
        currFile = stream.getCurrentFile();
        currPosition = stream.getCurrentPosition();
        boolean spanFile = false;
        int endOffset = 0;
        if ((currFile != null) && (prevFile != null) && (!currFile.equals(prevFile))) {
            // Crossed a file boundary while reading this block; track the offset where the previous file ends.
            spanFile = true;
            endOffset = (int) (numBytes - currPosition);
        }
        prevLine = splitBytesByNewLines(bArr, numBytes, spanFile, endOffset, prevLine, lines, lineEndPos);
        // On the first read, invoke the beginFileProcessing callback.
        if (prevFile == null)
            callback.beginFileProcessing(currFile.getName());
        int currOffset = 0;
        for (int i = 0; i < lines.size(); i++) {
            String l = lines.get(i);
            // newLineLen can be one of: (-1) file boundary, (1) "\n" or "\r", (2) "\r\n"
            int newLineLen = lineEndPos.get(i) - currOffset - l.length();
            try {
                done = callback.processLine(l, newLineLen);
            } catch (DatabusException e) {
                _log.error("Got Exception when processing line (" + l + ").", e);
                result = FilePositionResult.createErrorResult(e);
                return result;
            }
            if (done)
                break;
            // A file boundary falls on this line.
            if (lineEndPos.get(i) == -1) {
                callback.endFileProcessing(prevFile.getName());
                callback.beginFileProcessing(currFile.getName());
            }
            currOffset = ((lineEndPos.get(i) < 0) ? currOffset + l.length() : lineEndPos.get(i));
        }
        lines.clear();
        lineEndPos.clear();
    }
    // The last transaction could become complete once prevLine is processed.
    if (!done && (prevLine != null)) {
        try {
            callback.processLine(prevLine, NO_NEWLINE_LEN);
        } catch (DatabusException e) {
            if (_log.isDebugEnabled())
                _log.debug("Got Exception when processing line (" + prevLine + ").", e);
            result = FilePositionResult.createErrorResult(e);
            return result;
        }
    }
    pos = callback.getTxnPos();
    if (callback.getNumTxnsSeen() <= 0) {
        result = FilePositionResult.createNoTxnsFoundResult();
    } else if (expScn == USE_LATEST_SCN || expScn == USE_EARLIEST_SCN) {
        // Latest/earliest requests succeed with whatever position the callback settled on.
        result = FilePositionResult.createFoundResult(pos);
    } else {
        // Normal SCN
        if (pos.getMaxScn() == expScn)
            result = FilePositionResult.createFoundResult(pos);
        else
            result = FilePositionResult.createExactScnNotFoundResult(pos);
    }
    return result;
}
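
The loop above depends entirely on the contract of splitBytesByNewLines(): complete lines go into lines, the offset just past each line's terminator goes into lineEndPos (so lineEndPos.get(i) - currOffset - l.length() recovers the terminator length), and an unterminated tail is returned as the carry for the next block. Below is a minimal, self-contained sketch of that contract. It is illustrative only: the real databus method additionally marks file boundaries with -1 entries and handles the carry-over offset accounting, both of which this sketch omits.

import java.util.ArrayList;
import java.util.List;

final class LineSplitSketch {
    /**
     * Splits a block of bytes into complete lines, prepending the carry-over
     * from the previous block to the first line and returning the new carry.
     */
    static String splitBlock(byte[] buf, int numBytes, String carry,
                             List<String> lines, List<Integer> lineEndPos) {
        StringBuilder current = new StringBuilder(carry == null ? "" : carry);
        int i = 0;
        while (i < numBytes) {
            char c = (char) (buf[i] & 0xFF); // single-byte charset assumed
            if (c == '\n' || c == '\r') {
                // Treat "\r\n" as one two-byte terminator; a lone '\r' or '\n' is one byte.
                int termLen = (c == '\r' && i + 1 < numBytes && buf[i + 1] == '\n') ? 2 : 1;
                lines.add(current.toString());
                lineEndPos.add(i + termLen); // offset just past the terminator
                current.setLength(0);
                i += termLen;
            } else {
                current.append(c);
                i++;
            }
        }
        return current.length() > 0 ? current.toString() : null;
    }

    public static void main(String[] args) {
        List<String> lines = new ArrayList<String>();
        List<Integer> ends = new ArrayList<Integer>();
        byte[] block = "txn1\r\ntxn2\npart".getBytes();
        String carry = splitBlock(block, block.length, null, lines, ends);
        System.out.println(lines + " " + ends + " carry=" + carry);
        // Prints: [txn1, txn2] [6, 11] carry=part
        // Terminator lengths recovered as in the loop above: 6-0-4 = 2, 11-6-4 = 1.
    }
}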
Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class BufferInfoRequestProcessor, method processInboundRequest.
private void processInboundRequest(DatabusRequest request, PhysicalPartition pPart) throws IOException, DatabusException {
    DbusEventBuffer evb = _eventBufferMult.getOneBuffer(pPart);
    if (null == evb) {
        LOG.error("BufferInfoRequest : Buffer not available for physical partition :" + pPart);
        throw new BufferNotFoundException("Buffer not available for partition :" + pPart);
    }
    BufferInfoResponse response = new BufferInfoResponse();
    response.setMinScn(evb.getMinScn());
    response.setMaxScn(evb.lastWrittenScn());
    response.setTimestampFirstEvent(evb.getTimestampOfFirstEvent());
    response.setTimestampLatestEvent(evb.getTimestampOfLatestDataEvent());
    writeJsonObjectToResponse(response, request);
}
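
writeJsonObjectToResponse() serializes the response bean onto the request's output; the helper itself isn't shown here. Assuming conventional bean-style JSON serialization of the four properties set above, the payload can be previewed with a self-contained sketch. The BufferInfoSketch bean, its sample values, and the Jackson 2.x mapper are stand-ins, not the databus types.

import com.fasterxml.jackson.databind.ObjectMapper;

// Stand-in bean mirroring the four properties set on BufferInfoResponse above.
public class BufferInfoSketch {
    private long minScn = 100;                  // sample values only
    private long maxScn = 250;
    private long timestampFirstEvent = 1000L;
    private long timestampLatestEvent = 2000L;

    public long getMinScn() { return minScn; }
    public long getMaxScn() { return maxScn; }
    public long getTimestampFirstEvent() { return timestampFirstEvent; }
    public long getTimestampLatestEvent() { return timestampLatestEvent; }

    public static void main(String[] args) throws Exception {
        // Bean-style serialization keyed off the getters, as a JSON mapper would do:
        System.out.println(new ObjectMapper().writeValueAsString(new BufferInfoSketch()));
        // e.g. {"minScn":100,"maxScn":250,"timestampFirstEvent":1000,"timestampLatestEvent":2000}
        // (property order may vary by mapper configuration)
    }
}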
Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class BufferInfoRequestProcessor, method process.
@Override
public DatabusRequest process(DatabusRequest request) throws IOException, RequestProcessingException, DatabusException {
    String category = request.getParams().getProperty(DatabusRequest.PATH_PARAM_NAME);
    if (null == category) {
        throw new InvalidRequestParamValueException(COMMAND_NAME, "category", "null");
    }
    if (category.startsWith(INBOUND_VIEW)) {
        String sourceIdStr = category.substring(INBOUND_VIEW.length());
        sourceIdStr = sourceIdStr.replace('/', ':');
        PhysicalPartition pPartition = PhysicalPartition.parsePhysicalPartitionString(sourceIdStr, ":");
        processInboundRequest(request, pPartition);
    } else {
        throw new InvalidRequestParamValueException(COMMAND_NAME, "category", category);
    }
    return request;
}
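
The only transformation here is string surgery on the request path before PhysicalPartition does the real parsing. A worked example follows; the INBOUND_VIEW value and the partition path are hypothetical, and only the substring/replace steps are taken from the code above.

public class CategoryParseSketch {
    public static void main(String[] args) {
        final String INBOUND_VIEW = "bufferInfo/inbound/";    // assumed constant value
        String category = "bufferInfo/inbound/exampleDb/12";  // hypothetical request path

        // The two steps from process(): strip the prefix, then turn path
        // separators into the ":" delimiter the partition parser expects.
        String sourceIdStr = category.substring(INBOUND_VIEW.length());
        sourceIdStr = sourceIdStr.replace('/', ':');

        System.out.println(sourceIdStr);
        // Prints "exampleDb:12", which parsePhysicalPartitionString() then
        // splits on ":" into the partition name and id.
    }
}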
Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class TypeInfoFactoryInteractive, method buildTableType.
public TableTypeInfo buildTableType(Connection con, String tableOwner, String tableName, String pk, ConsoleReader reader, HashMap<String, String> dbFieldToAvroDataType) throws SQLException, IOException, DatabusException {
    PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        String fullTableName = tableOwner + "." + tableName;
        stmt = con.prepareStatement("SELECT * FROM " + fullTableName + " WHERE 0=1");
        rs = stmt.executeQuery();
        ResultSetMetaData rsmd = rs.getMetaData();
        List<FieldInfo> fields = new ArrayList<FieldInfo>();
        int numColumns = rsmd.getColumnCount();
        for (int column = 1; column <= numColumns; column++) {
            String columnName = rsmd.getColumnName(column);
            System.out.println("Processing column " + tableName + "." + columnName + ":" + rsmd.getColumnTypeName(column));
            int columnPrecision = rsmd.getPrecision(column);
            int columnScale = rsmd.getScale(column);
            String columnTypeName;
            String columnTypeOwner;
            String[] columnTypeParts = rsmd.getColumnTypeName(column).split("\\.");
            if (columnTypeParts.length == 1) {
                columnTypeOwner = null;
                columnTypeName = columnTypeParts[0];
            } else {
                columnTypeOwner = columnTypeParts[0];
                columnTypeName = columnTypeParts[1];
            }
            if (columnTypeName.equals("NUMBER")) {
                System.out.println("If you are not sure about the following question, please talk with your DBA or the database owner");
                System.out.println("The following datatypes will be used by the avro generator: ");
                System.out.println("If scale <= 6 ===> FLOAT (Irrespective of the precision)");
                System.out.println("If scale <= 17 ===> DOUBLE (Irrespective of the precision)");
                System.out.println("If (precision > 9 or precision = 0) and scale = 0 ===> LONG ");
                System.out.println("If precision <= 9 and scale = 0 ===> INTEGER");
                SimpleTypeInfo typeInfoValidate = new SimpleTypeInfo(columnTypeName, columnPrecision, columnScale);
                if (columnPrecision == 0 && columnScale == 0)
                    System.out.println("Unable to determine the scale and precision for this column, please manually verify the scale/precision in the oracle table ALL_TAB_COLUMNS");
                System.out.println("The precision [" + columnPrecision + "] and scale [" + columnScale + "] will be used for the field " + columnName + " which has oracle datatype " + columnTypeName + " and the avro datatype " + typeInfoValidate.getPrimitiveType() + " will be used. (yes - to use the printed values, no - to override the datatype with user input): ");
                // If the hashmap is present, the run is cli-driven: we don't prompt for user input and expect the override to be passed in through the cli.
                if (dbFieldToAvroDataType == null) {
                    String line = checkAndRead(reader);
                    while (true) {
                        if (line.equals("yes")) {
                            System.out.println("Using the precision [" + columnPrecision + "] and scale [" + columnScale + "]");
                            break;
                        } else if (line.equals("no")) {
                            System.out.println("Overriding the avro datatype..");
                            System.out.println("Please enter the avro datatype you would like to use [FLOAT,DOUBLE,LONG,INTEGER]: ");
                            String datatype = checkAndRead(reader);
                            try {
                                ScalePrecision scalePrecision = getScaleAndPrecision(datatype);
                                columnPrecision = scalePrecision.getPrecision();
                                columnScale = scalePrecision.getScale();
                            } catch (DatabusException e) {
                                // Invalid input, retry.
                                continue;
                            }
                            typeInfoValidate = new SimpleTypeInfo(columnTypeName, columnPrecision, columnScale);
                            System.out.println("Based on your input, the avro datatype " + typeInfoValidate.getPrimitiveType() + " will be used for the field " + columnName);
                            break;
                        } else {
                            System.out.println("Invalid input, say 'yes' or 'no'");
                            line = checkAndRead(reader);
                        }
                    }
                } else {
                    if (dbFieldToAvroDataType.containsKey(columnName.trim())) {
                        String avroDataType = dbFieldToAvroDataType.get(columnName.trim());
                        ScalePrecision scalePrecision = getScaleAndPrecision(avroDataType);
                        System.out.println("Using avro data type [" + avroDataType + "] for the column [" + columnName + "]");
                        columnPrecision = scalePrecision.getPrecision();
                        columnScale = scalePrecision.getScale();
                    } else {
                        System.out.println("The override for the column [" + columnName + "] is not present; it is expected to come from the user input in the cli");
                        throw new DatabusException("Number override not present");
                    }
                }
            }
            TypeInfo typeInfo = getTypeInfo(con, columnTypeOwner, columnTypeName, columnPrecision, columnScale, "", reader, dbFieldToAvroDataType);
            FieldInfo field = new FieldInfo(columnName, typeInfo, column - 1);
            fields.add(field);
        }
        return new TableTypeInfo(tableOwner, tableName, fields, pk);
    } catch (IOException e) {
        System.out.println("Unable to process user input, please try again.");
        e.printStackTrace();
        throw e;
    } finally {
        SchemaUtils.close(rs);
        SchemaUtils.close(stmt);
    }
}
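
The printed NUMBER rules overlap at scale = 0 (a scale of 0 also satisfies "scale <= 6"). One consistent reading, with the integral mappings taking precedence, can be written down as a pure function. This is illustrative only; the real decision lives inside SimpleTypeInfo, and the method name here is hypothetical.

public class NumberMappingSketch {
    // One reading of the NUMBER -> Avro rules printed above; the scale == 0
    // cases are checked first to resolve the overlap with "scale <= 6".
    static String avroTypeForOracleNumber(int precision, int scale) {
        if (scale == 0) {
            // Integral NUMBERs: width decides between INTEGER and LONG.
            return (precision > 9 || precision == 0) ? "LONG" : "INTEGER";
        }
        if (scale <= 6) {
            return "FLOAT";   // irrespective of precision
        }
        if (scale <= 17) {
            return "DOUBLE";  // irrespective of precision
        }
        // The printed rules are silent beyond scale 17; the interactive
        // override path above is the fallback for such columns.
        return "DOUBLE";
    }

    public static void main(String[] args) {
        System.out.println(avroTypeForOracleNumber(9, 0));   // INTEGER
        System.out.println(avroTypeForOracleNumber(0, 0));   // LONG
        System.out.println(avroTypeForOracleNumber(10, 2));  // FLOAT
        System.out.println(avroTypeForOracleNumber(38, 10)); // DOUBLE
    }
}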
Use of com.linkedin.databus2.core.DatabusException in project databus by linkedin.
The class ColumnsState, method constructPkeys.
/**
 * Constructs the primary-key pair and stores it in the current state.
 * @param eventFields Map containing dbFieldName => fieldValue
 * @param pkFieldName The name of the primary-key field (comma-separated list); e.g., if the schema has pk=id,member_id, then pkFieldName is "id, member_id".
 *                    This is used only for logging purposes.
 * @param pk The primary key, stored as a class object
 * @param field The current field being processed (Avro)
 * @param databaseFieldName The field being processed (Oracle name)
 * @param fieldValueObj Value of the current field being processed
 * @throws DatabusException
 */
private void constructPkeys(HashMap<String, ColumnState.EventField> eventFields, String pkFieldName, PrimaryKey pk, Schema.Field field, String databaseFieldName, Object fieldValueObj) throws DatabusException {
    if (eventFields.get(databaseFieldName).isKey()) {
        if (!pk.isPartOfPrimaryKey(field))
            throw new DatabusException("The primary key is not as expected. Expected: " + pkFieldName + " found from xml: " + field.name());
        if (fieldValueObj == null)
            throw new DatabusException("Unable to find the value of the object");
        Schema.Type pkFieldType = SchemaHelper.unwindUnionSchema(field).getType();
        KeyPair pair = new KeyPair(fieldValueObj, pkFieldType);
        _keyPairs.add(pair);
    }
}
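
For a composite key, these checks run once per key field as the event's fields are processed. A hypothetical walk-through, with field names, values, and types assumed for illustration:

// With pk = "id,member_id" and both fields LONG-typed, two successful calls
// to constructPkeys() (one per matching field) would leave _keyPairs holding,
// in field order:
//   new KeyPair(1234L, Schema.Type.LONG)   // value of "id"
//   new KeyPair(5678L, Schema.Type.LONG)   // value of "member_id"
// A key field with a null value, or one not declared in the PrimaryKey,
// aborts processing with a DatabusException instead.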