Use of org.apache.flink.api.java.typeutils.ListTypeInfo in project flink by apache.
The class AbstractRowTimeUnboundedPrecedingOver, method open.
@Override
public void open(Configuration parameters) throws Exception {
    function = genAggsHandler.newInstance(getRuntimeContext().getUserCodeClassLoader());
    function.open(new PerKeyStateDataViewStore(getRuntimeContext()));
    output = new JoinedRowData();
    sortedTimestamps = new LinkedList<Long>();
    // initialize accumulator state
    InternalTypeInfo<RowData> accTypeInfo = InternalTypeInfo.ofFields(accTypes);
    ValueStateDescriptor<RowData> accStateDesc =
            new ValueStateDescriptor<RowData>("accState", accTypeInfo);
    accState = getRuntimeContext().getState(accStateDesc);
    // input elements are all binary rows, as they come from the network
    InternalTypeInfo<RowData> inputType = InternalTypeInfo.ofFields(inputFieldTypes);
    ListTypeInfo<RowData> rowListTypeInfo = new ListTypeInfo<RowData>(inputType);
    MapStateDescriptor<Long, List<RowData>> inputStateDesc =
            new MapStateDescriptor<Long, List<RowData>>("inputState", Types.LONG, rowListTypeInfo);
    inputState = getRuntimeContext().getMapState(inputStateDesc);
    initCleanupTimeState("RowTimeUnboundedOverCleanupTime");
    // metrics
    this.numLateRecordsDropped =
            getRuntimeContext().getMetricGroup().counter(LATE_ELEMENTS_DROPPED_METRIC_NAME);
}
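The ListTypeInfo-plus-MapStateDescriptor pairing above recurs throughout these over-window functions. A minimal, self-contained version of the declaration, using String elements instead of Flink's internal RowData (the names here are illustrative, not from the Flink source):

import java.util.List;

import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.ListTypeInfo;

// A per-key map from timestamp to the list of elements seen at that timestamp.
ListTypeInfo<String> elementListTypeInfo = new ListTypeInfo<>(Types.STRING);
MapStateDescriptor<Long, List<String>> bufferStateDesc =
        new MapStateDescriptor<>("bufferState", Types.LONG, elementListTypeInfo);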
Use of org.apache.flink.api.java.typeutils.ListTypeInfo in project flink by apache.
The class RowTimeRowsBoundedPrecedingFunction, method open.
@Override
public void open(Configuration parameters) throws Exception {
    function = genAggsHandler.newInstance(getRuntimeContext().getUserCodeClassLoader());
    function.open(new PerKeyStateDataViewStore(getRuntimeContext()));
    output = new JoinedRowData();
    ValueStateDescriptor<Long> lastTriggeringTsDescriptor =
            new ValueStateDescriptor<Long>("lastTriggeringTsState", Types.LONG);
    lastTriggeringTsState = getRuntimeContext().getState(lastTriggeringTsDescriptor);
    ValueStateDescriptor<Long> dataCountStateDescriptor =
            new ValueStateDescriptor<Long>("processedCountState", Types.LONG);
    counterState = getRuntimeContext().getState(dataCountStateDescriptor);
    InternalTypeInfo<RowData> accTypeInfo = InternalTypeInfo.ofFields(accTypes);
    ValueStateDescriptor<RowData> accStateDesc =
            new ValueStateDescriptor<RowData>("accState", accTypeInfo);
    accState = getRuntimeContext().getState(accStateDesc);
    // input elements are all binary rows, as they come from the network
    InternalTypeInfo<RowData> inputType = InternalTypeInfo.ofFields(inputFieldTypes);
    ListTypeInfo<RowData> rowListTypeInfo = new ListTypeInfo<RowData>(inputType);
    MapStateDescriptor<Long, List<RowData>> inputStateDesc =
            new MapStateDescriptor<Long, List<RowData>>("inputState", Types.LONG, rowListTypeInfo);
    inputState = getRuntimeContext().getMapState(inputStateDesc);
    initCleanupTimeState("RowTimeBoundedRowsOverCleanupTime");
    // metrics
    this.numLateRecordsDropped =
            getRuntimeContext().getMetricGroup().counter(LATE_ELEMENTS_DROPPED_METRIC_NAME);
}
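The lastTriggeringTsState and numLateRecordsDropped registered above combine in processElement to drop late records. A hedged sketch of that check, under assumed names and simplified (the real method also buffers the row and registers an event-time timer):

import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.metrics.Counter;

// Returns true and counts the record if its timestamp is not newer than the
// last triggering timestamp, i.e. the output for that time has already fired.
static boolean isLateRecord(
        long recordTs, ValueState<Long> lastTriggeringTsState, Counter numLateRecordsDropped)
        throws Exception {
    Long lastTriggeringTs = lastTriggeringTsState.value();
    if (lastTriggeringTs != null && recordTs <= lastTriggeringTs) {
        numLateRecordsDropped.inc();
        return true;
    }
    return false;
}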
Use of org.apache.flink.api.java.typeutils.ListTypeInfo in project flink by apache.
The class AbstractStreamArrowPythonOverWindowAggregateFunctionOperator, method open.
@Override
public void open() throws Exception {
    super.open();
    InternalTimerService<VoidNamespace> internalTimerService =
            getInternalTimerService("python-over-window-timers", VoidNamespaceSerializer.INSTANCE, this);
    timerService = new SimpleTimerService(internalTimerService);
    InternalTypeInfo<RowData> inputTypeInfo = InternalTypeInfo.of(inputType);
    ListTypeInfo<RowData> rowListTypeInfo = new ListTypeInfo<>(inputTypeInfo);
    MapStateDescriptor<Long, List<RowData>> inputStateDesc =
            new MapStateDescriptor<>("inputState", Types.LONG, rowListTypeInfo);
    ValueStateDescriptor<Long> lastTriggeringTsDescriptor =
            new ValueStateDescriptor<>("lastTriggeringTsState", Types.LONG);
    lastTriggeringTsState = getRuntimeContext().getState(lastTriggeringTsDescriptor);
    ValueStateDescriptor<Long> cleanupTsStateDescriptor =
            new ValueStateDescriptor<>("cleanupTsState", Types.LONG);
    cleanupTsState = getRuntimeContext().getState(cleanupTsStateDescriptor);
    inputState = getRuntimeContext().getMapState(inputStateDesc);
}
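cleanupTsState and the timer service created above are typically paired: the operator remembers the pending cleanup timestamp and registers an event-time timer for it. A simplified sketch of that pattern under assumed names, not the operator's actual cleanup code:

import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.streaming.api.TimerService;

// Register (or push back) an event-time cleanup timer and remember its timestamp.
static void registerCleanupTimer(
        TimerService timerService, ValueState<Long> cleanupTsState, long cleanupTs)
        throws Exception {
    Long registered = cleanupTsState.value();
    if (registered == null || cleanupTs > registered) {
        if (registered != null) {
            // drop the previous timer so only one cleanup timer is pending per key
            timerService.deleteEventTimeTimer(registered);
        }
        timerService.registerEventTimeTimer(cleanupTs);
        cleanupTsState.update(cleanupTs);
    }
}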
Use of org.apache.flink.api.java.typeutils.ListTypeInfo in project flink by apache.
The class PythonBridgeUtils, method getPickledBytesFromJavaObject.
public static Object getPickledBytesFromJavaObject(Object obj, TypeInformation<?> dataType) throws IOException {
    Pickler pickler = new Pickler();
    initialize();
    if (obj == null) {
        return new byte[0];
    } else {
        if (dataType instanceof SqlTimeTypeInfo) {
            SqlTimeTypeInfo<?> sqlTimeTypeInfo = SqlTimeTypeInfo.getInfoFor(dataType.getTypeClass());
            if (sqlTimeTypeInfo == DATE) {
                return pickler.dumps(((Date) obj).toLocalDate().toEpochDay());
            } else if (sqlTimeTypeInfo == TIME) {
                return pickler.dumps(((Time) obj).toLocalTime().toNanoOfDay() / 1000);
            }
        } else if (dataType instanceof RowTypeInfo || dataType instanceof TupleTypeInfo) {
            TypeInformation<?>[] fieldTypes = ((TupleTypeInfoBase<?>) dataType).getFieldTypes();
            int arity = dataType instanceof RowTypeInfo ? ((Row) obj).getArity() : ((Tuple) obj).getArity();
            List<Object> fieldBytes = new ArrayList<>(arity + 1);
            if (dataType instanceof RowTypeInfo) {
                fieldBytes.add(new byte[] {((Row) obj).getKind().toByteValue()});
            }
            for (int i = 0; i < arity; i++) {
                Object field =
                        dataType instanceof RowTypeInfo ? ((Row) obj).getField(i) : ((Tuple) obj).getField(i);
                fieldBytes.add(getPickledBytesFromJavaObject(field, fieldTypes[i]));
            }
            return fieldBytes;
        } else if (dataType instanceof BasicArrayTypeInfo || dataType instanceof PrimitiveArrayTypeInfo) {
            Object[] objects = (Object[]) obj;
            List<Object> serializedElements = new ArrayList<>(objects.length);
            TypeInformation<?> elementType =
                    dataType instanceof BasicArrayTypeInfo
                            ? ((BasicArrayTypeInfo<?, ?>) dataType).getComponentInfo()
                            : ((PrimitiveArrayTypeInfo<?>) dataType).getComponentType();
            for (Object object : objects) {
                serializedElements.add(getPickledBytesFromJavaObject(object, elementType));
            }
            return pickler.dumps(serializedElements);
        } else if (dataType instanceof MapTypeInfo) {
            List<List<Object>> serializedMapKV = new ArrayList<>(2);
            Map<Object, Object> mapObj = (Map) obj;
            List<Object> keyBytesList = new ArrayList<>(mapObj.size());
            List<Object> valueBytesList = new ArrayList<>(mapObj.size());
            for (Map.Entry entry : mapObj.entrySet()) {
                keyBytesList.add(getPickledBytesFromJavaObject(entry.getKey(), ((MapTypeInfo) dataType).getKeyTypeInfo()));
                valueBytesList.add(getPickledBytesFromJavaObject(entry.getValue(), ((MapTypeInfo) dataType).getValueTypeInfo()));
            }
            serializedMapKV.add(keyBytesList);
            serializedMapKV.add(valueBytesList);
            return pickler.dumps(serializedMapKV);
        } else if (dataType instanceof ListTypeInfo) {
            List objects = (List) obj;
            List<Object> serializedElements = new ArrayList<>(objects.size());
            TypeInformation elementType = ((ListTypeInfo) dataType).getElementTypeInfo();
            for (Object object : objects) {
                serializedElements.add(getPickledBytesFromJavaObject(object, elementType));
            }
            return pickler.dumps(serializedElements);
        }
        if (dataType instanceof BasicTypeInfo
                && BasicTypeInfo.getInfoFor(dataType.getTypeClass()) == FLOAT_TYPE_INFO) {
            // serializing a float with the pickler loses precision, so pickle a string representation
            return pickler.dumps(String.valueOf(obj));
        } else if (dataType instanceof PickledByteArrayTypeInfo || dataType instanceof BasicTypeInfo) {
            return pickler.dumps(obj);
        } else {
            // other type infos are serialized with their corresponding Flink serializer
            TypeSerializer serializer = dataType.createSerializer(null);
            ByteArrayOutputStreamWithPos baos = new ByteArrayOutputStreamWithPos();
            DataOutputViewStreamWrapper baosWrapper = new DataOutputViewStreamWrapper(baos);
            serializer.serialize(obj, baosWrapper);
            return pickler.dumps(baos.toByteArray());
        }
    }
}
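A hedged usage example for the ListTypeInfo branch above, assuming the flink-python PythonBridgeUtils class is on the classpath. Types.LIST is backed by ListTypeInfo, so each element is pickled recursively and the resulting list of byte arrays is pickled as a whole:

import java.io.IOException;
import java.util.Arrays;

import org.apache.flink.api.common.python.PythonBridgeUtils;
import org.apache.flink.api.common.typeinfo.Types;

// Pickles [1, 2, 3] element by element via the ListTypeInfo branch.
static byte[] pickleIntList() throws IOException {
    return (byte[]) PythonBridgeUtils.getPickledBytesFromJavaObject(
            Arrays.asList(1, 2, 3), Types.LIST(Types.INT));
}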
Use of org.apache.flink.api.java.typeutils.ListTypeInfo in project flink by apache.
The class ProcTimeRowsBoundedPrecedingFunction, method open.
@Override
public void open(Configuration parameters) throws Exception {
    function = genAggsHandler.newInstance(getRuntimeContext().getUserCodeClassLoader());
    function.open(new PerKeyStateDataViewStore(getRuntimeContext()));
    output = new JoinedRowData();
    // input elements are all binary rows, as they come from the network
    InternalTypeInfo<RowData> inputType = InternalTypeInfo.ofFields(inputFieldTypes);
    // We keep the elements received in a map state keyed by their ingestion time
    // in the operator. We also keep a counter of processed elements and the
    // timestamp of the oldest element.
    ListTypeInfo<RowData> rowListTypeInfo = new ListTypeInfo<RowData>(inputType);
    MapStateDescriptor<Long, List<RowData>> mapStateDescriptor =
            new MapStateDescriptor<Long, List<RowData>>("inputState", BasicTypeInfo.LONG_TYPE_INFO, rowListTypeInfo);
    inputState = getRuntimeContext().getMapState(mapStateDescriptor);
    InternalTypeInfo<RowData> accTypeInfo = InternalTypeInfo.ofFields(accTypes);
    ValueStateDescriptor<RowData> stateDescriptor =
            new ValueStateDescriptor<RowData>("accState", accTypeInfo);
    accState = getRuntimeContext().getState(stateDescriptor);
    ValueStateDescriptor<Long> processedCountDescriptor =
            new ValueStateDescriptor<Long>("processedCountState", Types.LONG);
    counterState = getRuntimeContext().getState(processedCountDescriptor);
    ValueStateDescriptor<Long> smallestTimestampDescriptor =
            new ValueStateDescriptor<Long>("smallestTSState", Types.LONG);
    smallestTsState = getRuntimeContext().getState(smallestTimestampDescriptor);
    initCleanupTimeState("ProcTimeBoundedRowsOverCleanupTime");
}
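A simplified sketch of the buffering step this state layout supports in processElement, under assumed names rather than the exact Flink code: each arriving row is appended to the list stored under the current processing time.

import java.util.ArrayList;
import java.util.List;

import org.apache.flink.api.common.state.MapState;
import org.apache.flink.table.data.RowData;

// Rows arriving at the same processing-time instant share one list entry.
static void bufferRow(MapState<Long, List<RowData>> inputState, long currentTime, RowData row)
        throws Exception {
    List<RowData> rowList = inputState.get(currentTime);
    if (rowList == null) {
        rowList = new ArrayList<>();
    }
    rowList.add(row);
    inputState.put(currentTime, rowList);
}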