Use of org.apache.flink.annotation.VisibleForTesting in project flink by apache.
The class HiveCatalog, method getHiveTable.

@VisibleForTesting
public Table getHiveTable(ObjectPath tablePath) throws TableNotExistException {
    try {
        Table table = client.getTable(tablePath.getDatabaseName(), tablePath.getObjectName());
        boolean isHiveTable;
        if (table.getParameters().containsKey(CatalogPropertiesUtil.IS_GENERIC)) {
            isHiveTable = !Boolean.parseBoolean(table.getParameters().remove(CatalogPropertiesUtil.IS_GENERIC));
        } else {
            isHiveTable =
                    !table.getParameters().containsKey(FLINK_PROPERTY_PREFIX + CONNECTOR.key())
                            && !table.getParameters().containsKey(FLINK_PROPERTY_PREFIX + CONNECTOR_TYPE);
        }
        // For Hive tables, we add the connector property.
        if (isHiveTable) {
            table.getParameters().put(CONNECTOR.key(), IDENTIFIER);
        }
        return table;
    } catch (NoSuchObjectException e) {
        throw new TableNotExistException(getName(), tablePath);
    } catch (TException e) {
        throw new CatalogException(
                String.format("Failed to get table %s from Hive metastore", tablePath.getFullName()), e);
    }
}
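A minimal sketch of how a test might call this method. The catalog name, Hive conf directory, and table path below are placeholders, not values from the Flink sources:

import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.hadoop.hive.metastore.api.Table;

HiveCatalog catalog = new HiveCatalog("myhive", "default", "/opt/hive/conf"); // hypothetical setup
catalog.open();
try {
    // For a native Hive table, getHiveTable injects connector=hive into the parameters.
    Table table = catalog.getHiveTable(new ObjectPath("default", "orders"));
    System.out.println(table.getParameters().get("connector"));
} catch (TableNotExistException e) {
    // Thrown when the table is missing from the metastore.
} finally {
    catalog.close();
}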
Use of org.apache.flink.annotation.VisibleForTesting in project flink by apache.
The class StandaloneApplicationClusterEntryPoint, method loadConfigurationFromClusterConfig.

@VisibleForTesting
static Configuration loadConfigurationFromClusterConfig(
        StandaloneApplicationClusterConfiguration clusterConfiguration) {
    Configuration configuration = loadConfiguration(clusterConfiguration);
    setStaticJobId(clusterConfiguration, configuration);
    SavepointRestoreSettings.toConfiguration(
            clusterConfiguration.getSavepointRestoreSettings(), configuration);
    return configuration;
}
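The SavepointRestoreSettings.toConfiguration step is the part that is easy to demonstrate in isolation; a minimal sketch, with the savepoint path as a placeholder:

import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;

Configuration configuration = new Configuration();
// Restore from a savepoint, allowing state that cannot be mapped to be skipped.
SavepointRestoreSettings settings =
        SavepointRestoreSettings.forPath("hdfs:///savepoints/sp-1", true); // placeholder path
SavepointRestoreSettings.toConfiguration(settings, configuration);
// configuration now carries the savepoint restore options (path and allow-non-restored-state).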
Use of org.apache.flink.annotation.VisibleForTesting in project flink by apache.
The class FlinkConfMountDecorator, method getFlinkConfData.

@VisibleForTesting
String getFlinkConfData(Map<String, String> propertiesMap) throws IOException {
    try (StringWriter sw = new StringWriter();
            PrintWriter out = new PrintWriter(sw)) {
        propertiesMap.forEach((k, v) -> {
            out.print(k);
            out.print(": ");
            out.println(v);
        });
        return sw.toString();
    }
}
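A standalone sketch of the same rendering logic; the config keys and values are illustrative only:

import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.LinkedHashMap;
import java.util.Map;

Map<String, String> props = new LinkedHashMap<>();
props.put("jobmanager.rpc.address", "flink-jobmanager"); // illustrative values
props.put("taskmanager.numberOfTaskSlots", "4");

StringWriter sw = new StringWriter();
try (PrintWriter out = new PrintWriter(sw)) {
    // Render each entry as "key: value" on its own line, the flink-conf.yaml layout.
    props.forEach((k, v) -> {
        out.print(k);
        out.print(": ");
        out.println(v);
    });
}
String flinkConfData = sw.toString();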
Use of org.apache.flink.annotation.VisibleForTesting in project flink by apache.
The class KafkaSource, method createReader.

@VisibleForTesting
SourceReader<OUT, KafkaPartitionSplit> createReader(
        SourceReaderContext readerContext, Consumer<Collection<String>> splitFinishedHook)
        throws Exception {
    FutureCompletingBlockingQueue<RecordsWithSplitIds<ConsumerRecord<byte[], byte[]>>> elementsQueue =
            new FutureCompletingBlockingQueue<>();
    deserializationSchema.open(
            new DeserializationSchema.InitializationContext() {
                @Override
                public MetricGroup getMetricGroup() {
                    return readerContext.metricGroup().addGroup("deserializer");
                }

                @Override
                public UserCodeClassLoader getUserCodeClassLoader() {
                    return readerContext.getUserCodeClassLoader();
                }
            });
    final KafkaSourceReaderMetrics kafkaSourceReaderMetrics =
            new KafkaSourceReaderMetrics(readerContext.metricGroup());
    Supplier<KafkaPartitionSplitReader> splitReaderSupplier =
            () -> new KafkaPartitionSplitReader(props, readerContext, kafkaSourceReaderMetrics);
    KafkaRecordEmitter<OUT> recordEmitter = new KafkaRecordEmitter<>(deserializationSchema);
    return new KafkaSourceReader<>(
            elementsQueue,
            new KafkaSourceFetcherManager(elementsQueue, splitReaderSupplier::get, splitFinishedHook),
            recordEmitter,
            toConfiguration(props),
            readerContext,
            kafkaSourceReaderMetrics);
}
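In application code, createReader is reached through the builder API rather than called directly. A minimal sketch of constructing a KafkaSource, with the broker address, topic, and group id as placeholders:

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;

KafkaSource<String> source = KafkaSource.<String>builder()
        .setBootstrapServers("localhost:9092") // placeholder broker address
        .setTopics("input-topic") // placeholder topic
        .setGroupId("example-group") // placeholder consumer group
        .setStartingOffsets(OffsetsInitializer.earliest())
        .setValueOnlyDeserializer(new SimpleStringSchema())
        .build();
// Passing this source to env.fromSource(...) causes the runtime to invoke
// createReader(...) internally when the job starts.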
Use of org.apache.flink.annotation.VisibleForTesting in project flink by apache.
The class LocalRecoverableWriter, method generateStagingTempFilePath.

@VisibleForTesting
public static File generateStagingTempFilePath(File targetFile) {
    checkArgument(!targetFile.isDirectory(), "targetFile must not be a directory");
    final File parent = targetFile.getParentFile();
    final String name = targetFile.getName();
    checkArgument(parent != null, "targetFile must not be the root directory");
    while (true) {
        File candidate =
                new File(parent, "." + name + ".inprogress." + UUID.randomUUID().toString());
        if (!candidate.exists()) {
            return candidate;
        }
    }
}
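A quick sketch of what the method produces; the target path below is hypothetical:

import java.io.File;
import org.apache.flink.core.fs.local.LocalRecoverableWriter;

File target = new File("/data/output/part-0"); // hypothetical target file
File staging = LocalRecoverableWriter.generateStagingTempFilePath(target);
// staging is a hidden sibling such as /data/output/.part-0.inprogress.<random-uuid>,
// which the retry loop guarantees does not collide with an existing file.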