Usage of org.apache.flink.fs.s3.common.writer.S3AccessHelper in the Apache Flink project.
Shown below: the create method of the class AbstractS3FileSystemFactory.
@Override
public FileSystem create(URI fsUri) throws IOException {
    // Fall back to an empty configuration when the factory was never configured.
    Configuration config = this.flinkConfig;
    if (config == null) {
        LOG.warn("Creating S3 FileSystem without configuring the factory. All behavior will be default.");
        config = new Configuration();
    }

    LOG.debug("Creating S3 file system backed by {}", name);
    LOG.debug("Loading Hadoop configuration for {}", name);

    try {
        // Create and initialize the underlying Hadoop file system first.
        final org.apache.hadoop.conf.Configuration hadoopConfig =
                hadoopConfigLoader.getOrLoadHadoopConfig();
        final org.apache.hadoop.fs.FileSystem hadoopFs = createHadoopFileSystem();
        hadoopFs.initialize(getInitURI(fsUri, hadoopConfig), hadoopConfig);

        // Entropy injection settings: an optional key; when the key is set,
        // the configured length must be a positive number of characters.
        final String entropyKey = config.getString(ENTROPY_INJECT_KEY_OPTION);
        int entropyLength = -1;
        if (entropyKey != null) {
            if (entropyKey.matches(INVALID_ENTROPY_KEY_CHARS)) {
                throw new IllegalConfigurationException(
                        "Invalid character in value for "
                                + ENTROPY_INJECT_KEY_OPTION.key()
                                + " : "
                                + entropyKey);
            }
            entropyLength = config.getInteger(ENTROPY_INJECT_LENGTH_OPTION);
            if (entropyLength <= 0) {
                throw new IllegalConfigurationException(
                        ENTROPY_INJECT_LENGTH_OPTION.key() + " must configure a value > 0");
            }
        }

        // Local staging area for uploads: the first configured temp directory.
        final String[] tmpDirs = ConfigurationUtils.parseTempDirectories(config);
        Preconditions.checkArgument(tmpDirs.length > 0);
        final String stagingDir = tmpDirs[0];

        final long minPartSize = config.getLong(PART_UPLOAD_MIN_SIZE);
        final int concurrentUploads = config.getInteger(MAX_CONCURRENT_UPLOADS);
        final S3AccessHelper accessHelper = getS3AccessHelper(hadoopFs);

        return new FlinkS3FileSystem(
                hadoopFs,
                stagingDir,
                entropyKey,
                entropyLength,
                accessHelper,
                minPartSize,
                concurrentUploads);
    } catch (IOException e) {
        // I/O failures propagate unchanged.
        throw e;
    } catch (Exception e) {
        // Anything else (e.g. configuration problems) is wrapped, keeping the cause.
        throw new IOException(e.getMessage(), e);
    }
}
Aggregations