Usage example of com.dtstack.taier.pluginapi.exception.PluginDefineException in the project Taier by DTStack.
Class ClientExt, method creatDirIfPresent:
/**
 * Ensures a local directory (named after the configuration md5sum under
 * {@code tmpHadoopFilePath}) exists and is populated with the remote
 * configuration files, downloading them from HDFS when absent.
 *
 * @return the local configuration directory path
 * @throws PluginDefineException if the directory cannot be created or the
 *         configuration download fails
 */
private String creatDirIfPresent() {
    String confMd5Sum = sparkYarnConfig.getMd5sum();
    String confFileDirName = String.format("%s/%s", tmpHadoopFilePath, confMd5Sum);
    String remotePath = sparkYarnConfig.getConfHdfsPath();
    File dirFile = new File(confFileDirName);
    try {
        Files.createParentDirs(dirFile);
    } catch (IOException e) {
        // Preserve the underlying IOException so the real failure reason is not lost.
        throw new PluginDefineException(String.format("can not create dir '%s' on engine", dirFile.getParent()), e);
    }
    if (dirFile.exists()) {
        File[] files = dirFile.listFiles();
        // A non-empty directory means the config was already downloaded; reuse it.
        if (files != null && files.length > 0) {
            return confFileDirName;
        }
    } else if (!dirFile.mkdir()) {
        throw new PluginDefineException(String.format("can not create dir '%s' on engine", confFileDirName));
    }
    boolean downLoadSuccess = filesystemManager.downloadDir(remotePath, confFileDirName);
    LOG.info("downloadDir remotePath:{} confFileDirName:{} status is: {} ", remotePath, confFileDirName, downLoadSuccess);
    if (!downLoadSuccess) {
        throw new PluginDefineException("yarn-site.xml/hdfs-site.xml/hive-site.xml configuration download failed");
    }
    return confFileDirName;
}
Usage example of com.dtstack.taier.pluginapi.exception.PluginDefineException in the project Taier by DTStack.
Class PerJobClientFactory, method appendJobConfigAndInitFs:
/**
 * Merges the job's own properties into the Flink {@link Configuration},
 * applies HA / plugin-load / classloader settings, and initializes the
 * Flink {@link FileSystem} with the resulting configuration.
 *
 * @param jobClient     the job whose properties are merged in
 * @param configuration the base Flink configuration to enrich (mutated in place)
 * @return the enriched configuration
 * @throws PluginDefineException if {@code FileSystem.initialize} fails
 */
private Configuration appendJobConfigAndInitFs(JobClient jobClient, Configuration configuration) {
    Properties properties = jobClient.getConfProperties();
    if (properties != null) {
        for (Object key : properties.keySet()) {
            String keyStr = key.toString();
            // Only keys containing a '.' (Flink-style keys) or the log-level key are forwarded.
            if (!StringUtils.contains(keyStr, ".") && !StringUtils.equalsIgnoreCase(keyStr, ConfigConstrant.LOG_LEVEL_KEY)) {
                continue;
            }
            String value = properties.getProperty(keyStr);
            if (StringUtils.equalsIgnoreCase(keyStr, SecurityOptions.KERBEROS_LOGIN_CONTEXTS.key()) && StringUtils.isNotEmpty(value)) {
                // Merge the job's kerberos login contexts with any already configured,
                // stripping whitespace and de-duplicating entries.
                value = StringUtils.replacePattern(value, "\\s*", "");
                String contexts = configuration.get(SecurityOptions.KERBEROS_LOGIN_CONTEXTS);
                contexts = StringUtils.replacePattern(contexts, "\\s*", "");
                contexts = StringUtils.isNotEmpty(contexts) ? String.format("%s,%s", value, contexts) : value;
                List<String> contextsTmp = Arrays.asList(StringUtils.split(contexts, ","));
                Set<String> contextsSet = new HashSet<>(contextsTmp);
                value = StringUtils.join(contextsSet, ",");
            }
            configuration.setString(keyStr, value);
        }
    }
    String taskId = jobClient.getJobId();
    configuration.setString(ConfigConstrant.KEY_PROMGATEWAY_JOBNAME, taskId);
    ClusterMode clusterMode = ClusterMode.getClusteMode(flinkConfig.getClusterMode());
    boolean isPerjob = ClusterMode.isPerjob(clusterMode);
    if (!flinkConfig.getFlinkHighAvailability() && !isPerjob) {
        setNoneHaModeConfig(configuration);
    } else {
        // Per-job (or HA-enabled) deployments use ZooKeeper HA keyed by the job id.
        configuration.setString(HighAvailabilityOptions.HA_MODE, HighAvailabilityMode.ZOOKEEPER.toString());
        configuration.setString(HighAvailabilityOptions.HA_CLUSTER_ID, jobClient.getJobId());
    }
    configuration.setString(YarnConfigOptions.APPLICATION_NAME, jobClient.getJobName());
    if (StringUtils.isNotBlank(flinkConfig.getPluginLoadMode()) && ConfigConstrant.FLINK_PLUGIN_SHIPFILE_LOAD.equalsIgnoreCase(flinkConfig.getPluginLoadMode())) {
        configuration.setString(ConfigConstrant.FLINK_PLUGIN_LOAD_MODE, flinkConfig.getPluginLoadMode());
    }
    // Classloader caching defaults to enabled unless explicitly overridden.
    String classloaderCache = configuration.getString(ClassLoaderType.CLASSLOADER_DTSTACK_CACHE, ClassLoaderType.CLASSLOADER_DTSTACK_CACHE_TRUE);
    configuration.setString(ClassLoaderType.CLASSLOADER_DTSTACK_CACHE, classloaderCache);
    String append = configuration.getString(CoreOptions.ALWAYS_PARENT_FIRST_LOADER_PATTERNS_ADDITIONAL);
    if (jobClient.getJobType() == EJobType.SQL || jobClient.getJobType() == EJobType.SYNC) {
        // Jackson must be loaded parent-first for SQL/SYNC jobs to avoid classloader conflicts.
        String dtstackAppend = "com.fasterxml.jackson.";
        if (StringUtils.isNotEmpty(append)) {
            dtstackAppend = dtstackAppend + ";" + append;
        }
        configuration.setString(CoreOptions.ALWAYS_PARENT_FIRST_LOADER_PATTERNS_ADDITIONAL, dtstackAppend);
    }
    try {
        FileSystem.initialize(configuration);
    } catch (Exception e) {
        LOG.error("FileSystem initialize failed", e);
        throw new PluginDefineException(e);
    }
    return configuration;
}
Usage example of com.dtstack.taier.pluginapi.exception.PluginDefineException in the project Taier by DTStack.
Class SessionClientFactory, method initYarnClusterClient:
/**
 * Locates an existing Flink session on the YARN cluster and builds a
 * {@link ClusterClient} attached to it.
 *
 * @return a cluster client connected to the running Flink session
 * @throws PluginDefineException if no session application is found or the
 *         cluster cannot be retrieved
 */
public ClusterClient<ApplicationId> initYarnClusterClient() {
    // Work on a copy so the shared configuration is not mutated.
    Configuration newConf = new Configuration(flinkConfiguration);
    ApplicationId applicationId = acquireAppIdAndSetClusterId(newConf);
    if (applicationId == null) {
        throw new PluginDefineException("No flink session found on yarn cluster.");
    }
    if (!flinkConfig.getFlinkHighAvailability()) {
        setNoneHaModeConfig(newConf);
    }
    YarnClusterDescriptor clusterDescriptor = getClusterDescriptor(newConf, flinkClientBuilder.getYarnConf());
    ClusterClient<ApplicationId> clusterClient = null;
    try {
        ClusterClientProvider<ApplicationId> clusterClientProvider = clusterDescriptor.retrieve(applicationId);
        clusterClient = clusterClientProvider.getClusterClient();
    } catch (Exception e) {
        LOG.info("No flink session, Couldn't retrieve Yarn cluster.", e);
        // Preserve the cause so callers can see why retrieval failed.
        throw new PluginDefineException("No flink session, Couldn't retrieve Yarn cluster.", e);
    }
    // Success path: log at INFO, not WARN.
    LOG.info("---init flink client with yarn session success----");
    return clusterClient;
}
Usage example of com.dtstack.taier.pluginapi.exception.PluginDefineException in the project Taier by DTStack.
Class StandaloneClientFactory, method createClusterClient:
/**
 * Connects to an already-running standalone Flink cluster identified by the
 * cluster id found in {@code flinkConfiguration}.
 *
 * @return a client for the standalone cluster
 * @throws PluginDefineException if no cluster id is configured or the
 *         cluster cannot be retrieved
 */
private ClusterClient createClusterClient() {
    ClusterClientFactory<StandaloneClusterId> clientFactory = new org.apache.flink.client.deployment.StandaloneClientFactory();
    final StandaloneClusterId clusterId = clientFactory.getClusterId(flinkConfiguration);
    if (clusterId == null) {
        throw new PluginDefineException("No cluster id was specified. Please specify a cluster to which you would like to connect.");
    }
    // try-with-resources guarantees the descriptor is closed after retrieval.
    try (ClusterDescriptor<StandaloneClusterId> descriptor = clientFactory.createClusterDescriptor(flinkConfiguration)) {
        return descriptor.retrieve(clusterId).getClusterClient();
    } catch (ClusterRetrieveException e) {
        LOG.error("No standalone session, Couldn't retrieve cluster Client.", e);
        throw new PluginDefineException("No standalone session, Couldn't retrieve cluster Client.", e);
    }
}
Usage example of com.dtstack.taier.pluginapi.exception.PluginDefineException in the project Taier by DTStack.
Class PrepareOperator, method getResourceFile:
/**
 * Extracts the resource file path referenced by a prepare-style SQL statement.
 *
 * @param sql the SQL text expected to match {@code resourceFilePattern}
 * @return a {@link File} pointing at the captured path (group 1 of the pattern)
 * @throws PluginDefineException if the SQL does not match the pattern
 */
public static File getResourceFile(String sql) {
    Matcher m = resourceFilePattern.matcher(sql);
    if (m.find()) {
        return new File(m.group(1));
    }
    throw new PluginDefineException("Get Resource File Error: " + sql);
}
Aggregations