Use of com.dtstack.taier.flink.FlinkConfig in project Taier by DTStack.
The class AbstractClientFactoryTest, method testSetHdfsFlinkJarPath:
@Test
public void testSetHdfsFlinkJarPath() {
    FlinkConfig flinkConfig = new FlinkConfig();
    flinkConfig.setRemotePluginRootDir("/data/insight_plugin/flinkplugin");
    Configuration flinkConfiguration = abstractClientFactory.setHdfsFlinkJarPath(flinkConfig, new Configuration());
    Assert.assertFalse(flinkConfiguration.containsKey("remoteFlinkJarPath"));
    Assert.assertFalse(flinkConfiguration.containsKey("remotePluginRootDir"));
    Assert.assertFalse(flinkConfiguration.containsKey("flinkJarPath"));
    Assert.assertFalse(flinkConfiguration.containsKey("flinkPluginRoot"));

    flinkConfig.setRemoteFlinkJarPath("hdfs://ns/110_flinkplugin/");
    flinkConfiguration = abstractClientFactory.setHdfsFlinkJarPath(flinkConfig, new Configuration());
    Assert.assertFalse(flinkConfiguration.containsKey("remoteFlinkJarPath"));
    Assert.assertFalse(flinkConfiguration.containsKey("remotePluginRootDir"));
    Assert.assertFalse(flinkConfiguration.containsKey("flinkJarPath"));
    Assert.assertFalse(flinkConfiguration.containsKey("flinkPluginRoot"));

    flinkConfig.setRemotePluginRootDir("hdfs://ns/data/insight_plugin/flinkplugin");
    flinkConfig.setFlinkJarPath("/data/dtInsight/flink110/flink-1.10.1");
    flinkConfig.setFlinkPluginRoot("/data/insight_plugin/flinkplugin");
    flinkConfiguration = abstractClientFactory.setHdfsFlinkJarPath(flinkConfig, new Configuration());
    Assert.assertTrue(flinkConfiguration.containsKey("remoteFlinkJarPath"));
    Assert.assertTrue(flinkConfiguration.containsKey("remotePluginRootDir"));
    Assert.assertTrue(flinkConfiguration.containsKey("flinkJarPath"));
    Assert.assertTrue(flinkConfiguration.containsKey("flinkPluginRoot"));
}
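The assertions above suggest that setHdfsFlinkJarPath copies the four path settings into the Flink Configuration only once the remote jar path and remote plugin root point at HDFS and the local flinkJarPath/flinkPluginRoot are also set. The following is a minimal sketch reconstructed from that observable behaviour, not the actual Taier implementation; the guard conditions and getter names are assumptions.

// Hedged sketch: reconstructed from the test's assertions, not from the Taier source.
public Configuration setHdfsFlinkJarPathSketch(FlinkConfig flinkConfig, Configuration configuration) {
    String remoteFlinkJarPath = flinkConfig.getRemoteFlinkJarPath();
    String remotePluginRootDir = flinkConfig.getRemotePluginRootDir();
    String flinkJarPath = flinkConfig.getFlinkJarPath();
    String flinkPluginRoot = flinkConfig.getFlinkPluginRoot();
    // Populate the keys only when the remote locations are on HDFS and the local paths are configured.
    boolean remoteOnHdfs = StringUtils.startsWith(remoteFlinkJarPath, "hdfs://")
            && StringUtils.startsWith(remotePluginRootDir, "hdfs://");
    if (remoteOnHdfs && StringUtils.isNotBlank(flinkJarPath) && StringUtils.isNotBlank(flinkPluginRoot)) {
        configuration.setString("remoteFlinkJarPath", remoteFlinkJarPath);
        configuration.setString("remotePluginRootDir", remotePluginRootDir);
        configuration.setString("flinkJarPath", flinkJarPath);
        configuration.setString("flinkPluginRoot", flinkPluginRoot);
    }
    return configuration;
}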
Use of com.dtstack.taier.flink.FlinkConfig in project Taier by DTStack.
The class AbstractClientFactory, method createClientFactory:
public static IClientFactory createClientFactory(FlinkClientBuilder flinkClientBuilder) {
    FlinkConfig flinkConfig = flinkClientBuilder.getFlinkConfig();
    ClusterMode clusterMode = ClusterMode.getClusteMode(flinkConfig.getClusterMode());
    IClientFactory clientFactory;
    switch (clusterMode) {
        case PER_JOB:
            clientFactory = new PerJobClientFactory(flinkClientBuilder);
            break;
        case SESSION:
            clientFactory = new SessionClientFactory(flinkClientBuilder);
            break;
        case STANDALONE:
            clientFactory = new StandaloneClientFactory(flinkClientBuilder);
            break;
        default:
            throw new PluginDefineException("unsupported clusterMode: " + clusterMode);
    }
    return clientFactory;
}
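A hypothetical call site might look like the one below; how flinkClientBuilder is constructed and initialized is assumed here, and any clusterMode outside PER_JOB, SESSION and STANDALONE falls through to the PluginDefineException above.

// Hypothetical usage sketch: assumes flinkClientBuilder has been prepared elsewhere
// with a FlinkConfig whose clusterMode resolves to PER_JOB, SESSION or STANDALONE.
IClientFactory clientFactory = AbstractClientFactory.createClientFactory(flinkClientBuilder);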
Use of com.dtstack.taier.flink.FlinkConfig in project Taier by DTStack.
The class SyncPluginInfoTest, method testCreateSyncPluginArgs:
@Test
public void testCreateSyncPluginArgs() {
    JobClient jobClient = new JobClient();
    jobClient.setClassArgs("-jobid flink_test_stream");
    jobClient.setComputeType(ComputeType.STREAM);
    Whitebox.setInternalState(jobClient, "confProperties", new Properties());

    FlinkConfig flinkConfig = new FlinkConfig();
    flinkConfig.setRemoteFlinkJarPath("/opt/dtstack/110_flinkplugin/");
    flinkConfig.setFlinkPluginRoot("/opt/dtstack/110_flinkplugin/");
    flinkConfig.setMonitorAddress("http://localhost:8088");
    flinkConfig.setPluginLoadMode("shipfile");

    SyncPluginInfo syncPluginInfo = SyncPluginInfo.create(flinkConfig);
    List<String> args = syncPluginInfo.createSyncPluginArgs(jobClient, new FlinkClient());
    String result = new Gson().toJson(args);
    String expectStr = "[\"-jobid\",\"flink_test_stream\",\"-monitor\",\"http://localhost:8088\",\"-pluginLoadMode\",\"shipfile\",\"-mode\",\"yarnPer\"]";
    Assert.assertEquals(expectStr, result);
}
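The expected JSON indicates that createSyncPluginArgs keeps the job's own class arguments and appends the monitor address, the plugin load mode and the deploy mode taken from FlinkConfig. A rough sketch of that assembly, reconstructed only from the expected output (the getter names and the splitting of classArgs are assumptions, not the actual SyncPluginInfo code):

// Sketch reconstructed from the expected output above; not the actual SyncPluginInfo code.
List<String> args = new ArrayList<>(Arrays.asList(jobClient.getClassArgs().split("\\s+")));
args.add("-monitor");
args.add(flinkConfig.getMonitorAddress());
args.add("-pluginLoadMode");
args.add(flinkConfig.getPluginLoadMode());
args.add("-mode");
args.add("yarnPer"); // the deploy-mode string produced in this test scenario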
Use of com.dtstack.taier.flink.FlinkConfig in project Taier by DTStack.
The class SessionClientFactory, method createYarnSessionClusterDescriptor:
public YarnClusterDescriptor createYarnSessionClusterDescriptor() throws MalformedURLException {
    Configuration newConf = new Configuration(flinkConfiguration);
    String flinkJarPath = flinkConfig.getFlinkJarPath();
    String pluginLoadMode = flinkConfig.getPluginLoadMode();
    YarnConfiguration yarnConf = flinkClientBuilder.getYarnConf();
    FileUtil.checkFileExist(flinkJarPath);

    if (!flinkConfig.getFlinkHighAvailability()) {
        setNoneHaModeConfig(newConf);
    } else {
        // The clusterId of a yarn-session managed by the engine is not set explicitly;
        // the YARN application id is used as the clusterId by default.
        newConf.removeConfig(HighAvailabilityOptions.HA_CLUSTER_ID);
    }

    List<File> keytabFiles = null;
    if (flinkConfig.isOpenKerberos()) {
        keytabFiles = getKeytabFilesAndSetSecurityConfig(newConf);
    }

    newConf = setHdfsFlinkJarPath(flinkConfig, newConf);
    YarnClusterDescriptor clusterDescriptor = getClusterDescriptor(newConf, yarnConf);

    if (StringUtils.isNotBlank(pluginLoadMode) && ConfigConstrant.FLINK_PLUGIN_SHIPFILE_LOAD.equalsIgnoreCase(pluginLoadMode)) {
        newConf.setString(ConfigConstrant.FLINK_PLUGIN_LOAD_MODE, flinkConfig.getPluginLoadMode());
        String flinkPluginRoot = flinkConfig.getFlinkPluginRoot();
        if (StringUtils.isNotBlank(flinkPluginRoot)) {
            String syncPluginDir = flinkPluginRoot + ConfigConstrant.SP + ConfigConstrant.SYNCPLUGIN_DIR;
            File syncFile = new File(syncPluginDir);
            if (!syncFile.exists()) {
                throw new PluginDefineException("syncPlugin path does not exist: " + syncPluginDir);
            }
            // Ship everything under the syncplugin directory except packaged zip archives.
            List<File> pluginPaths = Arrays.stream(syncFile.listFiles())
                    .filter(file -> !file.getName().endsWith("zip"))
                    .collect(Collectors.toList());
            clusterDescriptor.addShipFiles(pluginPaths);
        }
    }

    if (CollectionUtils.isNotEmpty(keytabFiles)) {
        clusterDescriptor.addShipFiles(keytabFiles);
    }

    List<URL> classpaths = getFlinkJarFile(flinkJarPath, clusterDescriptor);
    clusterDescriptor.setProvidedUserJarFiles(classpaths);
    return clusterDescriptor;
}
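Once the descriptor is built, a caller would typically hand it to Flink to launch the session cluster. Below is a hedged sketch against the Flink 1.10 YARN API; the resource sizes are placeholders and sessionClientFactory is an assumed, already-initialized instance, not how Taier itself wires this up.

// Sketch only: placeholder resource values, not Taier defaults; error handling omitted.
ClusterSpecification clusterSpecification = new ClusterSpecification.ClusterSpecificationBuilder()
        .setMasterMemoryMB(1024)
        .setTaskManagerMemoryMB(2048)
        .setSlotsPerTaskManager(2)
        .createClusterSpecification();
YarnClusterDescriptor clusterDescriptor = sessionClientFactory.createYarnSessionClusterDescriptor();
ClusterClient<ApplicationId> clusterClient =
        clusterDescriptor.deploySessionCluster(clusterSpecification).getClusterClient();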