use of com.qlangtech.tis.lang.TisException in project plugins by qlangtech.
the class FlinkTaskNodeController method getRCDeployment.
@Override
public IDeploymentDetail getRCDeployment(TargetResName collection) {
    ExtendFlinkJobDeploymentDetails rcDeployment = null;
    JobID launchJobID = getLaunchJobID(collection);
    if (launchJobID == null) {
        return null;
    }
    try {
        try (RestClusterClient restClient = this.factory.getFlinkCluster()) {
            CompletableFuture<JobStatus> jobStatus = restClient.getJobStatus(launchJobID);
            JobStatus status = jobStatus.get(5, TimeUnit.SECONDS);
            if (status == null) {
                return null;
            }
            CompletableFuture<JobDetailsInfo> jobDetails = restClient.getJobDetails(launchJobID);
            JobDetailsInfo jobDetailsInfo = jobDetails.get(5, TimeUnit.SECONDS);
            rcDeployment = new ExtendFlinkJobDeploymentDetails(factory.getClusterCfg(), jobDetailsInfo);
            return rcDeployment;
        }
    } catch (TimeoutException e) {
        FlinkCluster clusterCfg = this.factory.getClusterCfg();
        throw new TisException("flinkClusterId:" + clusterCfg.getClusterId() + ",Address:" + clusterCfg.getJobManagerAddress().getURL()
                + " connection timed out, please check that the corresponding configuration is correct", e);
    } catch (ExecutionException e) {
        Throwable cause = e.getCause();
        // the job no longer exists on the Flink side: treat "NotFoundException" as "no deployment"
        if (StringUtils.indexOf(cause.getMessage(), "NotFoundException") > -1) {
            return null;
        }
        throw new RuntimeException(e);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
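The method above returns null both when no job has been launched and when Flink reports a NotFoundException, while connectivity problems surface as a TisException. A minimal, hedged call-site sketch (the helper name, the variable names and the TargetResName String constructor are assumptions, not taken from the project):

// Hypothetical call-site sketch; taskNodeController is assumed to be an already
// configured FlinkTaskNodeController instance.
public static boolean isIncrJobRunning(FlinkTaskNodeController taskNodeController, String pipelineName) {
    try {
        // null means: no job launched yet, or the job was already removed on the Flink side
        return taskNodeController.getRCDeployment(new TargetResName(pipelineName)) != null;
    } catch (TisException e) {
        // thrown on timeout / unreachable JobManager; the message already carries clusterId and address
        return false;
    }
}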
use of com.qlangtech.tis.lang.TisException in project plugins by qlangtech.
the class HudiDumpPostTask method launchSparkRddConvert.
private void launchSparkRddConvert(ITISFileSystem fs, IPath dumpDir, IPath fsSourcePropsPath) throws Exception {
    Map<String, String> env = Config.getInstance().getAllKV();
    String mdcCollection = MDC.get(TISCollectionUtils.KEY_COLLECTION);
    final String taskId = MDC.get(IParamContext.KEY_TASK_ID);
    env.put(IParamContext.KEY_TASK_ID, taskId);
    if (StringUtils.isNotEmpty(mdcCollection)) {
        env.put(TISCollectionUtils.KEY_COLLECTION, mdcCollection);
    }
    logger.info("environment props ===========================");
    for (Map.Entry<String, String> entry : env.entrySet()) {
        logger.info("key:{},value:{}", entry.getKey(), entry.getValue());
    }
    logger.info("=============================================");
    SparkLauncher handle = new SparkLauncher(env);
    File logFile = new File(TisAppLaunchPort.getAssebleTaskDir(), "full-" + taskId + ".log");
    FileUtils.touch(logFile);
    handle.redirectError(logFile);
    File hudiDependencyDir = HudiConfig.getHudiDependencyDir();
    File sparkHome = HudiConfig.getSparkHome();
    File resJar = FileUtils.listFiles(hudiDependencyDir, new String[] { "jar" }, false).stream().findFirst()
            .orElseThrow(() -> new IllegalStateException("must have resJar hudiDependencyDir:" + hudiDependencyDir.getAbsolutePath()));
    File addedJars = new File(hudiDependencyDir, "lib");
    boolean[] hasAddJar = new boolean[1];
    FileUtils.listFiles(addedJars, new String[] { "jar" }, false).forEach((jar) -> {
        handle.addJar(String.valueOf(jar.toPath().normalize()));
        hasAddJar[0] = true;
    });
    if (!hasAddJar[0]) {
        throw new IllegalStateException("path must contain jars:" + addedJars.getAbsolutePath());
    }
    handle.setAppResource(String.valueOf(resJar.toPath().normalize()));
    // sparkConnGetter is taken from the writer plugin: writerPlugin.getSparkConnGetter()
    handle.setMaster(sparkConnGetter.getSparkMaster());
    handle.setSparkHome(String.valueOf(sparkHome.toPath().normalize()));
    handle.setMainClass("com.alibaba.datax.plugin.writer.hudi.TISHoodieDeltaStreamer");
    handle.addAppArgs("--table-type", this.hudiWriter.getHudiTableType().getValue(),
            "--source-class", "org.apache.hudi.utilities.sources.CsvDFSSource",
            "--source-ordering-field", hudiTab.sourceOrderingField,
            "--target-base-path", String.valueOf(HudiTableMeta.getHudiDataDir(fs, dumpDir)),
            "--target-table", this.hudiTab.getName() + "/" + hudiWriter.dataXName,
            "--props", String.valueOf(fsSourcePropsPath),
            "--schemaprovider-class", "org.apache.hudi.utilities.schema.FilebasedSchemaProvider",
            "--enable-sync");
    if (hudiWriter.getHudiTableType() == HudiWriteTabType.MOR) {
        handle.addAppArgs("--disable-compaction");
    }
    CountDownLatch countDownLatch = new CountDownLatch(1);
    SparkAppHandle sparkAppHandle = handle.startApplication(new SparkAppHandle.Listener() {

        @Override
        public void stateChanged(SparkAppHandle sparkAppHandle) {
            SparkAppHandle.State state = sparkAppHandle.getState();
            if (state.isFinal()) {
                System.out.println("Info:" + state + ",appId:" + sparkAppHandle.getAppId());
                countDownLatch.countDown();
            }
        }

        @Override
        public void infoChanged(SparkAppHandle sparkAppHandle) {
            System.out.println("Info:" + sparkAppHandle.getState().toString());
        }
    });
    countDownLatch.await();
    if (sparkAppHandle.getState() != SparkAppHandle.State.FINISHED) {
        throw new TisException("spark app:" + sparkAppHandle.getAppId() + " execute result not successful:" + sparkAppHandle.getState());
    }
}
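The launcher above blocks until the Spark application reaches a final state by pairing SparkAppHandle.Listener with a CountDownLatch. A stripped-down, hedged sketch of just that synchronization pattern (the class name, app resource, main class and master URL below are placeholders, not from the project):

import java.util.concurrent.CountDownLatch;
import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;

// Minimal wait-for-completion sketch; paths and master URL are placeholders.
public class LaunchAndWaitDemo {
    public static void main(String[] args) throws Exception {
        SparkLauncher launcher = new SparkLauncher()
                .setAppResource("/path/to/app.jar")
                .setMainClass("com.example.Main")
                .setMaster("local[*]");
        CountDownLatch done = new CountDownLatch(1);
        SparkAppHandle handle = launcher.startApplication(new SparkAppHandle.Listener() {

            @Override
            public void stateChanged(SparkAppHandle h) {
                // FINISHED, FAILED, KILLED and LOST are all final states
                if (h.getState().isFinal()) {
                    done.countDown();
                }
            }

            @Override
            public void infoChanged(SparkAppHandle h) {
                // no-op: only state transitions matter here
            }
        });
        done.await();
        if (handle.getState() != SparkAppHandle.State.FINISHED) {
            throw new IllegalStateException("spark app failed, state:" + handle.getState());
        }
    }
}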
use of com.qlangtech.tis.lang.TisException in project tis by qlangtech.
the class CoreAction method doDeployIncrSyncChannal.
/**
 * Deploy the instance
 * https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/deployment/resource-providers/standalone/kubernetes/
 *
 * @param context
 * @throws Exception
 */
@Func(value = PermissionConstant.PERMISSION_INCR_PROCESS_MANAGE)
public void doDeployIncrSyncChannal(Context context) throws Exception {
    // compile and package first
    StringBuffer logger = new StringBuffer("flink sync app:" + this.getCollectionName());
    try {
        TISK8sDelegate k8sClient = TISK8sDelegate.getK8SDelegate(this.getCollectionName());
        k8sClient.checkUseable();
        long start = System.currentTimeMillis();
        this.doCompileAndPackage(context);
        if (context.hasErrors()) {
            return;
        }
        logger.append("\n compile and package consume:" + (System.currentTimeMillis() - start) + "ms ");
        // compile and package
        IndexStreamCodeGenerator indexStreamCodeGenerator = getIndexStreamCodeGenerator(this);
        // publish the packaged build to the k8s cluster
        // https://github.com/kubernetes-client/java
        start = System.currentTimeMillis();
        // deploy via k8s
        k8sClient.deploy(null, indexStreamCodeGenerator.getIncrScriptTimestamp());
        logger.append("\n deploy to flink cluster consume:" + (System.currentTimeMillis() - start) + "ms ");
        IndexIncrStatus incrStatus = new IndexIncrStatus();
        this.setBizResult(context, incrStatus);
    } catch (Exception ex) {
        logger.append("an error occurred:" + ex.getMessage());
        throw new TisException(ex.getMessage(), ex);
    } finally {
        log.info(logger.toString());
    }
}
use of com.qlangtech.tis.lang.TisException in project plugins by qlangtech.
the class K8sExceptionUtils method convert.
public static TisException convert(String msg, ApiException e) {
    V1Status v1Status = JSON.parseObject(e.getResponseBody(), V1Status.class);
    String errMsg = msg;
    if (v1Status != null) {
        errMsg = (msg == null) ? v1Status.getMessage() : msg + ":" + v1Status.getMessage();
    }
    return new TisException(StringUtils.defaultIfEmpty(errMsg, e.getMessage()), e);
}
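convert() parses the structured V1Status message out of the ApiException response body and wraps it in a TisException. A hedged sketch of how a call site might use it (the wrapper class and functional interface below are illustrative, not from the project):

// Illustrative wrapper: any kubernetes-client call that throws ApiException
// can be rethrown as a TisException carrying the server-side V1Status message.
// Note: on older client versions the import is io.kubernetes.client.ApiException.
import io.kubernetes.client.openapi.ApiException;

public class K8sCallWrapper {

    @FunctionalInterface
    public interface K8sCall<T> {
        T invoke() throws ApiException;
    }

    public static <T> T execute(String desc, K8sCall<T> call) {
        try {
            return call.invoke();
        } catch (ApiException e) {
            throw K8sExceptionUtils.convert(desc, e);
        }
    }
}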
use of com.qlangtech.tis.lang.TisException in project plugins by qlangtech.
the class FlinkTaskNodeController method checkUseable.
@Override
public void checkUseable() {
    FlinkCluster cluster = factory.getClusterCfg();
    try {
        try (RestClusterClient restClient = cluster.createFlinkRestClusterClient(Optional.of(1000l))) {
            // simple liveness probe: listing jobs verifies the JobManager REST endpoint is reachable
            CompletableFuture<Collection<JobStatusMessage>> status = restClient.listJobs();
            Collection<JobStatusMessage> jobStatus = status.get();
        }
    } catch (Exception e) {
        throw new TisException("Please check that the link is valid:" + cluster.getJobManagerAddress().getURL(), e);
    }
}
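checkUseable() treats any failure of listJobs() as an unusable cluster, but status.get() blocks without a deadline. A hedged variant that also bounds the wait (the 5-second value is an assumption, mirroring getRCDeployment above):

// Hedged variant: bound the liveness probe instead of waiting indefinitely.
try (RestClusterClient restClient = cluster.createFlinkRestClusterClient(Optional.of(1000L))) {
    restClient.listJobs().get(5, TimeUnit.SECONDS);
} catch (Exception e) {
    throw new TisException("Please check that the link is valid:" + cluster.getJobManagerAddress().getURL(), e);
}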