Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in the Taier project by DTStack.
Class PackagedProgram, method extractContainedLibraries.
/**
 * Takes all JAR files that are contained in this program's JAR file and extracts them
 * to the system's temp directory.
 *
 * @param jarFile URL of the program's JAR file; must be convertible to a local file URI.
 * @return The file names of the extracted temporary files.
 * @throws PluginDefineException Thrown, if the extraction process failed.
 */
public static List<File> extractContainedLibraries(URL jarFile) throws PluginDefineException {
    Random rnd = new Random();
    JarFile jar = null;
    try {
        jar = new JarFile(new File(jarFile.toURI()));
        // Collect all nested jar entries under "lib/".
        final List<JarEntry> containedJarFileEntries = new ArrayList<JarEntry>();
        Enumeration<JarEntry> entries = jar.entries();
        while (entries.hasMoreElements()) {
            JarEntry entry = entries.nextElement();
            String name = entry.getName();
            // length > 8 rules out names shorter than "lib/x.jar"
            if (name.length() > 8 && name.startsWith("lib/") && name.endsWith(".jar")) {
                containedJarFileEntries.add(entry);
            }
        }
        if (containedJarFileEntries.isEmpty()) {
            return Collections.emptyList();
        } else {
            // go over all contained jar files
            final List<File> extractedTempLibraries = new ArrayList<File>(containedJarFileEntries.size());
            final byte[] buffer = new byte[4096];
            // If anything fails mid-way, the finally block below cleans up the
            // files extracted so far.
            boolean incomplete = true;
            try {
                for (int i = 0; i < containedJarFileEntries.size(); i++) {
                    final JarEntry entry = containedJarFileEntries.get(i);
                    String name = entry.getName();
                    // '/' as in case of zip, jar
                    // java.util.zip.ZipEntry#isDirectory always looks only for '/' not for File.separator
                    name = name.replace('/', '_');
                    File tempFile;
                    try {
                        // Random prefix avoids collisions between concurrent extractions.
                        tempFile = File.createTempFile(rnd.nextInt(Integer.MAX_VALUE) + "_", name);
                        tempFile.deleteOnExit();
                    } catch (IOException e) {
                        throw new PluginDefineException("An I/O error occurred while creating temporary file to extract nested library '" + entry.getName() + "'.", e);
                    }
                    extractedTempLibraries.add(tempFile);
                    // copy the jar entry contents to the temporary file;
                    // try-with-resources guarantees both streams are closed even
                    // if one of the close() calls throws
                    try (OutputStream out = new FileOutputStream(tempFile);
                         InputStream in = new BufferedInputStream(jar.getInputStream(entry))) {
                        int numRead = 0;
                        while ((numRead = in.read(buffer)) != -1) {
                            out.write(buffer, 0, numRead);
                        }
                    } catch (IOException e) {
                        // propagate the root cause so callers can diagnose the failure
                        throw new PluginDefineException("An I/O error occurred while extracting nested library '" + entry.getName() + "' to temporary file '" + tempFile.getAbsolutePath() + "'.", e);
                    }
                }
                incomplete = false;
            } finally {
                if (incomplete) {
                    deleteExtractedLibraries(extractedTempLibraries);
                }
            }
            return extractedTempLibraries;
        }
    } catch (Throwable t) {
        throw new PluginDefineException("Unknown I/O error while extracting contained jar files.", t);
    } finally {
        if (jar != null) {
            try {
                jar.close();
            } catch (Throwable t) {
                // best effort: closing the enclosing jar must not hide the real error
            }
        }
    }
}
Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in the Taier project by DTStack.
Class PackagedProgram, method loadMainClass.
/**
 * Loads the program's entry point class through the given class loader,
 * temporarily installing that loader as the thread context class loader.
 * The class is NOT initialized here (initialize = false), so static
 * initializers run later, when the class is first used.
 */
private static Class<?> loadMainClass(String className, ClassLoader cl) throws PluginDefineException {
    final Thread current = Thread.currentThread();
    ClassLoader restoredLoader = null;
    try {
        restoredLoader = current.getContextClassLoader();
        current.setContextClassLoader(cl);
        return Class.forName(className, false, cl);
    } catch (ClassNotFoundException e) {
        throw new PluginDefineException("The program's entry point class '" + className + "' was not found in the jar file.", e);
    } catch (ExceptionInInitializerError e) {
        throw new PluginDefineException("The program's entry point class '" + className + "' threw an error during initialization.", e);
    } catch (LinkageError e) {
        throw new PluginDefineException("The program's entry point class '" + className + "' could not be loaded due to a linkage failure.", e);
    } catch (Throwable t) {
        throw new PluginDefineException("The program's entry point class '" + className + "' caused an exception during initialization: " + t.getMessage(), t);
    } finally {
        // Restore the previous context loader only if we managed to read it.
        if (restoredLoader != null) {
            current.setContextClassLoader(restoredLoader);
        }
    }
}
Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in the Taier project by DTStack.
Class FlinkClient, method getMessageFromJobArchive.
/**
 * Reads the archived job JSON from the Flink history archive directory and
 * returns the "json" payload whose "path" matches the given urlPath.
 *
 * @param jobId   the Flink job id whose archive file is read
 * @param urlPath the REST-style path to look up inside the archive
 * @return the archived JSON message for the given path
 * @throws Exception if the archive cannot be read or no matching entry exists
 */
public String getMessageFromJobArchive(String jobId, String urlPath) throws Exception {
    String archiveDir = flinkExtProp.getProperty(JobManagerOptions.ARCHIVE_DIR.key());
    String jobArchivePath = archiveDir + ConfigConstrant.SP + jobId;
    return KerberosUtils.login(flinkConfig, () -> {
        try {
            InputStream is = FileUtil.readStreamFromFile(jobArchivePath, hadoopConf.getConfiguration());
            JsonParser jsonParser = new JsonParser();
            // closing the reader also closes the underlying stream
            try (InputStreamReader reader = new InputStreamReader(is, StandardCharsets.UTF_8)) {
                JsonObject jobArchiveAll = (JsonObject) jsonParser.parse(reader);
                Preconditions.checkNotNull(jobArchiveAll, "jobArchive is null");
                JsonArray jsonArray = jobArchiveAll.getAsJsonArray("archive");
                for (JsonElement ele : jsonArray) {
                    JsonObject obj = ele.getAsJsonObject();
                    if (StringUtils.equals(obj.get("path").getAsString(), urlPath)) {
                        String exception = obj.get("json").getAsString();
                        return exception;
                    }
                }
            }
            throw new PluginDefineException(String.format("Not found Message from jobArchive, jobId[%s], urlPath[%s]", jobId, urlPath));
        } catch (PluginDefineException e) {
            // keep the descriptive "not found" message instead of re-wrapping it
            throw e;
        } catch (Exception e) {
            throw new PluginDefineException(e);
        }
    }, hadoopConf.getConfiguration());
}
Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in the Taier project by DTStack.
Class FlinkClient, method runJobBySession.
/**
 * Submits a job to the shared Flink session cluster.
 *
 * @param jobGraph the job graph to submit
 * @return a pair of (Flink job id, null)
 * @throws Exception if the submission fails; the client manager is notified first
 */
private Pair<String, String> runJobBySession(JobGraph jobGraph) throws Exception {
    try {
        ClusterClient sessionClient = flinkClusterClientManager.getClusterClient(null);
        JobExecutionResult result =
                ClientUtils.submitJob(sessionClient, jobGraph, flinkConfig.getSubmitTimeout(), TimeUnit.MINUTES);
        String submittedJobId = result.getJobID().toString();
        return Pair.create(submittedJobId, null);
    } catch (Exception e) {
        // notify the manager so it can deal with the (possibly broken) cached client
        flinkClusterClientManager.dealWithClientError();
        throw new PluginDefineException(e);
    }
}
Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in the Taier project by DTStack.
Class FlinkClient, method getJobStatus.
/**
 * Queries the job status directly via the Flink REST API and returns it.
 * Fallback order: live cluster REST endpoint, then the job archive, then
 * (for per-job clusters) the YARN application status.
 *
 * @param jobIdentifier identifies the job (jobId, engineJobId, applicationId)
 * @return the resolved task status; NOTFOUND when no source could answer
 */
@Override
public TaskStatus getJobStatus(JobIdentifier jobIdentifier) {
    String jobId = jobIdentifier.getJobId();
    String engineJobId = jobIdentifier.getEngineJobId();
    String applicationId = jobIdentifier.getApplicationId();
    if (StringUtils.isEmpty(engineJobId)) {
        logger.warn("{} getJobStatus is NOTFOUND, because engineJobId is empty.", jobId);
        return TaskStatus.NOTFOUND;
    }
    ClusterClient clusterClient = null;
    try {
        clusterClient = flinkClusterClientManager.getClusterClient(jobIdentifier);
    } catch (Exception e) {
        // a missing client is not fatal: the archive fallback below may still answer
        logger.error("taskId: {}, get clusterClient error:", jobId, e);
    }
    String jobUrlPath = String.format(ConfigConstrant.JOB_URL_FORMAT, engineJobId);
    String response = null;
    Exception urlException = null;
    // 1) try the live cluster's web interface
    if (clusterClient != null) {
        try {
            String webInterfaceURL = clusterClient.getWebInterfaceURL();
            String jobUrl = webInterfaceURL + jobUrlPath;
            response = PoolHttpClient.get(jobUrl);
        } catch (Exception e) {
            // remember the failure; only log it if the archive fallback also fails
            urlException = e;
        }
    }
    // 2) fall back to the job archive (finished/expired jobs)
    if (StringUtils.isEmpty(response)) {
        try {
            response = getMessageFromJobArchive(engineJobId, jobUrlPath);
        } catch (Exception e) {
            if (urlException != null) {
                logger.error("taskId: {}, Get job status error from webInterface: ", jobId, urlException);
            }
            logger.error("taskId: {}, request job status error from jobArchive: ", jobId, e);
        }
    }
    // 3) last resort: ask YARN for the per-job application status
    if (StringUtils.isEmpty(response)) {
        if (StringUtils.isNotEmpty(applicationId)) {
            TaskStatus taskStatus = getPerJobStatus(applicationId);
            logger.info("taskId: {}, try getPerJobStatus with yarnClient, status: {}", jobId, taskStatus.name());
            return taskStatus;
        }
        return TaskStatus.NOTFOUND;
    }
    // response is guaranteed non-empty here (the isEmpty check above already
    // returned for null/empty), so parse it directly
    try {
        Map<String, Object> statusMap = PublicUtil.jsonStrToObject(response, Map.class);
        Object stateObj = statusMap.get("state");
        if (stateObj == null) {
            return TaskStatus.NOTFOUND;
        }
        String state = (String) stateObj;
        state = StringUtils.upperCase(state);
        return TaskStatus.getTaskStatus(state);
    } catch (Exception e) {
        logger.error("taskId: {}, getJobStatus error: ", jobId, e);
        return TaskStatus.NOTFOUND;
    }
}
Aggregations