
Example 41 with PluginDefineException

Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class PoolHttpClient, method getRequest:

private static String getRequest(String url, Header[] headers) throws IOException {
    String respBody = null;
    HttpGet httpGet = null;
    CloseableHttpResponse response = null;
    int statusCode = 0;
    try {
        httpGet = new HttpGet(url);
        if (headers != null && headers.length > 0) {
            httpGet.setHeaders(headers);
        }
        response = httpClient.execute(httpGet);
        statusCode = response.getStatusLine().getStatusCode();
        if (statusCode == HttpStatus.SC_OK) {
            HttpEntity entity = response.getEntity();
            respBody = EntityUtils.toString(entity, charset);
        } else if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
            throw new PluginDefineException("login session expired, status code " + statusCode);
        } else {
            LOGGER.warn("request url:{} fail:{}", url, statusCode);
            if (statusCode == HttpStatus.SC_NOT_FOUND) {
                throw new PluginDefineException("status code " + HttpStatus.SC_NOT_FOUND);
            } else if (statusCode == HttpStatus.SC_INTERNAL_SERVER_ERROR) {
                throw new PluginDefineException("status code " + HttpStatus.SC_INTERNAL_SERVER_ERROR);
            }
        }
    } catch (IOException e) {
        LOGGER.error("url:{}--->http request error:", url, e);
        throw e;
    } finally {
        if (HttpStatus.SC_OK != statusCode && null != httpGet) {
            httpGet.abort();
        }
        if (response != null) {
            try {
                response.close();
            } catch (IOException e) {
                LOGGER.error("", e);
            }
        }
    }
    return respBody;
}
Also used : HttpEntity(org.apache.http.HttpEntity) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) HttpGet(org.apache.http.client.methods.HttpGet) CloseableHttpResponse(org.apache.http.client.methods.CloseableHttpResponse) IOException(java.io.IOException)
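
The helper above maps HTTP status codes onto PluginDefineException. Below is a minimal, self-contained sketch of the same pattern; the class and method names (HttpGetSketch, fetch) are illustrative and not part of Taier, and the charset is assumed to be UTF-8 rather than PoolHttpClient's charset field.

import java.io.IOException;

import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

import com.dtstack.taier.pluginapi.exception.PluginDefineException;

public class HttpGetSketch {

    public static String fetch(String url) throws IOException {
        // try-with-resources closes both the client and the response,
        // replacing the explicit finally block used in PoolHttpClient.
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(new HttpGet(url))) {
            int statusCode = response.getStatusLine().getStatusCode();
            if (statusCode == HttpStatus.SC_OK) {
                return EntityUtils.toString(response.getEntity(), "UTF-8");
            }
            // Non-200 responses surface as PluginDefineException, as in Example 41.
            throw new PluginDefineException("request " + url + " failed, status code " + statusCode);
        }
    }
}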

Example 42 with PluginDefineException

Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class PerJobClientFactory, method deleteTaskIfExist:

/**
 * kill application which name and queue are the same as this jobClient
 * when run in stream-computing mode
 */
public void deleteTaskIfExist(JobClient jobClient) {
    if (ComputeType.BATCH.equals(jobClient.getComputeType())) {
        return;
    }
    try {
        String taskName = jobClient.getJobName();
        String queueName = flinkConfig.getQueue();
        YarnClient yarnClient = flinkClientBuilder.getYarnClient();
        EnumSet<YarnApplicationState> enumSet = EnumSet.noneOf(YarnApplicationState.class);
        enumSet.add(YarnApplicationState.ACCEPTED);
        enumSet.add(YarnApplicationState.RUNNING);
        List<ApplicationReport> existApps = yarnClient.getApplications(enumSet).stream()
                .filter(report -> report.getQueue().endsWith(queueName))
                .filter(report -> report.getName().equals(taskName))
                .collect(Collectors.toList());
        for (ApplicationReport report : existApps) {
            ApplicationId appId = report.getApplicationId();
            LOG.info("try to kill application " + appId.toString() + " which name is " + report.getName());
            yarnClient.killApplication(appId);
        }
    } catch (Exception e) {
        LOG.error("Delete task error ", e);
        throw new PluginDefineException("Delete task error");
    }
}
Also used : ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) SecurityOptions(org.apache.flink.configuration.SecurityOptions) java.util(java.util) ConfigConstrant(com.dtstack.taier.flink.constrant.ConfigConstrant) FlinkClientBuilder(com.dtstack.taier.flink.FlinkClientBuilder) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) URL(java.net.URL) ClassLoaderType(com.dtstack.taier.base.enums.ClassLoaderType) LoggerFactory(org.slf4j.LoggerFactory) YarnClusterDescriptor(org.apache.flink.yarn.YarnClusterDescriptor) JarFileInfo(com.dtstack.taier.pluginapi.JarFileInfo) ClusterMode(com.dtstack.taier.flink.base.enums.ClusterMode) StringUtils(org.apache.commons.lang3.StringUtils) ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) KerberosUtils(com.dtstack.taier.base.util.KerberosUtils) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) CollectionUtils(org.apache.commons.collections.CollectionUtils) ConverterUtils(org.apache.hadoop.yarn.util.ConverterUtils) FlinkConfig(com.dtstack.taier.flink.FlinkConfig) EJobType(com.dtstack.taier.pluginapi.enums.EJobType) HighAvailabilityMode(org.apache.flink.runtime.jobmanager.HighAvailabilityMode) FlinkUtil(com.dtstack.taier.flink.util.FlinkUtil) ComputeType(com.dtstack.taier.pluginapi.enums.ComputeType) JobIdentifier(com.dtstack.taier.pluginapi.JobIdentifier) RemovalNotification(com.google.common.cache.RemovalNotification) Logger(org.slf4j.Logger) Configuration(org.apache.flink.configuration.Configuration) FileUtils(org.apache.commons.io.FileUtils) IOException(java.io.IOException) Collectors(java.util.stream.Collectors) File(java.io.File) YarnConfigOptions(org.apache.flink.yarn.configuration.YarnConfigOptions) ExecutionException(java.util.concurrent.ExecutionException) TimeUnit(java.util.concurrent.TimeUnit) JobClient(com.dtstack.taier.pluginapi.JobClient) FileSystem(org.apache.flink.core.fs.FileSystem) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) CoreOptions(org.apache.flink.configuration.CoreOptions) FileUtil(com.dtstack.taier.flink.util.FileUtil) ClusterClient(org.apache.flink.client.program.ClusterClient) ConfigConstant(com.dtstack.taier.pluginapi.constrant.ConfigConstant) YarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState) RemovalListener(com.google.common.cache.RemovalListener) RandomStringUtils(org.apache.commons.lang3.RandomStringUtils) CacheBuilder(com.google.common.cache.CacheBuilder) Cache(com.google.common.cache.Cache) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) HighAvailabilityOptions(org.apache.flink.configuration.HighAvailabilityOptions)
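
deleteTaskIfExist assumes a YarnClient that is already initialized and started (obtained through flinkClientBuilder.getYarnClient()). A minimal sketch of how such a client is typically created from a YarnConfiguration follows; this is standard Hadoop YARN client setup, not code taken from FlinkClientBuilder.

import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class YarnClientSketch {

    public static YarnClient create(YarnConfiguration yarnConf) {
        YarnClient yarnClient = YarnClient.createYarnClient();
        // Bind the cluster configuration, then open the connection to the ResourceManager.
        yarnClient.init(yarnConf);
        yarnClient.start();
        return yarnClient;
    }
}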

Example 43 with PluginDefineException

Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class PerJobClientFactory, method getClusterClient:

@Override
public ClusterClient getClusterClient(JobIdentifier jobIdentifier) {
    String applicationId = jobIdentifier.getApplicationId();
    String jobId = jobIdentifier.getJobId();
    ClusterClient clusterClient = null;
    try {
        clusterClient = KerberosUtils.login(flinkConfig, () -> {
            try {
                return perJobClientCache.get(applicationId, () -> {
                    ParamAction action = new ParamAction();
                    action.setJobId(jobId);
                    action.setName("jobId-" + jobId);
                    action.setTaskType(EJobType.SQL.getType());
                    action.setComputeType(ComputeType.STREAM.getType());
                    action.setTenantId(-1L);
                    String taskParams = "flinkTaskRunMode=per_job";
                    action.setTaskParams(taskParams);
                    JobClient jobClient = new JobClient(action);
                    try (YarnClusterDescriptor perJobYarnClusterDescriptor = this.createPerJobClusterDescriptor(jobClient)) {
                        return perJobYarnClusterDescriptor.retrieve(ConverterUtils.toApplicationId(applicationId)).getClusterClient();
                    }
                });
            } catch (ExecutionException e) {
                throw new PluginDefineException(e);
            }
        }, flinkClientBuilder.getYarnConf());
    } catch (Exception e) {
        LOG.error("job[{}] get perJobClient exception:{}", jobId, e);
        throw new PluginDefineException(e);
    }
    return clusterClient;
}
Also used : ParamAction(com.dtstack.taier.pluginapi.pojo.ParamAction) ClusterClient(org.apache.flink.client.program.ClusterClient) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) YarnClusterDescriptor(org.apache.flink.yarn.YarnClusterDescriptor) ExecutionException(java.util.concurrent.ExecutionException) JobClient(com.dtstack.taier.pluginapi.JobClient) IOException(java.io.IOException)
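
getClusterClient leans on a Guava cache (perJobClientCache) so that a ClusterClient is rebuilt only on a cache miss. The sketch below shows that pattern in isolation; the cache bounds, the class name, and the createClusterClient placeholder are assumptions for illustration, not the values used in PerJobClientFactory.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import org.apache.flink.client.program.ClusterClient;

import com.dtstack.taier.pluginapi.exception.PluginDefineException;

public class PerJobClientCacheSketch {

    private final Cache<String, ClusterClient> perJobClientCache = CacheBuilder.newBuilder()
            .maximumSize(100)                          // assumed bound, not from the source
            .expireAfterAccess(10, TimeUnit.MINUTES)   // assumed expiry, not from the source
            .build();

    public ClusterClient getOrCreate(String applicationId) {
        try {
            // The Callable runs only on a cache miss; later calls reuse the cached client.
            return perJobClientCache.get(applicationId, () -> createClusterClient(applicationId));
        } catch (ExecutionException e) {
            // Same mapping as Example 43: loader failures surface as PluginDefineException.
            throw new PluginDefineException(e);
        }
    }

    private ClusterClient createClusterClient(String applicationId) {
        throw new UnsupportedOperationException("placeholder: retrieve the client as in Example 43");
    }
}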

Example 44 with PluginDefineException

Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class SessionClientFactory, method initZkClient:

private void initZkClient() {
    String zkAddress = flinkConfiguration.getValue(HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM);
    if (StringUtils.isBlank(zkAddress)) {
        throw new PluginDefineException("zkAddress is error");
    }
    this.zkClient = CuratorFrameworkFactory.builder()
            .connectString(zkAddress)
            .retryPolicy(new ExponentialBackoffRetry(1000, 3))
            .connectionTimeoutMs(flinkConfig.getZkConnectionTimeout())
            .sessionTimeoutMs(flinkConfig.getZkSessionTimeout())
            .build();
    this.zkClient.start();
    try {
        if (null == this.leaderLatch) {
            this.leaderLatch = getLeaderLatch();
            this.leaderLatch.addListener(new LeaderLatchListener() {

                @Override
                public void isLeader() {
                    isLeader.set(true);
                    LOG.info(">>>My monitor role is Leader.");
                }

                @Override
                public void notLeader() {
                    isLeader.set(false);
                    LOG.info(">>>My monitor role is Follower.");
                }
            });
            this.leaderLatch.start();
            // Sleep briefly so the leader election can finish before moving on (default 5s).
            Thread.sleep(flinkConfig.getMonitorElectionWaitTime());
        }
    } catch (Exception e) {
        LOG.error("join leader election failed.", e);
    }
    LOG.warn("connector zk success...");
}
Also used : PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) ExponentialBackoffRetry(org.apache.flink.shaded.curator.org.apache.curator.retry.ExponentialBackoffRetry) LeaderLatchListener(org.apache.flink.shaded.curator.org.apache.curator.framework.recipes.leader.LeaderLatchListener) ProgramMissingJobException(org.apache.flink.client.program.ProgramMissingJobException) FlinkException(org.apache.flink.util.FlinkException) MalformedURLException(java.net.MalformedURLException) IOException(java.io.IOException)
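
initZkClient builds the CuratorFramework client and then joins a leader election through getLeaderLatch(), whose body is not shown here. A minimal sketch of how such a latch is usually constructed from the started client follows; the ZooKeeper latch path is a placeholder, not the path used by SessionClientFactory.

import org.apache.flink.shaded.curator.org.apache.curator.framework.CuratorFramework;
import org.apache.flink.shaded.curator.org.apache.curator.framework.recipes.leader.LeaderLatch;

public class LeaderLatchSketch {

    public static LeaderLatch buildLeaderLatch(CuratorFramework zkClient) {
        // Every session-monitor instance creates a latch on the same ZooKeeper path;
        // Curator elects exactly one of them as leader and notifies the listeners.
        return new LeaderLatch(zkClient, "/taier/flink/session-monitor-leader");
    }
}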

Example 45 with PluginDefineException

Use of com.dtstack.taier.pluginapi.exception.PluginDefineException in project Taier by DTStack.

The class SessionClientFactory, method getKeytabFilesAndSetSecurityConfig:

private List<File> getKeytabFilesAndSetSecurityConfig(Configuration config) {
    Map<String, File> keytabs = new HashMap<>();
    String remoteDir = flinkConfig.getRemoteDir();
    // keytab used for job submission
    String clusterKeytabDirPath = ConfigConstant.LOCAL_KEYTAB_DIR_PARENT + remoteDir;
    File clusterKeytabDir = new File(clusterKeytabDirPath);
    File[] clusterKeytabFiles = clusterKeytabDir.listFiles();
    if (clusterKeytabFiles == null || clusterKeytabFiles.length == 0) {
        throw new PluginDefineException("not find keytab file from " + clusterKeytabDirPath);
    }
    for (File file : clusterKeytabFiles) {
        String fileName = file.getName();
        String keytabPath = file.getAbsolutePath();
        String keytabFileName = flinkConfig.getPrincipalFile();
        if (StringUtils.equals(fileName, keytabFileName)) {
            String principal = flinkConfig.getPrincipal();
            if (StringUtils.isEmpty(principal)) {
                principal = KerberosUtils.getPrincipal(keytabPath);
            }
            config.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, keytabPath);
            config.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, principal);
            continue;
        }
        keytabs.put(file.getName(), file);
    }
    return keytabs.entrySet().stream().map(Map.Entry::getValue).collect(Collectors.toList());
}
Also used : SessionCheckInterval(com.dtstack.taier.flink.entity.SessionCheckInterval) SecurityOptions(org.apache.flink.configuration.SecurityOptions) Arrays(java.util.Arrays) FlinkConfUtil(com.dtstack.taier.flink.util.FlinkConfUtil) ClientUtils(org.apache.flink.client.ClientUtils) FileSystem(org.apache.hadoop.fs.FileSystem) URL(java.net.URL) JobGraph(org.apache.flink.runtime.jobgraph.JobGraph) LoggerFactory(org.slf4j.LoggerFactory) ErrorMessageConsts(com.dtstack.taier.flink.constrant.ErrorMessageConsts) YarnClusterDescriptor(org.apache.flink.yarn.YarnClusterDescriptor) StringUtils(org.apache.commons.lang3.StringUtils) CuratorFramework(org.apache.flink.shaded.curator.org.apache.curator.framework.CuratorFramework) KerberosUtils(com.dtstack.taier.base.util.KerberosUtils) LeaderLatchListener(org.apache.flink.shaded.curator.org.apache.curator.framework.recipes.leader.LeaderLatchListener) ExponentialBackoffRetry(org.apache.flink.shaded.curator.org.apache.curator.retry.ExponentialBackoffRetry) ProgramMissingJobException(org.apache.flink.client.program.ProgramMissingJobException) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) FlinkConfig(com.dtstack.taier.flink.FlinkConfig) Map(java.util.Map) ApplicationReport(org.apache.hadoop.yarn.api.records.ApplicationReport) Path(org.apache.hadoop.fs.Path) CuratorFrameworkFactory(org.apache.flink.shaded.curator.org.apache.curator.framework.CuratorFrameworkFactory) EnumSet(java.util.EnumSet) TaskStatus(com.dtstack.taier.pluginapi.enums.TaskStatus) JobIdentifier(com.dtstack.taier.pluginapi.JobIdentifier) LeaderLatch(org.apache.flink.shaded.curator.org.apache.curator.framework.recipes.leader.LeaderLatch) Set(java.util.Set) JobManagerOptions(org.apache.flink.configuration.JobManagerOptions) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Collectors(java.util.stream.Collectors) CheckpointingOptions(org.apache.flink.configuration.CheckpointingOptions) YarnConfigOptions(org.apache.flink.yarn.configuration.YarnConfigOptions) JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) List(java.util.List) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) FileUtil(com.dtstack.taier.flink.util.FileUtil) ClusterClient(org.apache.flink.client.program.ClusterClient) ConfigConstant(com.dtstack.taier.pluginapi.constrant.ConfigConstant) PoolHttpClient(com.dtstack.taier.pluginapi.http.PoolHttpClient) YarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState) JSONObject(com.alibaba.fastjson.JSONObject) PackagedProgram(org.apache.flink.client.program.PackagedProgram) ClusterClientProvider(org.apache.flink.client.program.ClusterClientProvider) FlinkException(org.apache.flink.util.FlinkException) ConfigConstrant(com.dtstack.taier.flink.constrant.ConfigConstrant) FlinkClientBuilder(com.dtstack.taier.flink.FlinkClientBuilder) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) ClusterSpecification(org.apache.flink.client.deployment.ClusterSpecification) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) CustomThreadFactory(com.dtstack.taier.pluginapi.CustomThreadFactory) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) HashSet(java.util.HashSet) Lists(com.google.common.collect.Lists) CollectionUtils(org.apache.commons.collections.CollectionUtils) Service(org.apache.hadoop.service.Service) SessionHealthCheckedInfo(com.dtstack.taier.flink.entity.SessionHealthCheckedInfo) ExecutorService(java.util.concurrent.ExecutorService) FlinkUtil(com.dtstack.taier.flink.util.FlinkUtil) PackagedProgramUtils(org.apache.flink.client.program.PackagedProgramUtils) Logger(org.slf4j.Logger) MalformedURLException(java.net.MalformedURLException) Configuration(org.apache.flink.configuration.Configuration) IOException(java.io.IOException) FilesystemManager(com.dtstack.taier.base.filesystem.FilesystemManager) File(java.io.File) TimeUnit(java.util.concurrent.TimeUnit) JSON(com.alibaba.fastjson.JSON) JobID(org.apache.flink.api.common.JobID) PluginDefineException(com.dtstack.taier.pluginapi.exception.PluginDefineException) SavepointRestoreSettings(org.apache.flink.runtime.jobgraph.SavepointRestoreSettings) HighAvailabilityOptions(org.apache.flink.configuration.HighAvailabilityOptions)
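
The keytab matching flinkConfig.getPrincipalFile() is wired into Flink through SecurityOptions. The fragment below isolates that step with placeholder arguments; it shows the standard Flink options for keytab-based Kerberos login rather than additional Taier code.

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.SecurityOptions;

public class SecurityConfigSketch {

    public static Configuration kerberosConfig(String keytabPath, String principal) {
        Configuration config = new Configuration();
        // Flink's security module reads these options and logs in from the keytab
        // before the client talks to a Kerberized YARN/HDFS cluster.
        config.setString(SecurityOptions.KERBEROS_LOGIN_KEYTAB, keytabPath);
        config.setString(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, principal);
        return config;
    }
}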

Aggregations

PluginDefineException (com.dtstack.taier.pluginapi.exception.PluginDefineException) 58
IOException (java.io.IOException) 30
File (java.io.File) 13
MalformedURLException (java.net.MalformedURLException) 13
YarnClient (org.apache.hadoop.yarn.client.api.YarnClient) 11
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId) 10
JarFileInfo (com.dtstack.taier.pluginapi.JarFileInfo) 8
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration) 8
YarnException (org.apache.hadoop.yarn.exceptions.YarnException) 8
ApplicationReport (org.apache.hadoop.yarn.api.records.ApplicationReport) 7
YarnApplicationState (org.apache.hadoop.yarn.api.records.YarnApplicationState) 7
ClusterClient (org.apache.flink.client.program.ClusterClient) 6
JSONObject (com.alibaba.fastjson.JSONObject) 5
Configuration (org.apache.flink.configuration.Configuration) 5
Path (org.apache.hadoop.fs.Path) 5
KerberosUtils (com.dtstack.taier.base.util.KerberosUtils) 4
FlinkConfig (com.dtstack.taier.flink.FlinkConfig) 4
ConfigConstant (com.dtstack.taier.pluginapi.constrant.ConfigConstant) 4
URL (java.net.URL) 4
Matcher (java.util.regex.Matcher) 4