Search in sources :

Example 1 with UserGroupInformationService

use of org.apache.oozie.service.UserGroupInformationService in project oozie by apache.

Example taken from the method getActionData in the class LauncherHelper.

/**
 * Loads the contents of the action data sequence file under {@code actionDir} into an
 * in-memory map, reading as the workflow user via a proxy {@link UserGroupInformation}.
 * If the sequence file does not exist, falls back to reading the individual legacy
 * .properties files in the action directory (backward compatibility; to be deprecated).
 *
 * @param fs action filesystem
 * @param actionDir action directory path
 * @param conf configuration; must contain {@code OozieClient.USER_NAME} and may bound
 *             output sizes via the max-output-data / external-stats-max-size settings
 * @return map of action data keys to their string contents
 * @throws IOException if reading the action files fails
 * @throws InterruptedException if the proxy-user {@code doAs} call is interrupted
 */
public static Map<String, String> getActionData(final FileSystem fs, final Path actionDir, final Configuration conf) throws IOException, InterruptedException {
    UserGroupInformationService ugiService = Services.get().get(UserGroupInformationService.class);
    UserGroupInformation ugi = ugiService.getProxyUser(conf.get(OozieClient.USER_NAME));
    return ugi.doAs(new PrivilegedExceptionAction<Map<String, String>>() {

        @Override
        public Map<String, String> run() throws IOException {
            Map<String, String> ret = new HashMap<String, String>();
            Path seqFilePath = getActionDataSequenceFilePath(actionDir);
            if (fs.exists(seqFilePath)) {
                // try-with-resources: the original closed the reader only on the success
                // path, leaking the file handle whenever next() threw an exception
                try (SequenceFile.Reader seqFile = new SequenceFile.Reader(fs, seqFilePath, conf)) {
                    Text key = new Text(), value = new Text();
                    while (seqFile.next(key, value)) {
                        ret.put(key.toString(), value.toString());
                    }
                }
            } else {
                // maintain backward-compatibility. to be deprecated
                org.apache.hadoop.fs.FileStatus[] files = fs.listStatus(actionDir);
                if (files != null && files.length > 0) {
                    for (org.apache.hadoop.fs.FileStatus status : files) {
                        Path file = status.getPath();
                        // each branch opens its own stream and closes it via
                        // try-with-resources (the original never closed any of them);
                        // closing is safe even if the read helper already closed the reader
                        if (file.equals(new Path(actionDir, "externalChildIds.properties"))) {
                            try (BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(file)))) {
                                ret.put(LauncherAMUtils.ACTION_DATA_EXTERNAL_CHILD_IDS,
                                        IOUtils.getReaderAsString(reader, -1));
                            }
                        } else if (file.equals(new Path(actionDir, "newId.properties"))) {
                            try (BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(file)))) {
                                Properties props = PropertiesUtils.readProperties(reader, -1);
                                ret.put(LauncherAMUtils.ACTION_DATA_NEW_ID, props.getProperty("id"));
                            }
                        } else if (file.equals(new Path(actionDir, LauncherAMUtils.ACTION_DATA_OUTPUT_PROPS))) {
                            // output data is size-capped; default 2KB
                            int maxOutputData = conf.getInt(LauncherAMUtils.CONF_OOZIE_ACTION_MAX_OUTPUT_DATA, 2 * 1024);
                            try (BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(file)))) {
                                ret.put(LauncherAMUtils.ACTION_DATA_OUTPUT_PROPS, PropertiesUtils.propertiesToString(
                                        PropertiesUtils.readProperties(reader, maxOutputData)));
                            }
                        } else if (file.equals(new Path(actionDir, LauncherAMUtils.ACTION_DATA_STATS))) {
                            int statsMaxOutputData = conf.getInt(LauncherAMUtils.CONF_OOZIE_EXTERNAL_STATS_MAX_SIZE, Integer.MAX_VALUE);
                            try (BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(file)))) {
                                ret.put(LauncherAMUtils.ACTION_DATA_STATS, PropertiesUtils.propertiesToString(
                                        PropertiesUtils.readProperties(reader, statsMaxOutputData)));
                            }
                        } else if (file.equals(new Path(actionDir, LauncherAMUtils.ACTION_DATA_ERROR_PROPS))) {
                            try (BufferedReader reader = new BufferedReader(new InputStreamReader(fs.open(file)))) {
                                ret.put(LauncherAMUtils.ACTION_DATA_ERROR_PROPS, IOUtils.getReaderAsString(reader, -1));
                            }
                        }
                    }
                }
            }
            return ret;
        }
    });
}
Also used : Path(org.apache.hadoop.fs.Path) InputStreamReader(java.io.InputStreamReader) InputStream(java.io.InputStream) InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) Properties(java.util.Properties) UserGroupInformationService(org.apache.oozie.service.UserGroupInformationService) SequenceFile(org.apache.hadoop.io.SequenceFile) BufferedReader(java.io.BufferedReader) HashMap(java.util.HashMap) Map(java.util.Map) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)

Example 2 with UserGroupInformationService

use of org.apache.oozie.service.UserGroupInformationService in project oozie by apache.

Example taken from the method testChildKill in the class TestJavaActionExecutor.

/**
 * Verifies that killing a Java action also kills its tagged child YARN application:
 * submits a SleepJob-launching action, waits for a child app tagged with the launcher
 * tag to appear, kills the action, then asserts both launcher and child ended KILLED.
 */
public void testChildKill() throws Exception {
    final JobConf clusterConf = createJobConf();
    FileSystem fileSystem = FileSystem.get(clusterConf);
    Path confFile = new Path("/tmp/cluster-conf.xml");
    // try-with-resources: the original leaked the stream if writeXml() threw
    try (OutputStream out = fileSystem.create(confFile)) {
        clusterConf.writeXml(out);
    }
    String confFileName = fileSystem.makeQualified(confFile).toString() + "#core-site.xml";
    final String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class> " + SleepJob.class.getName() + " </main-class>" + "<arg>-mt</arg>" + "<arg>300000</arg>" + "<archive>" + confFileName + "</archive>" + "</java>";
    final Context context = createContext(actionXml, null);
    final String runningJob = submitAction(context);
    YarnApplicationState state = waitUntilYarnAppState(runningJob, EnumSet.of(YarnApplicationState.RUNNING));
    assertEquals(YarnApplicationState.RUNNING, state);
    WorkflowJob wfJob = context.getWorkflow();
    Configuration conf = null;
    if (wfJob.getConf() != null) {
        conf = new XConfiguration(new StringReader(wfJob.getConf()));
    }
    String launcherTag = LauncherHelper.getActionYarnTag(conf, wfJob.getParentId(), context.getAction());
    JavaActionExecutor ae = new JavaActionExecutor();
    final Configuration jobConf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
    jobConf.set(LauncherMain.CHILD_MAPREDUCE_JOB_TAGS, LauncherHelper.getTag(launcherTag));
    jobConf.setLong(LauncherMain.OOZIE_JOB_LAUNCH_TIME, context.getAction().getStartTime().getTime());
    // We have to use a proper UGI for retrieving the child apps, because the WF is
    // submitted as a test user, not as the current login user
    UserGroupInformationService ugiService = Services.get().get(UserGroupInformationService.class);
    final UserGroupInformation ugi = ugiService.getProxyUser(getTestUser());
    final Set<ApplicationId> childSet = new HashSet<>();
    // wait until we have a child MR job
    waitFor(60_000, new Predicate() {

        @Override
        public boolean evaluate() throws Exception {
            return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {

                @Override
                public Boolean run() throws Exception {
                    childSet.clear();
                    childSet.addAll(LauncherMain.getChildYarnJobs(jobConf));
                    return childSet.size() > 0;
                }
            });
        }
    });
    assertEquals(1, childSet.size());
    // kill the action - based on the job tag, the SleepJob is expected to be killed too
    ae.kill(context, context.getAction());
    HadoopAccessorService hadoopAccessorService = getHadoopAccessorService();
    Configuration config = hadoopAccessorService.createConfiguration(getJobTrackerUri());
    // try-with-resources: YarnClient is a Closeable service the original never shut down
    try (YarnClient yarnClient = hadoopAccessorService.createYarnClient(getTestUser(), config)) {
        // check that both the launcher & MR job were successfully killed
        ApplicationId jobId = childSet.iterator().next();
        assertEquals(YarnApplicationState.KILLED, yarnClient.getApplicationReport(jobId).getYarnApplicationState());
        assertTrue(ae.isCompleted(context.getAction().getExternalStatus()));
        assertEquals(WorkflowAction.Status.DONE, context.getAction().getStatus());
        assertEquals(JavaActionExecutor.KILLED, context.getAction().getExternalStatus());
        assertEquals(FinalApplicationStatus.KILLED, yarnClient.getApplicationReport(ConverterUtils.toApplicationId(runningJob)).getFinalApplicationStatus());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) XConfiguration(org.apache.oozie.util.XConfiguration) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) YarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService) ActionExecutorException(org.apache.oozie.action.ActionExecutorException) IOException(java.io.IOException) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) UserGroupInformationService(org.apache.oozie.service.UserGroupInformationService) XConfiguration(org.apache.oozie.util.XConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) StringReader(java.io.StringReader) WorkflowJob(org.apache.oozie.client.WorkflowJob) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) JobConf(org.apache.hadoop.mapred.JobConf) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) HashSet(java.util.HashSet)

Example 3 with UserGroupInformationService

use of org.apache.oozie.service.UserGroupInformationService in project oozie by apache.

Example taken from the method delete in the class FsActionExecutor.

/**
 * Deletes a path, dispatching on its URI scheme.
 * <p>
 * For filesystem URIs the path is resolved against the name-node, glob-expanded, and
 * each match is either moved to the proxy user's trash (default) or removed outright
 * when {@code skipTrash} is set. Non-filesystem URIs are delegated to their
 * {@link URIHandler}.
 *
 * @param context action execution context
 * @param fsConf filesystem action configuration
 * @param nameNodePath name-node path used to resolve relative paths
 * @param path path (possibly a glob) to delete
 * @param skipTrash flag to skip the trash.
 * @throws ActionExecutorException if any deletion or trash move fails
 */
public void delete(Context context, XConfiguration fsConf, Path nameNodePath, Path path, boolean skipTrash) throws ActionExecutorException {
    LOG.info("Deleting [{0}]. Skipping trash: [{1}]", path, skipTrash);
    URI uri = path.toUri();
    org.apache.oozie.dependency.URIHandler.Context handlerContext = null;
    try {
        URIHandler uriHandler = Services.get().get(URIHandlerService.class).getURIHandler(uri);
        if (!(uriHandler instanceof FSURIHandler)) {
            // non-filesystem scheme: let its handler perform the deletion
            handlerContext = uriHandler.getContext(uri, fsConf, context.getWorkflow().getUser(), false);
            uriHandler.delete(uri, handlerContext);
        } else {
            // Use legacy code to handle hdfs partition deletion
            path = resolveToFullPath(nameNodePath, path, true);
            final FileSystem fs = getFileSystemFor(path, context, fsConf);
            Path[] matchedPaths = FileUtil.stat2Paths(fs.globStatus(path));
            if (matchedPaths != null && matchedPaths.length > 0) {
                checkGlobMax(matchedPaths);
                for (final Path matched : matchedPaths) {
                    if (!fs.exists(matched)) {
                        continue;
                    }
                    if (skipTrash) {
                        if (!fs.delete(matched, true)) {
                            throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FS005", "delete, path [{0}] could not delete path", matched);
                        }
                    } else {
                        // Moving directory/file to trash of user.
                        UserGroupInformationService ugiService = Services.get().get(UserGroupInformationService.class);
                        UserGroupInformation proxyUgi = ugiService.getProxyUser(fs.getConf().get(OozieClient.USER_NAME));
                        proxyUgi.doAs(new PrivilegedExceptionAction<Void>() {

                            @Override
                            public Void run() throws Exception {
                                if (!new Trash(fs.getConf()).moveToTrash(matched)) {
                                    throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FS005", "Could not move path [{0}] to trash on delete", matched);
                                }
                                return null;
                            }
                        });
                    }
                }
            }
        }
    } catch (Exception ex) {
        throw convertException(ex);
    } finally {
        if (handlerContext != null) {
            handlerContext.destroy();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) ActionExecutorException(org.apache.oozie.action.ActionExecutorException) FSURIHandler(org.apache.oozie.dependency.FSURIHandler) URI(java.net.URI) Trash(org.apache.hadoop.fs.Trash) URISyntaxException(java.net.URISyntaxException) ActionExecutorException(org.apache.oozie.action.ActionExecutorException) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) IOException(java.io.IOException) AccessControlException(org.apache.hadoop.security.AccessControlException) UserGroupInformationService(org.apache.oozie.service.UserGroupInformationService) FileSystem(org.apache.hadoop.fs.FileSystem) URIHandler(org.apache.oozie.dependency.URIHandler) FSURIHandler(org.apache.oozie.dependency.FSURIHandler) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)

Aggregations

IOException (java.io.IOException)3 Path (org.apache.hadoop.fs.Path)3 UserGroupInformation (org.apache.hadoop.security.UserGroupInformation)3 UserGroupInformationService (org.apache.oozie.service.UserGroupInformationService)3 FileSystem (org.apache.hadoop.fs.FileSystem)2 ActionExecutorException (org.apache.oozie.action.ActionExecutorException)2 BufferedReader (java.io.BufferedReader)1 FileOutputStream (java.io.FileOutputStream)1 InputStream (java.io.InputStream)1 InputStreamReader (java.io.InputStreamReader)1 OutputStream (java.io.OutputStream)1 StringReader (java.io.StringReader)1 URI (java.net.URI)1 URISyntaxException (java.net.URISyntaxException)1 PrivilegedExceptionAction (java.security.PrivilegedExceptionAction)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 Map (java.util.Map)1 Properties (java.util.Properties)1 Configuration (org.apache.hadoop.conf.Configuration)1