Example 51 with Writer

Use of java.io.Writer in project hive by apache.

The class TestAddResource, method setup:

@Before
public void setup() throws IOException {
    conf = new HiveConf();
    t = ResourceType.JAR;
    // Generate test jar files
    for (int i = 1; i <= 5; i++) {
        String dataFile = TEST_JAR_DIR + "testjar" + i + ".jar";
        File file = new File(dataFile);
        Writer output = new BufferedWriter(new FileWriter(file));
        output.write("sample");
        output.close();
    }
}
Also used: FileWriter (java.io.FileWriter), BufferedWriter (java.io.BufferedWriter), Writer (java.io.Writer), File (java.io.File), HiveConf (org.apache.hadoop.hive.conf.HiveConf), Before (org.junit.Before)
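
The Writer above is opened and closed by hand, so the file handle would leak if write() threw. A minimal sketch of the same fixture generation with try-with-resources (the TEST_JAR_DIR value here is an assumption; the Hive test defines its own constant):

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class JarFixtures {

    // Assumed scratch directory; the Hive test supplies its own TEST_JAR_DIR.
    private static final String TEST_JAR_DIR = "/tmp/test-jars/";

    public static void write() throws IOException {
        for (int i = 1; i <= 5; i++) {
            String dataFile = TEST_JAR_DIR + "testjar" + i + ".jar";
            // try-with-resources closes the Writer even if write() throws
            try (Writer output = new BufferedWriter(new FileWriter(dataFile))) {
                output.write("sample");
            }
        }
    }
}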

Example 52 with Writer

Use of java.io.Writer in project hive by apache.

The class SparkClientImpl, method startDriver:

private Thread startDriver(final RpcServer rpcServer, final String clientId, final String secret) throws IOException {
    Runnable runnable;
    final String serverAddress = rpcServer.getAddress();
    final String serverPort = String.valueOf(rpcServer.getPort());
    if (conf.containsKey(SparkClientFactory.CONF_KEY_IN_PROCESS)) {
        // Mostly for testing things quickly. Do not do this in production.
        // When invoked in-process, the driver inherits the environment variables of the parent.
        LOG.warn("!!!! Running remote driver in-process. !!!!");
        runnable = new Runnable() {

            @Override
            public void run() {
                List<String> args = Lists.newArrayList();
                args.add("--remote-host");
                args.add(serverAddress);
                args.add("--remote-port");
                args.add(serverPort);
                args.add("--client-id");
                args.add(clientId);
                args.add("--secret");
                args.add(secret);
                for (Map.Entry<String, String> e : conf.entrySet()) {
                    args.add("--conf");
                    args.add(String.format("%s=%s", e.getKey(), conf.get(e.getKey())));
                }
                try {
                    RemoteDriver.main(args.toArray(new String[args.size()]));
                } catch (Exception e) {
                    LOG.error("Error running driver.", e);
                }
            }
        };
    } else {
        // If a Spark installation is provided, use the spark-submit script. Otherwise, call the
        // SparkSubmit class directly, which has some caveats (like having to provide a proper
        // version of Guava on the classpath depending on the deploy mode).
        String sparkHome = Strings.emptyToNull(conf.get(SPARK_HOME_KEY));
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getenv(SPARK_HOME_ENV));
        }
        if (sparkHome == null) {
            sparkHome = Strings.emptyToNull(System.getProperty(SPARK_HOME_KEY));
        }
        String sparkLogDir = conf.get("hive.spark.log.dir");
        if (sparkLogDir == null) {
            if (sparkHome == null) {
                sparkLogDir = "./target/";
            } else {
                sparkLogDir = sparkHome + "/logs/";
            }
        }
        String osxTestOpts = "";
        if (Strings.nullToEmpty(System.getProperty("os.name")).toLowerCase().contains("mac")) {
            osxTestOpts = Strings.nullToEmpty(System.getenv(OSX_TEST_OPTS));
        }
        String driverJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir, osxTestOpts, conf.get(DRIVER_OPTS_KEY));
        String executorJavaOpts = Joiner.on(" ").skipNulls().join("-Dhive.spark.log.dir=" + sparkLogDir, osxTestOpts, conf.get(EXECUTOR_OPTS_KEY));
        // Create a file with all the job properties to be read by spark-submit. Change the
        // file's permissions so that only the owner can read it. This avoids having the
        // connection secret show up in the child process's command line.
        File properties = File.createTempFile("spark-submit.", ".properties");
        if (!properties.setReadable(false) || !properties.setReadable(true, true)) {
            throw new IOException("Cannot change permissions of job properties file.");
        }
        properties.deleteOnExit();
        Properties allProps = new Properties();
        // first load the defaults from spark-defaults.conf if available
        try {
            URL sparkDefaultsUrl = Thread.currentThread().getContextClassLoader().getResource("spark-defaults.conf");
            if (sparkDefaultsUrl != null) {
                LOG.info("Loading spark defaults: " + sparkDefaultsUrl);
                allProps.load(new ByteArrayInputStream(Resources.toByteArray(sparkDefaultsUrl)));
            }
        } catch (Exception e) {
            String msg = "Exception trying to load spark-defaults.conf: " + e;
            throw new IOException(msg, e);
        }
        // then load the SparkClientImpl config
        for (Map.Entry<String, String> e : conf.entrySet()) {
            allProps.put(e.getKey(), conf.get(e.getKey()));
        }
        allProps.put(SparkClientFactory.CONF_CLIENT_ID, clientId);
        allProps.put(SparkClientFactory.CONF_KEY_SECRET, secret);
        allProps.put(DRIVER_OPTS_KEY, driverJavaOpts);
        allProps.put(EXECUTOR_OPTS_KEY, executorJavaOpts);
        String isTesting = conf.get("spark.testing");
        if (isTesting != null && isTesting.equalsIgnoreCase("true")) {
            String hiveHadoopTestClasspath = Strings.nullToEmpty(System.getenv("HIVE_HADOOP_TEST_CLASSPATH"));
            if (!hiveHadoopTestClasspath.isEmpty()) {
                String extraDriverClasspath = Strings.nullToEmpty((String) allProps.get(DRIVER_EXTRA_CLASSPATH));
                if (extraDriverClasspath.isEmpty()) {
                    allProps.put(DRIVER_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraDriverClasspath = extraDriverClasspath.endsWith(File.pathSeparator) ? extraDriverClasspath : extraDriverClasspath + File.pathSeparator;
                    allProps.put(DRIVER_EXTRA_CLASSPATH, extraDriverClasspath + hiveHadoopTestClasspath);
                }
                String extraExecutorClasspath = Strings.nullToEmpty((String) allProps.get(EXECUTOR_EXTRA_CLASSPATH));
                if (extraExecutorClasspath.isEmpty()) {
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, hiveHadoopTestClasspath);
                } else {
                    extraExecutorClasspath = extraExecutorClasspath.endsWith(File.pathSeparator) ? extraExecutorClasspath : extraExecutorClasspath + File.pathSeparator;
                    allProps.put(EXECUTOR_EXTRA_CLASSPATH, extraExecutorClasspath + hiveHadoopTestClasspath);
                }
            }
        }
        Writer writer = new OutputStreamWriter(new FileOutputStream(properties), Charsets.UTF_8);
        try {
            allProps.store(writer, "Spark Context configuration");
        } finally {
            writer.close();
        }
        // Define how to pass options to the child process. If launching in client (or local)
        // mode, the driver options need to be passed directly on the command line. Otherwise,
        // SparkSubmit will take care of that for us.
        String master = conf.get("spark.master");
        Preconditions.checkArgument(master != null, "spark.master is not defined.");
        String deployMode = conf.get("spark.submit.deployMode");
        List<String> argv = Lists.newLinkedList();
        if (sparkHome != null) {
            argv.add(new File(sparkHome, "bin/spark-submit").getAbsolutePath());
        } else {
            LOG.info("No spark.home provided, calling SparkSubmit directly.");
            argv.add(new File(System.getProperty("java.home"), "bin/java").getAbsolutePath());
            if (master.startsWith("local") || master.startsWith("mesos") || SparkClientUtilities.isYarnClientMode(master, deployMode) || master.startsWith("spark")) {
                String mem = conf.get("spark.driver.memory");
                if (mem != null) {
                    argv.add("-Xms" + mem);
                    argv.add("-Xmx" + mem);
                }
                String cp = conf.get("spark.driver.extraClassPath");
                if (cp != null) {
                    argv.add("-classpath");
                    argv.add(cp);
                }
                String libPath = conf.get("spark.driver.extraLibPath");
                if (libPath != null) {
                    argv.add("-Djava.library.path=" + libPath);
                }
                String extra = conf.get(DRIVER_OPTS_KEY);
                if (extra != null) {
                    for (String opt : extra.split("[ ]")) {
                        if (!opt.trim().isEmpty()) {
                            argv.add(opt.trim());
                        }
                    }
                }
            }
            argv.add("org.apache.spark.deploy.SparkSubmit");
        }
        if (SparkClientUtilities.isYarnClusterMode(master, deployMode)) {
            String executorCores = conf.get("spark.executor.cores");
            if (executorCores != null) {
                argv.add("--executor-cores");
                argv.add(executorCores);
            }
            String executorMemory = conf.get("spark.executor.memory");
            if (executorMemory != null) {
                argv.add("--executor-memory");
                argv.add(executorMemory);
            }
            String numOfExecutors = conf.get("spark.executor.instances");
            if (numOfExecutors != null) {
                argv.add("--num-executors");
                argv.add(numOfExecutors);
            }
        }
        // If Kerberos is enabled, authenticate (via kinit, or by passing the principal and
        // keytab to spark-submit) so the driver can renew credentials as a
        // long-running application.
        if ("kerberos".equals(hiveConf.get(HADOOP_SECURITY_AUTHENTICATION))) {
            String principal = SecurityUtil.getServerPrincipal(hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL), "0.0.0.0");
            String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
            if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
                List<String> kinitArgv = Lists.newLinkedList();
                kinitArgv.add("kinit");
                kinitArgv.add(principal);
                kinitArgv.add("-k");
                kinitArgv.add("-t");
                kinitArgv.add(keyTabFile + ";");
                kinitArgv.addAll(argv);
                argv = kinitArgv;
            } else {
                // if doAs is not enabled, we pass the principal/keytab to spark-submit in order to
                // support the possible delegation token renewal in Spark
                argv.add("--principal");
                argv.add(principal);
                argv.add("--keytab");
                argv.add(keyTabFile);
            }
        }
        if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS)) {
            try {
                String currentUser = Utils.getUGI().getShortUserName();
                // do not do impersonation in CLI mode
                if (!currentUser.equals(System.getProperty("user.name"))) {
                    LOG.info("Attempting impersonation of " + currentUser);
                    argv.add("--proxy-user");
                    argv.add(currentUser);
                }
            } catch (Exception e) {
                String msg = "Cannot obtain username: " + e;
                throw new IllegalStateException(msg, e);
            }
        }
        argv.add("--properties-file");
        argv.add(properties.getAbsolutePath());
        argv.add("--class");
        argv.add(RemoteDriver.class.getName());
        String jar = "spark-internal";
        if (SparkContext.jarOfClass(this.getClass()).isDefined()) {
            jar = SparkContext.jarOfClass(this.getClass()).get();
        }
        argv.add(jar);
        argv.add("--remote-host");
        argv.add(serverAddress);
        argv.add("--remote-port");
        argv.add(serverPort);
        // Pass the hive.spark.* RSC keys on the command line via --conf,
        // as --properties-file contains the spark.* keys that are meant for the SparkConf object.
        for (String hiveSparkConfKey : RpcConfiguration.HIVE_SPARK_RSC_CONFIGS) {
            String value = RpcConfiguration.getValue(hiveConf, hiveSparkConfKey);
            argv.add("--conf");
            argv.add(String.format("%s=%s", hiveSparkConfKey, value));
        }
        String cmd = Joiner.on(" ").join(argv);
        LOG.info("Running client driver with argv: {}", cmd);
        ProcessBuilder pb = new ProcessBuilder("sh", "-c", cmd);
        // Prevent hive configurations from being visible in Spark.
        pb.environment().remove("HIVE_HOME");
        pb.environment().remove("HIVE_CONF_DIR");
        // Add credential provider password to the child process's environment
        // In case of Spark the credential provider location is provided in the jobConf when the job is submitted
        String password = getSparkJobCredentialProviderPassword();
        if (password != null) {
            pb.environment().put(Constants.HADOOP_CREDENTIAL_PASSWORD_ENVVAR, password);
        }
        if (isTesting != null) {
            pb.environment().put("SPARK_TESTING", isTesting);
        }
        final Process child = pb.start();
        int childId = childIdGenerator.incrementAndGet();
        final List<String> childErrorLog = new ArrayList<String>();
        redirect("stdout-redir-" + childId, new Redirector(child.getInputStream()));
        redirect("stderr-redir-" + childId, new Redirector(child.getErrorStream(), childErrorLog));
        runnable = new Runnable() {

            @Override
            public void run() {
                try {
                    int exitCode = child.waitFor();
                    if (exitCode != 0) {
                        StringBuilder errStr = new StringBuilder();
                        for (String s : childErrorLog) {
                            errStr.append(s);
                            errStr.append('\n');
                        }
                        rpcServer.cancelClient(clientId, "Child process exited before connecting back with error log " + errStr.toString());
                        LOG.warn("Child process exited with code {}", exitCode);
                    }
                } catch (InterruptedException ie) {
                    LOG.warn("Waiting thread interrupted, killing child process.");
                    Thread.interrupted();
                    child.destroy();
                } catch (Exception e) {
                    LOG.warn("Exception while waiting for child process.", e);
                }
            }
        };
    }
    Thread thread = new Thread(runnable);
    thread.setDaemon(true);
    thread.setName("Driver");
    thread.start();
    return thread;
}
Also used: ArrayList (java.util.ArrayList), Properties (java.util.Properties), URL (java.net.URL), List (java.util.List), Map (java.util.Map), IOException (java.io.IOException), TimeoutException (java.util.concurrent.TimeoutException), SparkException (org.apache.spark.SparkException), ByteArrayInputStream (java.io.ByteArrayInputStream), FileOutputStream (java.io.FileOutputStream), OutputStreamWriter (java.io.OutputStreamWriter), File (java.io.File), Writer (java.io.Writer)
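
The detail worth copying from this method is that the connection secret never appears on a child process command line: the configuration goes into a temp file whose read permission is tightened to the owner before anything is written. A standalone sketch of that pattern, substituting java.nio StandardCharsets for Guava's Charsets (class and file names here are illustrative):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

public class SecretPropertiesFile {

    // Write props to a temp file that only the owning user can read.
    public static File write(Properties props) throws IOException {
        File f = File.createTempFile("job.", ".properties");
        // Revoke read for everyone, then grant it back to the owner only,
        // before any secret is written to disk.
        if (!f.setReadable(false) || !f.setReadable(true, true)) {
            throw new IOException("Cannot restrict permissions on " + f);
        }
        f.deleteOnExit();
        try (Writer writer = new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8)) {
            props.store(writer, "job configuration");
        }
        return f;
    }
}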

Example 53 with Writer

Use of java.io.Writer in project storm by apache.

The class Container, method writeLogMetadata:

/**
     * Write out the file used by the log viewer to allow/reject log access
     * @param user the user this is going to run as
     * @throws IOException on any error
     */
@SuppressWarnings("unchecked")
protected void writeLogMetadata(String user) throws IOException {
    _type.assertFull();
    Map<String, Object> data = new HashMap<>();
    data.put(Config.TOPOLOGY_SUBMITTER_USER, user);
    data.put("worker-id", _workerId);
    Set<String> logsGroups = new HashSet<>();
    if (_topoConf.get(Config.LOGS_GROUPS) != null) {
        List<String> groups = (List<String>) _topoConf.get(Config.LOGS_GROUPS);
        for (String group : groups) {
            logsGroups.add(group);
        }
    }
    if (_topoConf.get(Config.TOPOLOGY_GROUPS) != null) {
        List<String> topGroups = (List<String>) _topoConf.get(Config.TOPOLOGY_GROUPS);
        logsGroups.addAll(topGroups);
    }
    data.put(Config.LOGS_GROUPS, logsGroups.toArray());
    Set<String> logsUsers = new HashSet<>();
    if (_topoConf.get(Config.LOGS_USERS) != null) {
        List<String> logUsers = (List<String>) _topoConf.get(Config.LOGS_USERS);
        for (String logUser : logUsers) {
            logsUsers.add(logUser);
        }
    }
    if (_topoConf.get(Config.TOPOLOGY_USERS) != null) {
        List<String> topUsers = (List<String>) _topoConf.get(Config.TOPOLOGY_USERS);
        for (String logUser : topUsers) {
            logsUsers.add(logUser);
        }
    }
    data.put(Config.LOGS_USERS, logsUsers.toArray());
    File file = ConfigUtils.getLogMetaDataFile(_conf, _topologyId, _port);
    Yaml yaml = new Yaml();
    try (Writer writer = _ops.getWriter(file)) {
        yaml.dump(data, writer);
    }
}
Also used: HashMap (java.util.HashMap), ArrayList (java.util.ArrayList), List (java.util.List), HashSet (java.util.HashSet), File (java.io.File), Yaml (org.yaml.snakeyaml.Yaml), Writer (java.io.Writer)
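
The write itself is a plain SnakeYAML dump of a Map through whatever Writer _ops.getWriter(file) returns. A minimal sketch of the same dump against an ordinary file, assuming SnakeYAML is on the classpath (the file name and map entries are illustrative):

import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.HashMap;
import java.util.Map;
import org.yaml.snakeyaml.Yaml;

public class LogMetadataSketch {
    public static void main(String[] args) throws IOException {
        Map<String, Object> data = new HashMap<>();
        data.put("topology.submitter.user", "alice"); // illustrative values
        data.put("worker-id", "worker-1");
        Yaml yaml = new Yaml();
        // try-with-resources closes the Writer once the document is dumped
        try (Writer writer = new FileWriter("worker.yaml")) {
            yaml.dump(data, writer);
        }
    }
}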

Example 54 with Writer

Use of java.io.Writer in project tomcat by apache.

The class PersistentProviderRegistrations, method writeProviders:

static void writeProviders(Providers providers, File configFile) {
    File configFileOld = new File(configFile.getAbsolutePath() + ".old");
    File configFileNew = new File(configFile.getAbsolutePath() + ".new");
    // Remove left over temporary files if present
    if (configFileOld.exists()) {
        if (!configFileOld.delete()) {
            throw new SecurityException(sm.getString("persistentProviderRegistrations.existsDeleteFail", configFileOld.getAbsolutePath()));
        }
    }
    if (configFileNew.exists()) {
        if (!configFileNew.delete()) {
            throw new SecurityException(sm.getString("persistentProviderRegistrations.existsDeleteFail", configFileNew.getAbsolutePath()));
        }
    }
    // Write out the providers to the temporary new file
    try (OutputStream fos = new FileOutputStream(configFileNew);
        Writer writer = new OutputStreamWriter(fos, StandardCharsets.UTF_8)) {
        writer.write("<?xml version='1.0' encoding='utf-8'?>\n" + "<jaspic-providers\n" + "    xmlns=\"http://tomcat.apache.org/xml\"\n" + "    xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" + "    xsi:schemaLocation=\"http://tomcat.apache.org/xml jaspic-providers.xsd\"\n" + "    version=\"1.0\">\n");
        for (Provider provider : providers.providers) {
            writer.write("  <provider className=\"");
            writer.write(provider.getClassName());
            writer.write("\" layer=\"");
            writer.write(provider.getLayer());
            writer.write("\" appContext=\"");
            writer.write(provider.getAppContext());
            if (provider.getDescription() != null) {
                writer.write("\" description=\"");
                writer.write(provider.getDescription());
            }
            writer.write("\">\n");
            for (Entry<String, String> entry : provider.getProperties().entrySet()) {
                writer.write("    <property name=\"");
                writer.write(entry.getKey());
                writer.write("\" value=\"");
                writer.write(entry.getValue());
                writer.write("\"/>\n");
            }
            writer.write("  </provider>\n");
        }
        writer.write("</jaspic-providers>\n");
    } catch (IOException e) {
        configFileNew.delete();
        throw new SecurityException(e);
    }
    // Move the current file out of the way
    if (configFile.isFile()) {
        if (!configFile.renameTo(configFileOld)) {
            throw new SecurityException(sm.getString("persistentProviderRegistrations.moveFail", configFile.getAbsolutePath(), configFileOld.getAbsolutePath()));
        }
    }
    // Move the new file into place
    if (!configFileNew.renameTo(configFile)) {
        throw new SecurityException(sm.getString("persistentProviderRegistrations.moveFail", configFileNew.getAbsolutePath(), configFile.getAbsolutePath()));
    }
    // Remove the old file
    if (configFileOld.exists() && !configFileOld.delete()) {
        log.warn(sm.getString("persistentProviderRegistrations.deleteFail", configFileOld.getAbsolutePath()));
    }
}
Also used: OutputStream (java.io.OutputStream), FileOutputStream (java.io.FileOutputStream), OutputStreamWriter (java.io.OutputStreamWriter), IOException (java.io.IOException), File (java.io.File), Writer (java.io.Writer)
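
The rename dance around the try block (write everything to .new, shunt the live file to .old, promote .new, then delete .old) keeps at least one complete copy of the registrations on disk at every step. A minimal sketch of the same safe-replace idea with java.nio.file, which can express the promotion as a single atomic move where the filesystem supports it (paths are illustrative):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class SafeReplaceSketch {

    public static void replace(Path target, String content) throws IOException {
        Path tmp = target.resolveSibling(target.getFileName() + ".new");
        // Write the complete new content to a sibling temp file first
        Files.write(tmp, content.getBytes(StandardCharsets.UTF_8));
        // Swap it in; ATOMIC_MOVE throws rather than leave a torn file,
        // though not every filesystem supports it
        Files.move(tmp, target, StandardCopyOption.REPLACE_EXISTING,
                StandardCopyOption.ATOMIC_MOVE);
    }

    public static void main(String[] args) throws IOException {
        replace(Paths.get("jaspic-providers.xml"), "<jaspic-providers/>\n");
    }
}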

Example 55 with Writer

Use of java.io.Writer in project tomcat by apache.

The class WebdavStatus, method doLock:

/**
     * LOCK Method.
     * @param req The Servlet request
     * @param resp The Servlet response
     * @throws ServletException If an error occurs
     * @throws IOException If an IO error occurs
     */
protected void doLock(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    if (readOnly) {
        resp.sendError(WebdavStatus.SC_FORBIDDEN);
        return;
    }
    if (isLocked(req)) {
        resp.sendError(WebdavStatus.SC_LOCKED);
        return;
    }
    LockInfo lock = new LockInfo();
    // Parsing lock request
    // Parsing depth header
    String depthStr = req.getHeader("Depth");
    if (depthStr == null) {
        lock.depth = maxDepth;
    } else {
        if (depthStr.equals("0")) {
            lock.depth = 0;
        } else {
            lock.depth = maxDepth;
        }
    }
    // Parsing timeout header
    int lockDuration = DEFAULT_TIMEOUT;
    String lockDurationStr = req.getHeader("Timeout");
    if (lockDurationStr == null) {
        lockDuration = DEFAULT_TIMEOUT;
    } else {
        int commaPos = lockDurationStr.indexOf(',');
        // If multiple timeouts, just use the first
        if (commaPos != -1) {
            lockDurationStr = lockDurationStr.substring(0, commaPos);
        }
        if (lockDurationStr.startsWith("Second-")) {
            lockDuration = Integer.parseInt(lockDurationStr.substring(7));
        } else {
            if (lockDurationStr.equalsIgnoreCase("infinity")) {
                lockDuration = MAX_TIMEOUT;
            } else {
                try {
                    lockDuration = Integer.parseInt(lockDurationStr);
                } catch (NumberFormatException e) {
                    lockDuration = MAX_TIMEOUT;
                }
            }
        }
        if (lockDuration == 0) {
            lockDuration = DEFAULT_TIMEOUT;
        }
        if (lockDuration > MAX_TIMEOUT) {
            lockDuration = MAX_TIMEOUT;
        }
    }
    lock.expiresAt = System.currentTimeMillis() + (lockDuration * 1000);
    int lockRequestType = LOCK_CREATION;
    Node lockInfoNode = null;
    DocumentBuilder documentBuilder = getDocumentBuilder();
    try {
        Document document = documentBuilder.parse(new InputSource(req.getInputStream()));
        // Get the root element of the document
        Element rootElement = document.getDocumentElement();
        lockInfoNode = rootElement;
    } catch (IOException e) {
        lockRequestType = LOCK_REFRESH;
    } catch (SAXException e) {
        lockRequestType = LOCK_REFRESH;
    }
    if (lockInfoNode != null) {
        // Reading lock information
        NodeList childList = lockInfoNode.getChildNodes();
        StringWriter strWriter = null;
        DOMWriter domWriter = null;
        Node lockScopeNode = null;
        Node lockTypeNode = null;
        Node lockOwnerNode = null;
        for (int i = 0; i < childList.getLength(); i++) {
            Node currentNode = childList.item(i);
            switch(currentNode.getNodeType()) {
                case Node.TEXT_NODE:
                    break;
                case Node.ELEMENT_NODE:
                    String nodeName = currentNode.getNodeName();
                    if (nodeName.endsWith("lockscope")) {
                        lockScopeNode = currentNode;
                    }
                    if (nodeName.endsWith("locktype")) {
                        lockTypeNode = currentNode;
                    }
                    if (nodeName.endsWith("owner")) {
                        lockOwnerNode = currentNode;
                    }
                    break;
            }
        }
        if (lockScopeNode != null) {
            childList = lockScopeNode.getChildNodes();
            for (int i = 0; i < childList.getLength(); i++) {
                Node currentNode = childList.item(i);
                switch(currentNode.getNodeType()) {
                    case Node.TEXT_NODE:
                        break;
                    case Node.ELEMENT_NODE:
                        String tempScope = currentNode.getNodeName();
                        if (tempScope.indexOf(':') != -1) {
                            lock.scope = tempScope.substring(tempScope.indexOf(':') + 1);
                        } else {
                            lock.scope = tempScope;
                        }
                        break;
                }
            }
            if (lock.scope == null) {
                // Bad request
                resp.setStatus(WebdavStatus.SC_BAD_REQUEST);
            }
        } else {
            // Bad request
            resp.setStatus(WebdavStatus.SC_BAD_REQUEST);
        }
        if (lockTypeNode != null) {
            childList = lockTypeNode.getChildNodes();
            for (int i = 0; i < childList.getLength(); i++) {
                Node currentNode = childList.item(i);
                switch(currentNode.getNodeType()) {
                    case Node.TEXT_NODE:
                        break;
                    case Node.ELEMENT_NODE:
                        String tempType = currentNode.getNodeName();
                        if (tempType.indexOf(':') != -1) {
                            lock.type = tempType.substring(tempType.indexOf(':') + 1);
                        } else {
                            lock.type = tempType;
                        }
                        break;
                }
            }
            if (lock.type == null) {
                // Bad request
                resp.setStatus(WebdavStatus.SC_BAD_REQUEST);
            }
        } else {
            // Bad request
            resp.setStatus(WebdavStatus.SC_BAD_REQUEST);
        }
        if (lockOwnerNode != null) {
            childList = lockOwnerNode.getChildNodes();
            for (int i = 0; i < childList.getLength(); i++) {
                Node currentNode = childList.item(i);
                switch(currentNode.getNodeType()) {
                    case Node.TEXT_NODE:
                        lock.owner += currentNode.getNodeValue();
                        break;
                    case Node.ELEMENT_NODE:
                        strWriter = new StringWriter();
                        domWriter = new DOMWriter(strWriter, true);
                        domWriter.print(currentNode);
                        lock.owner += strWriter.toString();
                        break;
                }
            }
            if (lock.owner == null) {
                // Bad request
                resp.setStatus(WebdavStatus.SC_BAD_REQUEST);
            }
        } else {
            lock.owner = "";
        }
    }
    String path = getRelativePath(req);
    lock.path = path;
    WebResource resource = resources.getResource(path);
    Enumeration<LockInfo> locksList = null;
    if (lockRequestType == LOCK_CREATION) {
        // Generating lock id
        String lockTokenStr = req.getServletPath() + "-" + lock.type + "-" + lock.scope + "-" + req.getUserPrincipal() + "-" + lock.depth + "-" + lock.owner + "-" + lock.tokens + "-" + lock.expiresAt + "-" + System.currentTimeMillis() + "-" + secret;
        String lockToken = MD5Encoder.encode(ConcurrentMessageDigest.digestMD5(lockTokenStr.getBytes(StandardCharsets.ISO_8859_1)));
        if (resource.isDirectory() && lock.depth == maxDepth) {
            // Locking a collection (and all its member resources)
            // Checking if a child resource of this collection is
            // already locked
            Vector<String> lockPaths = new Vector<>();
            locksList = collectionLocks.elements();
            while (locksList.hasMoreElements()) {
                LockInfo currentLock = locksList.nextElement();
                if (currentLock.hasExpired()) {
                    resourceLocks.remove(currentLock.path);
                    continue;
                }
                if ((currentLock.path.startsWith(lock.path)) && ((currentLock.isExclusive()) || (lock.isExclusive()))) {
                    // A child collection of this collection is locked
                    lockPaths.addElement(currentLock.path);
                }
            }
            locksList = resourceLocks.elements();
            while (locksList.hasMoreElements()) {
                LockInfo currentLock = locksList.nextElement();
                if (currentLock.hasExpired()) {
                    resourceLocks.remove(currentLock.path);
                    continue;
                }
                if ((currentLock.path.startsWith(lock.path)) && ((currentLock.isExclusive()) || (lock.isExclusive()))) {
                    // A child resource of this collection is locked
                    lockPaths.addElement(currentLock.path);
                }
            }
            if (!lockPaths.isEmpty()) {
                // One of the child paths was locked
                // We generate a multistatus error report
                Enumeration<String> lockPathsList = lockPaths.elements();
                resp.setStatus(WebdavStatus.SC_CONFLICT);
                XMLWriter generatedXML = new XMLWriter();
                generatedXML.writeXMLHeader();
                generatedXML.writeElement("D", DEFAULT_NAMESPACE, "multistatus", XMLWriter.OPENING);
                while (lockPathsList.hasMoreElements()) {
                    generatedXML.writeElement("D", "response", XMLWriter.OPENING);
                    generatedXML.writeElement("D", "href", XMLWriter.OPENING);
                    generatedXML.writeText(lockPathsList.nextElement());
                    generatedXML.writeElement("D", "href", XMLWriter.CLOSING);
                    generatedXML.writeElement("D", "status", XMLWriter.OPENING);
                    generatedXML.writeText("HTTP/1.1 " + WebdavStatus.SC_LOCKED + " " + WebdavStatus.getStatusText(WebdavStatus.SC_LOCKED));
                    generatedXML.writeElement("D", "status", XMLWriter.CLOSING);
                    generatedXML.writeElement("D", "response", XMLWriter.CLOSING);
                }
                generatedXML.writeElement("D", "multistatus", XMLWriter.CLOSING);
                Writer writer = resp.getWriter();
                writer.write(generatedXML.toString());
                writer.close();
                return;
            }
            boolean addLock = true;
            // Checking if there is already a shared lock on this path
            locksList = collectionLocks.elements();
            while (locksList.hasMoreElements()) {
                LockInfo currentLock = locksList.nextElement();
                if (currentLock.path.equals(lock.path)) {
                    if (currentLock.isExclusive()) {
                        resp.sendError(WebdavStatus.SC_LOCKED);
                        return;
                    } else {
                        if (lock.isExclusive()) {
                            resp.sendError(WebdavStatus.SC_LOCKED);
                            return;
                        }
                    }
                    currentLock.tokens.addElement(lockToken);
                    lock = currentLock;
                    addLock = false;
                }
            }
            if (addLock) {
                lock.tokens.addElement(lockToken);
                collectionLocks.addElement(lock);
            }
        } else {
            // Locking a single resource
            // Retrieving an already existing lock on that resource
            LockInfo presentLock = resourceLocks.get(lock.path);
            if (presentLock != null) {
                if ((presentLock.isExclusive()) || (lock.isExclusive())) {
                    // If either lock is exclusive, the lock can't be
                    // granted
                    resp.sendError(WebdavStatus.SC_PRECONDITION_FAILED);
                    return;
                } else {
                    presentLock.tokens.addElement(lockToken);
                    lock = presentLock;
                }
            } else {
                lock.tokens.addElement(lockToken);
                resourceLocks.put(lock.path, lock);
                // Checking if a resource exists at this path
                if (!resource.exists()) {
                    // "Creating" a lock-null resource
                    int slash = lock.path.lastIndexOf('/');
                    String parentPath = lock.path.substring(0, slash);
                    Vector<String> lockNulls = lockNullResources.get(parentPath);
                    if (lockNulls == null) {
                        lockNulls = new Vector<>();
                        lockNullResources.put(parentPath, lockNulls);
                    }
                    lockNulls.addElement(lock.path);
                }
                // Add the Lock-Token header as by RFC 2518 8.10.1
                // - only do this for newly created locks
                resp.addHeader("Lock-Token", "<opaquelocktoken:" + lockToken + ">");
            }
        }
    }
    if (lockRequestType == LOCK_REFRESH) {
        String ifHeader = req.getHeader("If");
        if (ifHeader == null)
            ifHeader = "";
        // Checking resource locks
        LockInfo toRenew = resourceLocks.get(path);
        Enumeration<String> tokenList = null;
        if (toRenew != null) {
            // At least one of the tokens of the locks must have been given
            tokenList = toRenew.tokens.elements();
            while (tokenList.hasMoreElements()) {
                String token = tokenList.nextElement();
                if (ifHeader.indexOf(token) != -1) {
                    toRenew.expiresAt = lock.expiresAt;
                    lock = toRenew;
                }
            }
        }
        // Checking inheritable collection locks
        Enumeration<LockInfo> collectionLocksList = collectionLocks.elements();
        while (collectionLocksList.hasMoreElements()) {
            toRenew = collectionLocksList.nextElement();
            if (path.equals(toRenew.path)) {
                tokenList = toRenew.tokens.elements();
                while (tokenList.hasMoreElements()) {
                    String token = tokenList.nextElement();
                    if (ifHeader.indexOf(token) != -1) {
                        toRenew.expiresAt = lock.expiresAt;
                        lock = toRenew;
                    }
                }
            }
        }
    }
    // Set the status, then generate the XML response containing
    // the lock information
    XMLWriter generatedXML = new XMLWriter();
    generatedXML.writeXMLHeader();
    generatedXML.writeElement("D", DEFAULT_NAMESPACE, "prop", XMLWriter.OPENING);
    generatedXML.writeElement("D", "lockdiscovery", XMLWriter.OPENING);
    lock.toXML(generatedXML);
    generatedXML.writeElement("D", "lockdiscovery", XMLWriter.CLOSING);
    generatedXML.writeElement("D", "prop", XMLWriter.CLOSING);
    resp.setStatus(WebdavStatus.SC_OK);
    resp.setContentType("text/xml; charset=UTF-8");
    Writer writer = resp.getWriter();
    writer.write(generatedXML.toString());
    writer.close();
}
Also used: InputSource (org.xml.sax.InputSource), SAXException (org.xml.sax.SAXException), Node (org.w3c.dom.Node), Element (org.w3c.dom.Element), NodeList (org.w3c.dom.NodeList), Document (org.w3c.dom.Document), DocumentBuilder (javax.xml.parsers.DocumentBuilder), WebResource (org.apache.catalina.WebResource), DOMWriter (org.apache.catalina.util.DOMWriter), XMLWriter (org.apache.catalina.util.XMLWriter), StringWriter (java.io.StringWriter), Writer (java.io.Writer), IOException (java.io.IOException), Vector (java.util.Vector)
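
The lock token generated above is a hex-encoded MD5 of the request details plus a server-side secret, wrapped as an opaquelocktoken URI per RFC 2518. Tomcat uses its own MD5Encoder and ConcurrentMessageDigest helpers; a minimal sketch of the same derivation with plain JDK classes (the input fields here are illustrative):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class LockTokenSketch {

    public static String lockToken(String path, String owner, String secret)
            throws NoSuchAlgorithmException {
        // Digest the identifying fields plus a secret, as the servlet does
        String raw = path + "-" + owner + "-" + System.currentTimeMillis() + "-" + secret;
        byte[] digest = MessageDigest.getInstance("MD5")
                .digest(raw.getBytes(StandardCharsets.ISO_8859_1));
        // Hex-encode the digest (the role Tomcat's MD5Encoder plays)
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b));
        }
        return "<opaquelocktoken:" + hex + ">";
    }

    public static void main(String[] args) throws NoSuchAlgorithmException {
        System.out.println(lockToken("/docs/report.txt", "alice", "server-secret"));
    }
}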

Aggregations

Writer (java.io.Writer): 1259
OutputStreamWriter (java.io.OutputStreamWriter): 512
IOException (java.io.IOException): 414
StringWriter (java.io.StringWriter): 300
File (java.io.File): 269
FileOutputStream (java.io.FileOutputStream): 196
BufferedWriter (java.io.BufferedWriter): 178
FileWriter (java.io.FileWriter): 174
PrintWriter (java.io.PrintWriter): 159
OutputStream (java.io.OutputStream): 120
Test (org.junit.Test): 109
InputStreamReader (java.io.InputStreamReader): 71
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 64
BufferedReader (java.io.BufferedReader): 62
Reader (java.io.Reader): 62
HashMap (java.util.HashMap): 59
Map (java.util.Map): 59
ArrayList (java.util.ArrayList): 58
InputStream (java.io.InputStream): 54
Properties (java.util.Properties): 39