
Example 16 with Writer

Use of java.io.Writer in project groovy by apache.

The class TemplateServlet, method service.

/**
     * Services the request with a response.
     * <p>
     * First, the request is parsed for the source file URI. If the specified file
     * cannot be found or cannot be read, an error message is sent as the response.
     *
     * @param request  The HTTP request.
     * @param response The HTTP response.
     * @throws IOException      if an input or output error occurs while the servlet is handling the HTTP request
     * @throws ServletException if the HTTP request cannot be handled
     */
public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    if (verbose) {
        log("Creating/getting cached template...");
    }
    //
    // Get the template source file handle.
    //
    Template template;
    long getMillis;
    String name;
    File file = getScriptUriAsFile(request);
    if (file != null) {
        name = file.getName();
        if (!file.exists()) {
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            // throw new IOException(file.getAbsolutePath());
            return;
        }
        if (!file.canRead()) {
            response.sendError(HttpServletResponse.SC_FORBIDDEN, "Can not read \"" + name + "\"!");
            // throw new IOException(file.getAbsolutePath());
            return;
        }
        getMillis = System.currentTimeMillis();
        template = getTemplate(file);
        getMillis = System.currentTimeMillis() - getMillis;
    } else {
        name = getScriptUri(request);
        URL url = servletContext.getResource(name);
        getMillis = System.currentTimeMillis();
        template = getTemplate(url);
        getMillis = System.currentTimeMillis() - getMillis;
    }
    //
    // Create new binding for the current request.
    //
    ServletBinding binding = new ServletBinding(request, response, servletContext);
    setVariables(binding);
    //
    // Prepare the response buffer content type _before_ getting the writer,
    // and set the status code to OK.
    //
    response.setContentType(CONTENT_TYPE_TEXT_HTML + "; charset=" + encoding);
    response.setStatus(HttpServletResponse.SC_OK);
    //
    // Get the output stream writer from the binding.
    //
    Writer out = (Writer) binding.getVariable("out");
    if (out == null) {
        out = response.getWriter();
    }
    //
    if (verbose) {
        log("Making template \"" + name + "\"...");
    }
    // String made = template.make(binding.getVariables()).toString();
    // log(" = " + made);
    long makeMillis = System.currentTimeMillis();
    template.make(binding.getVariables()).writeTo(out);
    makeMillis = System.currentTimeMillis() - makeMillis;
    if (generateBy) {
        StringBuilder sb = new StringBuilder(100);
        sb.append("\n<!-- Generated by Groovy TemplateServlet [create/get=");
        sb.append(Long.toString(getMillis));
        sb.append(" ms, make=");
        sb.append(Long.toString(makeMillis));
        sb.append(" ms] -->\n");
        out.write(sb.toString());
    }
    //
    // flush the response buffer.
    //
    response.flushBuffer();
    if (verbose) {
        log("Template \"" + name + "\" request responded. [create/get=" + getMillis + " ms, make=" + makeMillis + " ms]");
    }
}
Also used: File (java.io.File), URL (java.net.URL), Writer (java.io.Writer), Template (groovy.text.Template)
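
The pattern at the heart of the servlet is template.make(binding.getVariables()).writeTo(out). Below is a minimal standalone sketch of that same call, assuming groovy-templates is on the classpath; the template text and the TemplateWriterDemo class name are illustrative, not part of the servlet.

import groovy.text.SimpleTemplateEngine;
import groovy.text.Template;

import java.io.StringWriter;
import java.io.Writer;
import java.util.HashMap;
import java.util.Map;

public class TemplateWriterDemo {
    public static void main(String[] args) throws Exception {
        // Compile the template once; it can be rendered many times.
        Template template = new SimpleTemplateEngine().createTemplate("Hello, ${name}!");
        Map<String, Object> binding = new HashMap<>();
        binding.put("name", "Writer");
        // Render into any java.io.Writer, exactly as the servlet does.
        Writer out = new StringWriter();
        template.make(binding).writeTo(out);
        System.out.println(out); // -> Hello, Writer!
    }
}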

Example 17 with Writer

Use of java.io.Writer in project groovy by apache.

The class GroovyScriptEngineImpl, method eval.

// package-privates
Object eval(Class<?> scriptClass, final ScriptContext ctx) throws ScriptException {
    /*
     * We use the following Binding instance so that global variable lookup
     * will be done in the current ScriptContext instance.
     */
    Binding binding = new Binding(ctx.getBindings(ScriptContext.ENGINE_SCOPE)) {

        @Override
        public Object getVariable(String name) {
            synchronized (ctx) {
                int scope = ctx.getAttributesScope(name);
                if (scope != -1) {
                    return ctx.getAttribute(name, scope);
                }
                // Redirect script output to context writer, if out var is not already provided
                if ("out".equals(name)) {
                    Writer writer = ctx.getWriter();
                    if (writer != null) {
                        return (writer instanceof PrintWriter) ? (PrintWriter) writer : new PrintWriter(writer, true);
                    }
                }
                // Provide access to engine context, if context var is not already provided
                if ("context".equals(name)) {
                    return ctx;
                }
            }
            throw new MissingPropertyException(name, getClass());
        }

        @Override
        public void setVariable(String name, Object value) {
            synchronized (ctx) {
                int scope = ctx.getAttributesScope(name);
                if (scope == -1) {
                    scope = ScriptContext.ENGINE_SCOPE;
                }
                ctx.setAttribute(name, value, scope);
            }
        }
    };
    try {
        // if the compiled class is not a Script, it is a plain class, so simply return it
        if (!Script.class.isAssignableFrom(scriptClass)) {
            return scriptClass;
        } else {
            // it's a script
            Script scriptObject = InvokerHelper.createScript(scriptClass, binding);
            // save all current closures into global closures map
            Method[] methods = scriptClass.getMethods();
            for (Method m : methods) {
                String name = m.getName();
                globalClosures.put(name, new MethodClosure(scriptObject, name));
            }
            MetaClass oldMetaClass = scriptObject.getMetaClass();
            /*
             * We override the MetaClass of this script object so that we can
             * forward calls to global closures (of previous or future "eval" calls).
             * This gives the illusion of working on the same "global" scope.
             */
            scriptObject.setMetaClass(new DelegatingMetaClass(oldMetaClass) {

                @Override
                public Object invokeMethod(Object object, String name, Object args) {
                    if (args == null) {
                        return invokeMethod(object, name, MetaClassHelper.EMPTY_ARRAY);
                    }
                    if (args instanceof Tuple) {
                        return invokeMethod(object, name, ((Tuple) args).toArray());
                    }
                    if (args instanceof Object[]) {
                        return invokeMethod(object, name, (Object[]) args);
                    } else {
                        return invokeMethod(object, name, new Object[] { args });
                    }
                }

                @Override
                public Object invokeMethod(Object object, String name, Object[] args) {
                    try {
                        return super.invokeMethod(object, name, args);
                    } catch (MissingMethodException mme) {
                        return callGlobal(name, args, ctx);
                    }
                }

                @Override
                public Object invokeStaticMethod(Object object, String name, Object[] args) {
                    try {
                        return super.invokeStaticMethod(object, name, args);
                    } catch (MissingMethodException mme) {
                        return callGlobal(name, args, ctx);
                    }
                }
            });
            return scriptObject.run();
        }
    } catch (Exception e) {
        throw new ScriptException(e);
    }
}
Also used: Binding (groovy.lang.Binding), Script (groovy.lang.Script), CompiledScript (javax.script.CompiledScript), MissingPropertyException (groovy.lang.MissingPropertyException), Method (java.lang.reflect.Method), MethodClosure (org.codehaus.groovy.runtime.MethodClosure), ScriptException (javax.script.ScriptException), MissingMethodException (groovy.lang.MissingMethodException), IOException (java.io.IOException), CompilationFailedException (org.codehaus.groovy.control.CompilationFailedException), MetaClass (groovy.lang.MetaClass), DelegatingMetaClass (groovy.lang.DelegatingMetaClass), PrintWriter (java.io.PrintWriter), Writer (java.io.Writer), Tuple (groovy.lang.Tuple)
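
A minimal sketch of the behavior the overridden getVariable enables, assuming the Groovy JSR-223 engine (groovy-jsr223) is on the classpath; the GroovyEngineOutDemo class name and the script text are illustrative. Script output addressed to out lands in the ScriptContext writer rather than System.out.

import java.io.StringWriter;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;

public class GroovyEngineOutDemo {
    public static void main(String[] args) throws Exception {
        ScriptEngine engine = new ScriptEngineManager().getEngineByName("groovy");
        // Point the context writer at a buffer; getVariable("out") above
        // wraps it in a PrintWriter and hands it to the script.
        StringWriter captured = new StringWriter();
        engine.getContext().setWriter(captured);
        engine.eval("out.println 'redirected'");
        System.out.println(captured); // -> redirected
    }
}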

Example 18 with Writer

Use of java.io.Writer in project hadoop by apache.

The class TestHttpFSServerNoACLs, method createHttpFSServer.

/**
   * Create an HttpFS Server to talk to the MiniDFSCluster we created.
   * @throws Exception
   */
private void createHttpFSServer() throws Exception {
    File homeDir = TestDirHelper.getTestDir();
    Assert.assertTrue(new File(homeDir, "conf").mkdir());
    Assert.assertTrue(new File(homeDir, "log").mkdir());
    Assert.assertTrue(new File(homeDir, "temp").mkdir());
    HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
    File secretFile = new File(new File(homeDir, "conf"), "secret");
    Writer w = new FileWriter(secretFile);
    w.write("secret");
    w.close();
    // HDFS configuration
    File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
    if (!hadoopConfDir.mkdirs()) {
        throw new IOException();
    }
    String fsDefaultName = nnConf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
    Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
    // Explicitly turn off ACLs, just in case the default becomes true later
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, false);
    File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
    OutputStream os = new FileOutputStream(hdfsSite);
    conf.writeXml(os);
    os.close();
    // HTTPFS configuration
    conf = new Configuration(false);
    conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups", HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
    os = new FileOutputStream(httpfsSite);
    conf.writeXml(os);
    os.close();
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    URL url = cl.getResource("webapp");
    if (url == null) {
        throw new IOException();
    }
    WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
    Server server = TestJettyHelper.getJettyServer();
    server.setHandler(context);
    server.start();
}
Also used: WebAppContext (org.eclipse.jetty.webapp.WebAppContext), Configuration (org.apache.hadoop.conf.Configuration), Server (org.eclipse.jetty.server.Server), FileWriter (java.io.FileWriter), OutputStream (java.io.OutputStream), FileOutputStream (java.io.FileOutputStream), IOException (java.io.IOException), File (java.io.File), Writer (java.io.Writer), URL (java.net.URL)
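
The secret-file block above opens and closes the Writer by hand. A minimal equivalent sketch using try-with-resources (the SecretFileDemo class name and file path are illustrative) guarantees the Writer is closed even if write() throws:

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class SecretFileDemo {
    public static void main(String[] args) throws IOException {
        File secretFile = new File("secret");
        // try-with-resources flushes and closes the Writer automatically.
        try (Writer w = new FileWriter(secretFile)) {
            w.write("secret");
        }
    }
}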

Example 19 with Writer

Use of java.io.Writer in project hadoop by apache.

The class TestHttpFSServerNoXAttrs, method createHttpFSServer.

/**
   * Create an HttpFS Server to talk to the MiniDFSCluster we created.
   * @throws Exception
   */
private void createHttpFSServer() throws Exception {
    File homeDir = TestDirHelper.getTestDir();
    Assert.assertTrue(new File(homeDir, "conf").mkdir());
    Assert.assertTrue(new File(homeDir, "log").mkdir());
    Assert.assertTrue(new File(homeDir, "temp").mkdir());
    HttpFSServerWebApp.setHomeDirForCurrentThread(homeDir.getAbsolutePath());
    File secretFile = new File(new File(homeDir, "conf"), "secret");
    Writer w = new FileWriter(secretFile);
    w.write("secret");
    w.close();
    // HDFS configuration
    File hadoopConfDir = new File(new File(homeDir, "conf"), "hadoop-conf");
    if (!hadoopConfDir.mkdirs()) {
        throw new IOException();
    }
    String fsDefaultName = nnConf.get(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY);
    Configuration conf = new Configuration(false);
    conf.set(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsDefaultName);
    // Explicitly turn off XAttr support
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, false);
    File hdfsSite = new File(hadoopConfDir, "hdfs-site.xml");
    OutputStream os = new FileOutputStream(hdfsSite);
    conf.writeXml(os);
    os.close();
    // HTTPFS configuration
    conf = new Configuration(false);
    conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".groups", HadoopUsersConfTestHelper.getHadoopProxyUserGroups());
    conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts", HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
    conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
    File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
    os = new FileOutputStream(httpfsSite);
    conf.writeXml(os);
    os.close();
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    URL url = cl.getResource("webapp");
    if (url == null) {
        throw new IOException();
    }
    WebAppContext context = new WebAppContext(url.getPath(), "/webhdfs");
    Server server = TestJettyHelper.getJettyServer();
    server.setHandler(context);
    server.start();
}
Also used: WebAppContext (org.eclipse.jetty.webapp.WebAppContext), Configuration (org.apache.hadoop.conf.Configuration), Server (org.eclipse.jetty.server.Server), FileWriter (java.io.FileWriter), OutputStream (java.io.OutputStream), FileOutputStream (java.io.FileOutputStream), IOException (java.io.IOException), File (java.io.File), Writer (java.io.Writer), URL (java.net.URL)
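
This test writes each Configuration through a FileOutputStream, but Configuration.writeXml also has a Writer overload, which Example 20 below uses. A minimal sketch, assuming hadoop-common is on the classpath; the ConfWriterDemo class name and property value are illustrative.

import java.io.StringWriter;
import java.io.Writer;
import org.apache.hadoop.conf.Configuration;

public class ConfWriterDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration(false);
        conf.set("fs.defaultFS", "hdfs://localhost:8020");
        // writeXml(Writer) renders the configuration as XML into any Writer.
        Writer out = new StringWriter();
        conf.writeXml(out);
        System.out.println(out);
    }
}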

Example 20 with Writer

Use of java.io.Writer in project hadoop by apache.

The class TestSecureEncryptionZoneWithKMS, method init.

@BeforeClass
public static void init() throws Exception {
    baseDir = getTestDir();
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());
    Properties kdcConf = MiniKdc.createConf();
    kdc = new MiniKdc(kdcConf, baseDir);
    kdc.start();
    baseConf = new HdfsConfiguration();
    SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, baseConf);
    UserGroupInformation.setConfiguration(baseConf);
    assertTrue("Expected configuration to enable security", UserGroupInformation.isSecurityEnabled());
    File keytabFile = new File(baseDir, "test.keytab");
    keytab = keytabFile.getAbsolutePath();
    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
    kdc.createPrincipal(keytabFile, HDFS_USER_NAME + "/" + krbInstance, SPNEGO_USER_NAME + "/" + krbInstance, OOZIE_USER_NAME + "/" + krbInstance, OOZIE_PROXIED_USER_NAME + "/" + krbInstance);
    hdfsPrincipal = HDFS_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    spnegoPrincipal = SPNEGO_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    ooziePrincipal = OOZIE_USER_NAME + "/" + krbInstance + "@" + kdc.getRealm();
    // Allow oozie to proxy user
    baseConf.set("hadoop.proxyuser.oozie.hosts", "*");
    baseConf.set("hadoop.proxyuser.oozie.groups", "*");
    baseConf.set("hadoop.user.group.static.mapping.overrides", OOZIE_PROXIED_USER_NAME + "=oozie");
    baseConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal);
    baseConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
    baseConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal);
    baseConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
    baseConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
    baseConf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
    baseConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.set(DFS_JOURNALNODE_HTTPS_ADDRESS_KEY, "localhost:0");
    baseConf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10);
    // Set a small KMSClient EDEK cache (size 4 * low watermark 0.5 = 2 cached keys)
    // to trigger an on-demand refill upon the 3rd file creation
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_SIZE, "4");
    baseConf.set(KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK, "0.5");
    keystoresDir = baseDir.getAbsolutePath();
    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureEncryptionZoneWithKMS.class);
    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, baseConf, false);
    baseConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getClientSSLConfigFileName());
    baseConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY, KeyStoreTestUtil.getServerSSLConfigFileName());
    File kmsFile = new File(baseDir, "kms-site.xml");
    if (kmsFile.exists()) {
        FileUtil.fullyDelete(kmsFile);
    }
    Configuration kmsConf = new Configuration(true);
    kmsConf.set(KMSConfiguration.KEY_PROVIDER_URI, "jceks://file@" + new Path(baseDir.toString(), "kms.keystore").toUri());
    kmsConf.set("hadoop.kms.authentication.type", "kerberos");
    kmsConf.set("hadoop.kms.authentication.kerberos.keytab", keytab);
    kmsConf.set("hadoop.kms.authentication.kerberos.principal", "HTTP/localhost");
    kmsConf.set("hadoop.kms.authentication.kerberos.name.rules", "DEFAULT");
    kmsConf.set("hadoop.kms.acl.GENERATE_EEK", "hdfs");
    Writer writer = new FileWriter(kmsFile);
    kmsConf.writeXml(writer);
    writer.close();
    // Start MiniKMS
    MiniKMS.Builder miniKMSBuilder = new MiniKMS.Builder();
    miniKMS = miniKMSBuilder.setKmsConfDir(baseDir).build();
    miniKMS.start();
}
Also used: Path (org.apache.hadoop.fs.Path), Configuration (org.apache.hadoop.conf.Configuration), KMSConfiguration (org.apache.hadoop.crypto.key.kms.server.KMSConfiguration), FileWriter (java.io.FileWriter), Properties (java.util.Properties), MiniKMS (org.apache.hadoop.crypto.key.kms.server.MiniKMS), MiniKdc (org.apache.hadoop.minikdc.MiniKdc), File (java.io.File), Writer (java.io.Writer), BeforeClass (org.junit.BeforeClass)
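
One caveat in the test above: new FileWriter(kmsFile) uses the platform default charset. A minimal sketch of an explicit UTF-8 alternative via OutputStreamWriter (the file name and content are illustrative; on Java 11+ FileWriter also has a constructor that accepts a Charset):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

public class Utf8WriterDemo {
    public static void main(String[] args) throws IOException {
        // Wrap the stream so the charset is explicit rather than platform-dependent.
        try (Writer writer = new OutputStreamWriter(
                new FileOutputStream("kms-site.xml"), StandardCharsets.UTF_8)) {
            writer.write("<configuration/>"); // stands in for kmsConf.writeXml(writer)
        }
    }
}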

Aggregations

Writer (java.io.Writer): 1259 usages
OutputStreamWriter (java.io.OutputStreamWriter): 512
IOException (java.io.IOException): 414
StringWriter (java.io.StringWriter): 300
File (java.io.File): 269
FileOutputStream (java.io.FileOutputStream): 196
BufferedWriter (java.io.BufferedWriter): 178
FileWriter (java.io.FileWriter): 174
PrintWriter (java.io.PrintWriter): 159
OutputStream (java.io.OutputStream): 120
Test (org.junit.Test): 109
InputStreamReader (java.io.InputStreamReader): 71
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 64
BufferedReader (java.io.BufferedReader): 62
Reader (java.io.Reader): 62
HashMap (java.util.HashMap): 59
Map (java.util.Map): 59
ArrayList (java.util.ArrayList): 58
InputStream (java.io.InputStream): 54
Properties (java.util.Properties): 39