Unable to create a Hadoop file from Java code using a Kerberos ticket

Our Hadoop cluster uses Kerberos, so we have to run kinit first and can then use commands such as "hadoop fs -ls /". Now I am trying to log in and create a file on the cluster from Java using JAAS and the GSS-API, but it fails. Here is my code:

import java.security.PrivilegedAction;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.ietf.jgss.*;

public class Client {
    public static void main(String[] args) throws LoginException {
        System.setProperty("sun.security.krb5.debug", "false");
        System.setProperty("java.security.krb5.realm", "H236");
        System.setProperty("java.security.krb5.kdc", "172.16.0.236");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        System.setProperty("java.security.auth.login.config",
                "/etc/hadoop/conf/jaas.conf");

        // JAAS login using the "Client" entry of jaas.conf
        LoginContext lc = new LoginContext("Client");
        lc.login();
        System.out.println("Authentication succeeded!");

        Subject subject = lc.getSubject();
        Subject.doAs(subject, new PrivilegedAction<byte[]>() {
            public byte[] run() {
                Configuration conf = new Configuration();
                try {
                    // Build Kerberos credentials and a GSS context by hand
                    Oid krb5Mechanism = new Oid("1.2.840.113554.1.2.2");
                    GSSManager manager = GSSManager.getInstance();
                    GSSName clientName = manager.createName(
                            "hdfs/172.16.0.239@H236", GSSName.NT_USER_NAME);
                    GSSCredential clientCreds = manager.createCredential(clientName,
                            GSSCredential.DEFAULT_LIFETIME,
                            krb5Mechanism,
                            GSSCredential.INITIATE_ONLY);
                    GSSName serverName = manager.createName(
                            "hdfs@172.16.0.239", GSSName.NT_HOSTBASED_SERVICE);
                    GSSContext context = manager.createContext(serverName,
                            krb5Mechanism,
                            clientCreds,
                            GSSContext.DEFAULT_LIFETIME);
                    context.requestMutualAuth(true);
                    context.requestConf(false);
                    context.requestInteg(true);
                    System.out.println(clientCreds.getName().toString());
                    System.out.println(clientCreds.getRemainingLifetime());
                    byte[] outToken = context.initSecContext(new byte[0], 0, 0);

                    // Create a file on the Hadoop cluster and write to it
                    FileSystem fs = FileSystem.get(conf);
                    Path f = new Path("hdfs:///hdfs/123");
                    FSDataOutputStream s = fs.create(f, true);
                    System.out.println("done\n");
                    for (int i = 0; i < 100; ++i) {
                        s.writeChars("test");
                    }
                    s.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            } // end of run
        });
    } // end of main
}

jaas.conf looks like this:

Client {
    com.sun.security.auth.module.Krb5LoginModule required
    debug=true
    storeKey=true
    doNotPrompt=true
    useKeyTab=true
    keyTab="/etc/hadoop/conf/hdfs.keytab"
    principal="hdfs/172.16.0.239@H236";
};
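
For reference, the Hadoop FileSystem client picks up Kerberos credentials through org.apache.hadoop.security.UserGroupInformation, so the usual programmatic alternative to the JAAS/GSS-API setup above is a direct keytab login. Below is a minimal sketch, assuming the same principal and keytab as in the jaas.conf above (the class name KeytabLogin is made up for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical sketch: log in from a keytab via UserGroupInformation
// instead of building a JAAS/GSS context by hand.
public class KeytabLogin {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Tell the client the cluster is Kerberized before logging in.
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        // Log in straight from the keytab; no jaas.conf or GSS-API calls needed.
        UserGroupInformation.loginUserFromKeytab(
                "hdfs/172.16.0.239@H236", "/etc/hadoop/conf/hdfs.keytab");
        // FileSystem operations now run as the logged-in principal.
        FileSystem fs = FileSystem.get(conf);
        FSDataOutputStream out = fs.create(new Path("hdfs:///hdfs/123"), true);
        out.writeChars("test");
        out.close();
    }
}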

My login user is root. Before running my code with "hadoop jar ./client.jar", I run kdestroy to delete the Kerberos cache, and then I get the following error:

Authentication succeeded!
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
ipc.Client: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]
WARN retry.RetryInvocationHandler: Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create. Not retrying because the invoked method is not idempotent, and unable to determine whether it was invoked
java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]; Host Details : local host is: "XP236/172.16.0.236"; destination host is: "172.16.0.236":8020;
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:760)

I don't know how to make this work. Can anyone help me? Many thanks.

To access secure Hadoop, use the following proxy-user code; the impersonation must also be configured in core-site.xml (this is similar to how Oozie accesses the cluster). A sketch of the core-site.xml entries follows the code.

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class HdfsTest {
    public static void main(String[] args) {
        final Configuration conf = new Configuration();
        try {
            conf.set("fs.defaultFS",
                    "hdfs://ibm-r1-node7.ibmbi-nextgen.com:8020");
            UserGroupInformation.setConfiguration(conf);

            // args[0] is the user to impersonate; args[1] is the principal
            // whose credentials sit in the kinit ticket cache /tmp/krb5cc_0.
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(
                    args[0], UserGroupInformation.getUGIFromTicketCache(
                            "/tmp/krb5cc_0", args[1]));

            System.out.println("--------------status---:"
                    + UserGroupInformation.isLoginKeytabBased());
            System.out.println("---------AFTER LOGIN-----:");

            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                public Void run() throws Exception {
                    // All FileSystem calls in here run as the proxy user.
                    FileSystem fs = FileSystem.get(conf);
                    Path path = new Path("hdfs://10.132.100.224:8020/tmp/root");
                    FileStatus[] statusArray = fs.listStatus(path);
                    int count = 0;
                    for (FileStatus status : statusArray) {
                        System.out.println("BlockSize   :" + status.getBlockSize());
                        System.out.println("Group       :" + status.getGroup());
                        System.out.println("Length      :" + status.getLen());
                        System.out.println("Owner       :" + status.getOwner());
                        System.out.println("Replication :" + status.getReplication());
                        System.out.println("File        :" + status.isFile());
                        System.out.println("Permission  :" + status.getPermission());
                        System.out.println("Path        :" + status.getPath());
                        count++;
                        System.out.println("-------------without auth-----count---------------"
                                + count);
                    }
                    return null;
                }
            });
        } catch (Exception e) {
            System.out.println("--------EXCEPTION________________");
            e.printStackTrace();
        }
    }
}
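
The proxy-user grant that createProxyUser relies on is configured on the NameNode side, not in the client. A minimal core-site.xml sketch, assuming the authenticating superuser is hdfs (narrow the hosts and groups values for production):

<!-- core-site.xml on the NameNode: allow the superuser "hdfs"
     to impersonate other users; "*" is deliberately permissive -->
<property>
  <name>hadoop.proxyuser.hdfs.hosts</name>
  <value>*</value>
</property>
<property>
  <name>hadoop.proxyuser.hdfs.groups</name>
  <value>*</value>
</property>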