
Unable to create a file on Hadoop with Java code using a Kerberos ticket

Our Hadoop cluster uses Kerberos, so we normally run kinit first and then a command such as "hadoop fs -ls /". Now I am trying to log in with JAAS and GSSAPI and create a file on the cluster, but it fails. Here is my code:

import java.security.PrivilegedAction;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.ietf.jgss.*;

public class Client {

    public static void main(String[] args) throws LoginException {
        System.setProperty("sun.security.krb5.debug", "false");
        System.setProperty("java.security.krb5.realm", "H236");
        System.setProperty("java.security.krb5.kdc", "172.16.0.236");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        System.setProperty("java.security.auth.login.config",
                "/etc/hadoop/conf/jaas.conf");

        // "Client" is the entry name in jaas.conf.
        LoginContext lc = new LoginContext("Client");
        lc.login();
        System.out.println("Authentication succeeded!");

        Subject subject = lc.getSubject();
        Subject.doAs(subject, new PrivilegedAction<byte[]>() {
            public byte[] run() {
                Configuration conf = new Configuration();
                try {
                    // Build a GSSAPI security context with the Kerberos v5 mechanism.
                    Oid krb5Mechanism = new Oid("1.2.840.113554.1.2.2");
                    GSSManager manager = GSSManager.getInstance();
                    GSSName clientName = manager.createName("hdfs/[email protected]",
                            GSSName.NT_USER_NAME);
                    GSSCredential clientCreds = manager.createCredential(clientName,
                            GSSCredential.DEFAULT_LIFETIME,
                            krb5Mechanism,
                            GSSCredential.INITIATE_ONLY);
                    GSSName serverName = manager.createName("[email protected]",
                            GSSName.NT_HOSTBASED_SERVICE);
                    GSSContext context = manager.createContext(serverName,
                            krb5Mechanism,
                            clientCreds,
                            GSSContext.DEFAULT_LIFETIME);
                    context.requestMutualAuth(true);
                    context.requestConf(false);
                    context.requestInteg(true);
                    System.out.println(clientCreds.getName().toString());
                    System.out.println(clientCreds.getRemainingLifetime());

                    byte[] outToken = context.initSecContext(new byte[0], 0, 0);

                    // Create and write a file on the Hadoop cluster.
                    FileSystem fs = FileSystem.get(conf);
                    Path f = new Path("hdfs:///hdfs/123");
                    FSDataOutputStream s = fs.create(f, true);
                    System.out.println("done\n");
                    for (int i = 0; i < 100; ++i)
                        s.writeChars("test");
                    s.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            } // end of run
        });
    } // end of main
}

The jaas.conf looks like this:

Client {
    com.sun.security.auth.module.Krb5LoginModule required
    debug=true
    storeKey=true
    doNotPrompt=true
    useKeyTab=true
    keyTab="/etc/hadoop/conf/hdfs.keytab"
    principal="hdfs/[email protected]";
};
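
As an aside, Hadoop's own UserGroupInformation API can log in from this same keytab directly, with no hand-written JAAS or GSSAPI code; Hadoop's RPC layer then runs its own SASL/GSSAPI handshake from the login credentials. A minimal sketch, reusing the keytab path and the (obfuscated) principal string from the jaas.conf above:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class KeytabLoginSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Tell the client side that the cluster expects Kerberos.
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);

        // Principal and keytab path copied from the jaas.conf above.
        UserGroupInformation.loginUserFromKeytab(
                "hdfs/[email protected]", "/etc/hadoop/conf/hdfs.keytab");

        // FileSystem calls made after the login carry the Kerberos credentials.
        FileSystem fs = FileSystem.get(conf);
        fs.create(new Path("hdfs:///hdfs/123"), true).close();
    }
}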

My login user is root. Before running this code with "hadoop jar ./client.jar", I ran kdestroy to delete the Kerberos ticket cache, and then I got the following errors:

Authentication succeeded! 
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))] 
ipc.Client: Exception encountered while connecting to the server : javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))] 
ERROR security.UserGroupInformation: PriviledgedActionException as:root (auth:KERBEROS) cause:java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))] 
WARN retry.RetryInvocationHandler: Exception while invoking class org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create. Not retrying because the invoked method is not idempotent, and unable to determine whether it was invoked 
java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null))]; Host Details : local host is: "XP236/172.16.0.236"; destination host is: "172.16.0.236":8020; 
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:760)

I do not know how to make this work. Can anyone help me? Thanks a lot.


Kerberos is not configured for this; it does not work with IP addresses, and it won't. Use hostnames first, or forget it. – 2013-05-25 19:01:55
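
Concretely, that means the KDC and the service principals should be referenced by fully qualified hostnames rather than raw IP addresses. A small illustrative sketch of the question's properties rewritten that way (kdc.h236.example and nn.h236.example are made-up hostnames; use the cluster's real DNS names):

// Hypothetical hostnames for illustration only.
System.setProperty("java.security.krb5.realm", "H236");
System.setProperty("java.security.krb5.kdc", "kdc.h236.example");
// Service principals are host-based: hdfs/nn.h236.example@H236, not hdfs/<ip>@H236.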

Answer


Please use the code below to access secure Hadoop through a proxy user; the proxy user must also be configured in core-site.xml (see the sketch after the code), similar to how Oozie is given access:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class HdfsTest {

    public static void main(String args[]) {
        final Configuration conf = new Configuration();
        try {
            conf.set("fs.defaultFS",
                    "hdfs://ibm-r1-node7.ibmbi-nextgen.com:8020");
            UserGroupInformation.setConfiguration(conf);

            // args[0] = user to impersonate; args[1] = principal that owns
            // the Kerberos ticket cache at /tmp/krb5cc_0.
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(
                    args[0], UserGroupInformation.getUGIFromTicketCache(
                            "/tmp/krb5cc_0", args[1]));

            System.out.println("--------------status---:"
                    + UserGroupInformation.isLoginKeytabBased());
            System.out.println("---------AFTER LOGIN-----:");

            // Run the HDFS calls as the proxy user.
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                public Void run() throws Exception {
                    FileSystem fs = FileSystem.get(conf);
                    Path path = new Path("hdfs://10.132.100.224:8020/tmp/root");

                    FileStatus[] statusArray = fs.listStatus(path);
                    int count = 0;
                    for (FileStatus status : statusArray) {
                        Long blockSize = status.getBlockSize();
                        String permission = status.getPermission() + "";
                        int replication = status.getReplication();
                        String owner = status.getOwner();
                        String paths = status.getPath() + "";
                        boolean file = status.isFile();
                        Long length = status.getLen();
                        String group = status.getGroup();
                        System.out.println("BlockSize   :" + blockSize);
                        System.out.println("Group       :" + group);
                        System.out.println("Length      :" + length);
                        System.out.println("Owner       :" + owner);
                        System.out.println("Replication :" + replication);
                        System.out.println("File        :" + file);
                        System.out.println("Permission  :" + permission);
                        System.out.println("Path        :" + paths);
                        count++;
                        System.out.println("---------count---------" + count);
                    }
                    return null;
                }
            });
        } catch (Exception e) {
            System.out.println("--------EXCEPTION________________");
            e.printStackTrace();
        }
    }
}
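
For the impersonation above to be allowed, the real (Kerberos-authenticated) user must be whitelisted as a proxy user in core-site.xml on the cluster, much like Oozie's service user is. A minimal sketch, assuming the authenticated user's short name is hdfs (substitute the real one; "*" can be narrowed to specific hosts and groups):

<!-- core-site.xml: allow the (assumed) user "hdfs" to impersonate others -->
<property>
  <name>hadoop.proxyuser.hdfs.hosts</name>
  <value>*</value>
</property>
<property>
  <name>hadoop.proxyuser.hdfs.groups</name>
  <value>*</value>
</property>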
