Java operations on HDFS, Hive, and HBase with Kerberos

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

/**
* @author niehw 2017-02-17
*
*/
public class HdfsKerberosDemo {

public static void main(String[] args) throws IOException {
    System.out.println("Creating HDFS directory .......................");
    Configuration conf = new Configuration();
    conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());

    URI uri = null;
    try {
        uri = new URI("hdfs://nameservice1:8020");
    } catch (URISyntaxException e1) {
        e1.printStackTrace();
    }

    // Kerberos must be enabled on the configuration before UserGroupInformation reads it.
    // Note: java.security.krb5.kdc / java.security.krb5.realm are JVM system properties,
    // not Hadoop configuration keys, and the server-side kdc.conf is not usable from a client;
    // pointing the JVM at the client krb5.conf (standard path assumed here) is the usual fix.
    System.setProperty("java.security.krb5.conf", "/etc/krb5.conf");
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);
    try {
        UserGroupInformation.loginUserFromKeytab("hdfs/tdh01@TDH", "/etc/hdfs1/hdfs.keytab");
    } catch (IOException e) {
        System.out.println("Kerberos login failed: " + e.getMessage());
        e.printStackTrace();
    }

    FileSystem fs = FileSystem.get(uri, conf);
    Path srcPath = new Path("/wwc");
    boolean isok = fs.mkdirs(srcPath);
    if (isok) {
        System.out.println("create dir ok!");
    } else {
        System.out.println("create dir failure");
    }
    fs.close();
}
}
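
The demo above only creates /wwc, so as a follow-up here is a minimal sketch that lists the directory contents with FileSystem.listStatus, reusing the same conf, uri, and Kerberos login as above (the class and method names here are illustrative, not from the original post):

import java.io.IOException;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsListDemo {

    // Assumes the caller has already built conf/uri and performed the keytab login
    // exactly as in HdfsKerberosDemo above.
    public static void listDir(FileSystem fs, String dir) throws IOException {
        for (FileStatus status : fs.listStatus(new Path(dir))) {
            // Print path, length, and whether the entry is a directory.
            System.out.println(status.getPath() + "\t" + status.getLen() + "\t" + status.isDirectory());
        }
    }
}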

# HBase with Kerberos

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;
/**
* @author niehw 2017-02-17
*
*/
public class HbaseKerberosDemo {

private static Configuration conf = null;

static {
    Configuration HBASE_CONFIG = new Configuration();
    HBASE_CONFIG.set("hbase.zookeeper.quorum", "192.9.145.231,192.9.145.232,192.9.145.233");
    HBASE_CONFIG.set("hbase.master.kerberos.principal", "hbase/_HOST@TDH");
    HBASE_CONFIG.set("hbase.regionserver.kerberos.principal", "hbase/_HOST@TDH");
    HBASE_CONFIG.set("hbase.security.authentication", "kerberos");
    HBASE_CONFIG.set("hadoop.security.authentication", "kerberos");
    HBASE_CONFIG.set("zookeeper.znode.parent", "/hyperbase1");
    conf = HBaseConfiguration.create(HBASE_CONFIG);
    System.out.println("结束");

}

public static void creatTable(String tableName, String[] familys) throws Exception {
    HBaseAdmin admin = new HBaseAdmin(conf);
    if (admin.tableExists(tableName)) {
        System.out.println("table already exists!");
    } else {
        HTableDescriptor tableDesc = new HTableDescriptor(tableName);
        for (int i = 0; i < familys.length; i++) {
            tableDesc.addFamily(new HColumnDescriptor(familys[i]));
        }
        admin.createTable(tableDesc);
        System.out.println("create table " + tableName + " ok.");
    }
}

public static void deleteTable(String tableName) throws Exception {

    HBaseAdmin admin = new HBaseAdmin(conf);
    admin.disableTable(tableName);
    admin.deleteTable(tableName);
    System.out.println("delete table " + tableName + " ok.");
}

public static void addRecord(String tableName, String rowKey, String family, String qualifier, String value)
        throws Exception {
    try {
        HTable table = new HTable(conf, tableName);
        Put put = new Put(Bytes.toBytes(rowKey));
        put.add(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes.toBytes(value));
        table.put(put);
        System.out.println("insert recored " + rowKey + " to table " + tableName + " ok.");
    } catch (IOException e) {
        e.printStackTrace();
    }
}

public static void getOneRecord(String tableName, String rowKey) throws IOException {
    HTable table = new HTable(conf, tableName);
    Get get = new Get(rowKey.getBytes());
    Result rs = table.get(get);
    KeyValue[] raw = rs.raw();
    for (KeyValue kv : raw) {
        System.out.print(new String(kv.getRow()) + " ");
        System.out.print(new String(kv.getFamily()) + ":");
        System.out.print(new String(kv.getQualifier()) + " ");
        System.out.print(kv.getTimestamp() + " ");
        System.out.println(new String(kv.getValue()));
    }
}

public static void main(String[] args) {
    UserGroupInformation.setConfiguration(conf);

    try {
        //UserGroupInformation.loginUserFromKeytab("hbase/tdh01","d://etc_keytab/hbase.keytab");
        UserGroupInformation.loginUserFromKeytab("hbase/tdh01", "/etc/hyperbase1/hbase.keytab");

    } catch (IOException e) {
        e.printStackTrace();
    }
    String tablename = "studentnie";
    String[] familys = { "cf" };
    // Pick the operation from the first command-line argument; default to "4" (getOneRecord).
    String num = (args.length > 0) ? args[0].trim() : "4";
    System.out.println("selected operation: " + num);
    if (num.equals("1")) {
        System.out.println("enter creatTable");
        try {
            HbaseKerberosDemo.creatTable(tablename, familys);

        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    if (num.equals("2")) {
        System.out.println("enter deleteTable");
        try {
            HbaseKerberosDemo.deleteTable(tablename);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    if (num.equals("3")) {
        System.out.println("enter addRecord");
        try {
            HbaseKerberosDemo.addRecord("studentnie", "1_1", "cf", "name", "niehw");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    if (num.equals("4")) {
        System.out.println("enter getOneRecord");
        try {
            HbaseKerberosDemo.getOneRecord(tablename, "1_1");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}
}
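
HBaseAdmin, HTable, and Put.add as used above are deprecated in HBase 1.x and removed in later releases. A minimal sketch of the same create/put/get flow against the Connection-based client API, reusing the conf and Kerberos login from the demo (class name, and the assumption that the table and family names match, are mine), could look like this:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class HbaseConnectionDemo {

    // conf is assumed to carry the same quorum/Kerberos settings as in HbaseKerberosDemo,
    // and the keytab login is assumed to have been done already.
    public static void run(Configuration conf) throws IOException {
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {

            TableName tableName = TableName.valueOf("studentnie");
            if (!admin.tableExists(tableName)) {
                HTableDescriptor desc = new HTableDescriptor(tableName);
                desc.addFamily(new HColumnDescriptor("cf"));
                admin.createTable(desc);
            }

            try (Table table = connection.getTable(tableName)) {
                // Write one cell, then read the whole row back.
                Put put = new Put(Bytes.toBytes("1_1"));
                put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("name"), Bytes.toBytes("niehw"));
                table.put(put);

                Result result = table.get(new Get(Bytes.toBytes("1_1")));
                for (Cell cell : result.rawCells()) {
                    System.out.println(Bytes.toString(CellUtil.cloneQualifier(cell)) + " = "
                            + Bytes.toString(CellUtil.cloneValue(cell)));
                }
            }
        }
    }
}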

# Hive JDBC with Kerberos

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.hadoop.security.UserGroupInformation;

/**
* @author niehw 2017-02-17
*
*/
public class HiveKerberosDemo {

private static String driverName = "org.apache.hive.jdbc.HiveDriver";

public static void main(String[] args) throws SQLException {
    try {
        org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        conf.set("java.security.krb5.kdc", "  /var/kerberos/krb5kdc/kdc.conf");
        conf.set("java.security.krb5.realm", "TDH");
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("hive/tdh01@TDH", "/etc/inceptorsql1/hive.keytab");
        Class.forName(driverName);
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }
    String jdbcURL = "jdbc:hive2://tdh01:10000/default;principal=hive/tdh01@TDH";
    Connection conn = DriverManager.getConnection(jdbcURL);
    Statement stmt = conn.createStatement();
    ResultSet rs = stmt.executeQuery("select * from tags");
    while (rs.next()) {
        System.out.println(rs.getString(1));
        System.out.println(rs.getString(2));
    }
    rs.close();
    stmt.close();
    conn.close();
}
}
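
Depending on the driver and cluster setup, it can be necessary to open the JDBC connection inside a doAs block of the logged-in UGI so the Kerberos credentials are actually picked up. A minimal sketch of that pattern, assuming the same JDBC URL and that the keytab login above has already run (class and method names are illustrative):

import java.security.PrivilegedExceptionAction;
import java.sql.Connection;
import java.sql.DriverManager;

import org.apache.hadoop.security.UserGroupInformation;

public class HiveDoAsDemo {

    // Assumes UserGroupInformation.loginUserFromKeytab(...) has already been called
    // as in HiveKerberosDemo above.
    public static Connection openConnection(final String jdbcURL) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.getLoginUser();
        // Run the JDBC connect inside the Kerberos-authenticated context.
        return ugi.doAs(new PrivilegedExceptionAction<Connection>() {
            public Connection run() throws Exception {
                return DriverManager.getConnection(jdbcURL);
            }
        });
    }
}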

Reposted from: https://www.2cto.com/net/201702/601124.html
