Steps and Sample Code to Connect to Kerberized Hive from a Local Windows Desktop/Laptop

Follow these steps to connect to Kerberized Hive from a local Windows desktop/laptop:

  1. Install the MIT Kerberos distribution from http://web.mit.edu/kerberos/dist/.
  2. Download winutils.exe for the Hadoop connection from https://github.com/steveloughran/winutils/tree/master/hadoop-2.7.1/bin.
  3. Set up the Kerberos configuration file in the default location (a minimal example krb5.ini is shown below, after step 6).
    • Obtain the krb5.conf configuration file from its default location on the Hadoop cluster (/etc/krb5.conf).
    • The default location on Windows is C:\ProgramData\MIT\Kerberos5. This directory may be hidden by the operating system, so enable viewing of hidden files.
    • Rename the configuration file from krb5.conf to krb5.ini.
    • Copy krb5.ini to the default location and overwrite the empty sample file.
  4. Set up the Kerberos credential cache file.
    • Create a writable directory, for example C:\temp.
    • Create an environment variable named KRB5CCNAME and set its value to <writable directory from the previous step>\krb5cache, for example C:\temp\krb5cache.
  5. Copy winutils.exe to a local bin folder, for example C:\winutils\bin\winutils.exe.
  6. Gather the following information and set it in the sample code (or in your own configuration files):

userPrincipalName = "user@REALM.COM";
keytabPath = "C:/users/user/user.keytab";
kdcHost = "kdc.host.com";
realm = "REALM.COM";
winUtilHome = "C:\\winutils\\";
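
For reference, a minimal krb5.ini matching the placeholder realm and KDC host used in this post might look like the following. This is a typical skeleton, not a file taken from any specific cluster; your cluster's /etc/krb5.conf may contain additional settings that should be preserved.

[libdefaults]
    default_realm = REALM.COM

[realms]
    REALM.COM = {
        kdc = kdc.host.com
        admin_server = kdc.host.com
    }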

 

#####################################################################
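
Optional pre-flight check: before running the sample below, it helps to confirm that the Kerberos configuration, keytab, and winutils files from the steps above actually exist, and that KRB5CCNAME is set. The sketch below uses the sample paths from step 6 as placeholders, and the class name is illustrative; adjust both to your environment.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class KerberosPreflightCheck {

    public static void main(String[] args) {
        // Sample paths from step 6; replace them with your own values.
        checkExists("krb5.ini", Paths.get("C:\\ProgramData\\MIT\\Kerberos5\\krb5.ini"));
        checkExists("keytab", Paths.get("C:/users/user/user.keytab"));
        checkExists("winutils.exe", Paths.get("C:\\winutils\\bin\\winutils.exe"));
        // KRB5CCNAME should point to a file inside the writable directory created in step 4.
        System.out.println("KRB5CCNAME = " + System.getenv("KRB5CCNAME"));
    }

    private static void checkExists(String label, Path path) {
        System.out.println(label + " found at " + path + " : " + Files.exists(path));
    }
}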

Code Sample:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberizedJDBCSample {

    public static void main(String[] args) {
        String userPrincipalName = "user@REALM.COM";
        String keytabPath = "C:/users/user/user.keytab";

        String jdbcURL = "jdbc:hive2://{hostName}:{port}/{databaseName}";
        String hivePrincipal = "principal=hive/{hostName}@{realm}";
        Boolean isKerberized = true;
        String kdcHost = "kdc.host.com";
        String hiveHost = "hive.host.com";
        String hiveDatabase = "testdb";
        String hiveDriverClass = "org.apache.hive.jdbc.HiveDriver";
        String hiveUserName = "user";
        String hivePassword = "adasdas";
        Integer hivePort = 10000;
        String realm = "REALM.COM";
        String winUtilHome = "C:\\winutils\\";
        String query = "select * from temptable limit 10";

        jdbcSample(userPrincipalName, keytabPath, jdbcURL, hivePrincipal, isKerberized, kdcHost, hiveHost,
                hiveDatabase, hiveDriverClass, hiveUserName, hivePassword, hivePort, realm, winUtilHome, query);
    }

    private static void jdbcSample(String userPrincipalName, String keytabPath, String jdbcURL, String hivePrincipal,
            Boolean isKerberized, String kdcHost, String hiveHost, String hiveDatabase, String hiveDriverClass,
            String hiveUserName, String hivePassword, Integer hivePort, String realm, String winUtilHome,
            String query) {
        if (isKerberized) {
            // Point Hadoop at the local winutils installation and at the Kerberos realm/KDC.
            System.setProperty("hadoop.home.dir", winUtilHome);
            System.setProperty("java.security.krb5.realm", realm);
            System.setProperty("java.security.krb5.kdc", kdcHost);
            // Append the Hive service principal to the JDBC URL and log in from the keytab.
            jdbcURL = jdbcURL + ";" + hivePrincipal;
            loginViaKeyTab(userPrincipalName, keytabPath);
        }

        // Substitute the placeholders in the JDBC URL with the actual connection details.
        jdbcURL = jdbcURL.replaceAll("\\{hostName\\}", hiveHost).replaceAll("\\{port\\}", "" + hivePort)
                .replaceAll("\\{databaseName\\}", hiveDatabase).replaceAll("\\{realm\\}", realm);

        Connection connection = null;
        Statement statement = null;
        ResultSet rs = null;
        try {
            Class.forName(hiveDriverClass);
            connection = DriverManager.getConnection(jdbcURL, hiveUserName, hivePassword);
            statement = connection.createStatement();
            rs = statement.executeQuery(query);
            ResultSetMetaData rsmd = rs.getMetaData();
            int columnsNumber = rsmd.getColumnCount();

            // Print every column of every row returned by the query.
            while (rs.next()) {
                for (int i = 1; i <= columnsNumber; i++) {
                    System.out.print(rs.getString(i) + " ");
                }
                System.out.println();
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close the JDBC resources, guarding against any that were never opened.
            try {
                if (rs != null) {
                    rs.close();
                }
                if (statement != null) {
                    statement.close();
                }
                if (connection != null) {
                    connection.close();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    public static void loginViaKeyTab(String principalName, String keytabPath) {
        try {
            // Tell Hadoop security to use Kerberos and log in using the keytab file.
            Configuration conf = new Configuration();
            conf.set("hadoop.security.authentication", "Kerberos");
            UserGroupInformation.setConfiguration(conf);
            UserGroupInformation.loginUserFromKeytab(principalName, keytabPath);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
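
With the sample values above, the URL passed to DriverManager resolves to:

jdbc:hive2://hive.host.com:10000/testdb;principal=hive/hive.host.com@REALM.COM

To compile and run the sample, the Hive JDBC driver (hive-jdbc) and the Hadoop client libraries (hadoop-common and their transitive dependencies) must be on the classpath; use versions that match your cluster. Note that with Kerberos authentication the driver authenticates with the ticket obtained from the keytab via loginViaKeyTab, so the user name and password passed to getConnection are typically ignored.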
