[root@master yum.repos.d]# cat /etc/hosts
10.16.30.97 master
10.16.30.98 slave1
10.16.30.99 slave2
10.16.30.100 slave3
10.16.30.101 slave4
127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4
::1 localhost localhost.localdomain localhost6 localhost6.localdomain6
10.16.30.97 master
10.16.30.98 slave1
10.16.30.99 slave2
10.16.30.100 slave3
10.16.30.101 slave4
uname -a
或
cat /proc/version
或
cat /etc/issue
或
lsb_release -a 适合所有linux
yum install krb5-server.x86_64 krb5-devel.x86_64
yum install krb5-devel.x86_64
# /etc/krb5.conf — Kerberos client/library configuration for realm MSXF.HADOOP.
[logging]
default = FILE:/var/log/kerberos/krb5libs.log
kdc = FILE:/var/log/kerberos/krb5kdc.log
admin_server = FILE:/var/log/kerberos/kadmind.log
[libdefaults]
# Realm assumed when a principal name omits one.
default_realm = MSXF.HADOOP
# Locate realms/KDCs via the [realms] section below, not DNS TXT/SRV records.
dns_lookup_realm = false
dns_lookup_kdc = false
# Ticket lifetime 86400 s (24 h); renewable for up to 604800 s (7 days).
ticket_lifetime = 86400
renew_lifetime = 604800
# NOTE(review): max_life is a kdc.conf [realms] option, not a standard
# [libdefaults] key — presumably ignored here; confirm against MIT krb5 docs.
max_life = 12h 0m 0s
forwardable = true
renewable = true
# Prefer TCP for KDC traffic once a message exceeds 1 byte (i.e. always);
# commonly set in Hadoop clusters to avoid UDP fragmentation of large tickets.
udp_preference_limit = 1
[realms]
MSXF.HADOOP = {
kdc = master:88
admin_server = master:749
}
# No host/domain → realm mappings needed; default_realm covers everything.
[domain_realm]
# Points the KDC at its own profile (see kdc.conf below).
[kdc]
profile=/var/kerberos/krb5kdc/kdc.conf
# /var/kerberos/krb5kdc/kdc.conf — KDC-side settings for realm MSXF.HADOOP.
[kdcdefaults]
kdc_ports = 88
kdc_tcp_ports = 88
[realms]
MSXF.HADOOP = {
# aes128-cts (not aes256) — presumably chosen so Java clients do not need the
# JCE unlimited-strength policy files; TODO confirm.
master_key_type = aes128-cts
# Maximum ticket lifetime 1 day; renewable for up to 7 days.
max_life = 1d
max_renewable_life = 7d
acl_file = /var/kerberos/krb5kdc/kadm5.acl
dict_file = /usr/share/dict/words
admin_keytab = /var/kerberos/krb5kdc/kadm5.keytab
# Weak DES/RC4 enctypes kept for legacy compatibility — consider trimming.
supported_enctypes = aes128-cts:normal des3-hmac-sha1:normal arcfour-hmac:normal des-hmac-sha1:normal des-cbc-md5:normal des-cbc-crc:normal
}
#[realms]
# EXAMPLE.COM = {
# #master_key_type = aes256-cts
# acl_file = /var/kerberos/krb5kdc/kadm5.acl
# dict_file = /usr/share/dict/words
# admin_keytab = /var/kerberos/krb5kdc/kadm5.keytab
# supported_enctypes = aes256-cts:normal aes128-cts:normal des3-hmac-sha1:normal arcfour-hmac:normal des-hmac-sha1:normal des-cbc-md5:normal des-cbc-crc:normal
# }
[root@master krb5kdc]# kdb5_util create -r MSXF.HADOOP -s
#kadmin.local: addprinc root/admin
#kadmin.local: addprinc test
chkconfig --level 35 krb5kdc on
chkconfig --level 35 kadmin on
service krb5kdc restart
service kadmin restart
# kinit test
Password for test@MSXF.HADOOP:
#
yum install openldap-clients
yum install krb5-workstation
yum install krb5-libs
rpm -qa | grep 服务名
#kadmin.local: addprinc cloudera-scm/admin@MSXF.HADOOP
#kadmin.local: addprinc hdfs
[root@master tmp]# cat /tmp/events.csv
10.1.2.3,US,android,createNote
10.200.88.99,FR,windows,updateNote
10.1.2.3,US,android,updateNote
10.200.88.77,FR,ios,createNote
10.1.4.5,US,windows,updateTag
-- The raw table keeps the sensitive ip column; the views in the filtered
-- database project it away so non-privileged groups never see it.
CREATE DATABASE sensitive;
CREATE TABLE sensitive.events (
  ip STRING,
  country STRING,
  client STRING,
  action STRING
) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';
LOAD DATA LOCAL INPATH '/tmp/events.csv' OVERWRITE INTO TABLE sensitive.events;
CREATE DATABASE filtered;
-- Same rows, minus the ip column.
CREATE VIEW filtered.events AS SELECT country, client, action FROM sensitive.events;
-- US-only subset of the filtered view.
CREATE VIEW filtered.events_usonly AS SELECT * FROM filtered.events WHERE country = 'US';
<!-- hive-site.xml: Kerberos service principal used by HiveServer2. The _HOST
     token is substituted with the local host name at runtime. Replace
     YOUR-REALM.COM with the actual realm (MSXF.HADOOP in this cluster).
     Fixed: the name element was truncated ("principalname>") and all three
     closing tags were missing their "</" markers. -->
<property>
  <name>hive.server2.authentication.kerberos.principal</name>
  <value>hive/_HOST@YOUR-REALM.COM</value>
</property>
kinit -kt hive.keytab hive/slave2@MSXF.HADOOP
beeline -u "jdbc:hive2://slave2:10000/default;principal=hive/slave2@MSXF.HADOOP"
-- admin_role: full access on the whole server, for the admin and hive groups.
CREATE ROLE admin_role;
GRANT ALL ON SERVER server1 TO ROLE admin_role;
GRANT ROLE admin_role TO GROUP admin;
GRANT ROLE admin_role TO GROUP hive;
-- test_role: confined to the filtered database only.
CREATE ROLE test_role;
GRANT ALL ON DATABASE filtered TO ROLE test_role;
GRANT ROLE test_role TO GROUP test;
# useradd test
beeline -u "jdbc:hive2://slave2:10000/default;principal=hive/slave2@MSXF.HADOOP"
0: jdbc:hive2://slave2:10000/default> set system:user.name
0: jdbc:hive2://slave2:10000/default> ;
+------------------------+--+
| set |
+------------------------+--+
| system:user.name=hive |
+------------------------+--+
1 row selected (0.177 seconds)
0: jdbc:hive2://slave2:10000/default>
0: jdbc:hive2://slave2:10000/default> show roles;
INFO : Compiling command(queryId=hive_20160708141919_d7a60886-7444-41db-9619-eca329a5519c): show roles
INFO : Semantic Analysis Completed
INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:role, type:string, comment:from deserializer)], properties:null)
INFO : Completed compiling command(queryId=hive_20160708141919_d7a60886-7444-41db-9619-eca329a5519c); Time taken: 0.121 seconds
INFO : Executing command(queryId=hive_20160708141919_d7a60886-7444-41db-9619-eca329a5519c): show roles
INFO : Starting task [Stage-0:DDL] in serial mode
INFO : Completed executing command(queryId=hive_20160708141919_d7a60886-7444-41db-9619-eca329a5519c); Time taken: 0.078 seconds
INFO : OK
+-------------+--+
| role |
+-------------+--+
| test_role |
| admin_role |
+-------------+--+
2 rows selected (0.226 seconds)
0: jdbc:hive2://slave2:10000/default>
0: jdbc:hive2://slave2:10000/default> SHOW GRANT ROLE test_role;
0: jdbc:hive2://slave2:10000/default> SHOW GRANT ROLE admin_role;
0: jdbc:hive2://slave2:10000/default> show databases;
0: jdbc:hive2://cdh1:10000/> show databases;
0: jdbc:hive2://cdh1:10000/> use filtered;
0: jdbc:hive2://cdh1:10000/> select * from filtered.events;
beeline -u "jdbc:hive2://slave2:10000/default;principal=hive/slave2@MSXF.HADOOP"
查看当前系统用户是谁:
0: jdbc:hive2://cdh1:10000/> set system:user.name;
+------------------------+--+
|          set           |
+------------------------+--+
| system:user.name=hive  |
+------------------------+--+
1 row selected (0.188 seconds)
test用户不是管理员,是不能查看所有角色的:
0: jdbc:hive2://cdh1:10000/> show roles;
ERROR : Error processing Sentry command: Access denied to test. Server Stacktrace: org.apache.sentry.provider.db.SentryAccessDeniedException: Access denied to test
test用户可以列出所有数据库:
0: jdbc:hive2://cdh1:10000/> show databases;
+----------------+--+
| database_name  |
+----------------+--+
| default        |
| filtered       |
| sensitive      |
+----------------+--+
3 rows selected (0.079 seconds)
test用户可以访问filtered库:
0: jdbc:hive2://cdh1:10000/> use filtered;
0: jdbc:hive2://cdh1:10000/> select * from events;
like:
但是,test用户没有权限访问sensitive库
beeline -u "jdbc:hive2://slave2:10000/default;principal=hive/slave2@MSXF.HADOOP"
#cd /var/run/cloudera-scm-agent/process/90-impala-IMPALAD
#ll | grep impala
#cd 90-impala-IMPALAD/
# kinit -kt impala.keytab impala/slave2
# impala-shell
> invalidate metadata;
kadmin.local: addprinc bbb
# useradd -g test -u 1000 bbb