本文旨在展示CDH基於Kerberos身份認證和基於Sentry的許可權控制功能的測試示例。 1. 準備測試數據 1 2 3 4 5 6 cat /tmp/events.csv 10.1.2.3,US,android,createNote 10.200.88.99,FR,windows,updateN ...
本文旨在展示CDH基於Kerberos身份認證和基於Sentry的許可權控制功能的測試示例。
1. 準備測試數據
1 2 3 4 5 6 |
cat /tmp/events.csv
10.1.2.3,US,android,createNote
10.200.88.99,FR,windows,updateNote
10.1.2.3,US,android,updateNote
10.200.88.77,FR,ios,createNote
10.1.4.5,US,windows,updateTag
|
2. 創建用戶
2.1. 創建系統用戶
在集群所有節點創建系統用戶並設置密碼
1 2 3 4 5 6 |
useradd user1
passwd user1
useradd user2
passwd user2
useradd user3
passwd user3
|
2.2. 創建kerberos用戶
1 2 3 |
kadmin.local -q "addprinc user1"
kadmin.local -q "addprinc user2"
kadmin.local -q "addprinc user3"
|
3. 創建資料庫和表
3.1. 創建資料庫
admin為sentry的超級管理員,該用戶配置許可權時已設置
1 |
kinit admin
|
通過beeline連接 hiveserver2,運行下麵命令創建hive庫的超級管理員角色, 並將該角色賦予admin組,使admin有操作hive庫的權力
1 2 3 4 |
beeline -u "jdbc:hive2://vmw208:10000/;principal=hive/[email protected]"
create role admin_role;
GRANT ALL ON SERVER server1 TO ROLE admin_role;
GRANT ROLE admin_role TO GROUP admin;
|
創建兩個測試資料庫
1 2 |
create database db1;
create database db2;
|
3.2. 創建表
在兩個測試資料庫中各創建一張測試表,並導入測試數據
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 |
create table db1.table1 (
ip STRING, country STRING, client STRING, action STRING
) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ;
create table db2.table1 (
ip STRING, country STRING, client STRING, action STRING
) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ;
create table db2.table2 (
ip STRING, country STRING, client STRING, action STRING
) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ;
load data local inpath '/home/iie/events.csv' overwrite into table db1.table1;
load data local inpath '/home/iie/events.csv' overwrite into table db2.table1;
load data local inpath '/home/iie/events.csv' overwrite into table db2.table2;
|
4. 賦予用戶許可權
4.1. 給user1賦予db1的所有許可權
1 2 3 |
create role user1_role;
GRANT ALL ON DATABASE db1 TO ROLE user1_role;
GRANT ROLE user1_role TO GROUP user1;
|
4.2. 給user2賦予db2的所有許可權
1 2 3 |
create role user2_role;
GRANT ALL ON DATABASE db2 TO ROLE user2_role;
GRANT ROLE user2_role TO GROUP user2;
|
4.3. 給user3賦予db2.table1的所有許可權
1 2 3 4 |
create role user3_role;
use db2;
GRANT select ON table table1 TO ROLE user3_role;
GRANT ROLE user3_role TO GROUP user3;
|
5. 測試用戶許可權
5.1. Hive測試
5.1.1. admin用戶擁有整個hive庫的許可權
1 2 3 4 5 6 7 8 9 10 11 12 13 |
kinit admin
beeline -u "jdbc:hive2://vmw208:10000/;principal=hive/[email protected]"
show databases;
5.1.2. user1用戶只具有db1和default的許可權
kinit user1
beeline -u "jdbc:hive2://vmw208:10000/;principal=hive/[email protected]"
0: jdbc:hive2://vmw208:10000/> show databases;
+----------------+--+
| database_name |
+----------------+--+
| db1 |
| default |
+----------------+--+
|
5.1.3. user2用戶只具有db2和default的許可權
1 2 3 4 5 6 7 8 9 |
kinit user2
beeline -u "jdbc:hive2://vmw208:10000/;principal=hive/[email protected]"
0: jdbc:hive2://vmw208:10000/> show databases;
+----------------+--+
| database_name |
+----------------+--+
| db2 |
| default |
+----------------+--+
|
5.1.4. user3用戶只具有db2.table1和default的許可權
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 |
kinit user3
beeline -u "jdbc:hive2://vmw208:10000/;principal=hive/[email protected]"
0: jdbc:hive2://vmw208:10000/> show databases;
+----------------+--+
| database_name |
+----------------+--+
| db2 |
| default |
+----------------+--+
0: jdbc:hive2://node0a17:10000/> use db2;
0: jdbc:hive2://node0a17:10000/> show tables;
INFO : OK
+-----------+--+
| tab_name |
+-----------+--+
| table1 |
+-----------+--+
|
5.2. Hdfs測試
配置hdfs acl與sentry同步後,hdfs許可權與sentry監控的目錄(/user/hive/warehouse)的許可權同步
5.2.1. 切換到hive用戶,查看hive庫文件的許可權
設置hdfs acl與sentry同步後,sentry監控的hive庫的許可權改動會同步到對應的hdfs文件許可權
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 |
[root@vmw208 home]# kinit hive
[root@vmw208 home]# hdfs dfs -getfacl -R /user/hive/warehouse/
# file: /user/hive/warehouse
# owner: hive
# group: hive
user::rwx
user:hive:rwx
group::---
group:hive:rwx
mask::rwx
other::--x
# file: /user/hive/warehouse/db1.db
# owner: hive
# group: hive
user::rwx
user:hive:rwx
group:user1:rwx
group::---
group:hive:rwx
mask::rwx
other::--x
# file: /user/hive/warehouse/db1.db/table1
# owner: hive
# group: hive
user::rwx
user:hive:rwx
group:user1:rwx
group::---
group:hive:rwx
mask::rwx
other::--x
# file: /user/hive/warehouse/db1.db/table1/events.csv
# owner: hive
# group: hive
user::rwx
user:hive:rwx
group:user1:rwx
group::---
group:hive:rwx
mask::rwx
other::--x
# file: /user/hive/warehouse/db2.db
# owner: hive
# group: hive
user::rwx
user:hive:rwx
group:user2:rwx
group::---
group:hive:rwx
mask::rwx
other::--x
# file: /user/hive/warehouse/db2.db/table1
# owner: hive
# group: hive
user::rwx
user:hive:rwx
group:user2:rwx
group::---
group:hive:rwx
mask::rwx
other::--x
# file: /user/hive/warehouse/db2.db/table1/events.csv
# owner: hive
# group: hive
user::rwx
user:hive:rwx
group:user2:rwx
group::---
group:hive:rwx
mask::rwx
other::--x
|
5.2.2. 切換到user1用戶,查看hdfs文件
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 |
[root@vmw208 home]# kinit user1
Password for user1@HADOOP.COM:
[root@vmw208 home]# hdfs dfs -ls /user/hive/warehouse/db2.db
ls: Permission denied: user=user1, access=READ_EXECUTE, inode="/user/hive/warehouse/db2.db":hive:hive:drwxrwx--x
[root@vmw208 home]# hdfs dfs -cat /user/hive/warehouse/db2.db/table1/events.csv
cat: Permission denied: user=user1, access=READ, inode="/user/hive/warehouse/db2.db/table1/events.csv":hive:hive:-rwxrwx--x
[root@vmw208 home]# hdfs dfs -ls /user/hive/warehouse/db1.db
Found 1 items
drwxrwx--x+ - hive hive 0 2016-09-29 16:54 /user/hive/warehouse/db1.db/table1
[root@vmw208 home]# hdfs dfs -cat /user/hive/warehouse/db1.db/table1/events.csv
10.1.2.3,US,android,createNote
10.200.88.99,FR,windows,updateNote
10.1.2.3,US,android,updateNote
10.200.88.77,FR,ios,createNote
10.1.4.5,US,windows,updateTag
|
5.2.3. 切換到user2用戶,查看hdfs文件
1 2 3 4 5 6 7 8 9 10 |
[root@vmw208 home]# kinit user2
Password for user2@HADOOP.COM:
[root@vmw208 home]# hdfs dfs -cat /user/hive/warehouse/db1.db/table1/events.csv
cat: Permission denied: user=user2, access=READ, inode="/user/hive/warehouse/db1.db/table1/events.csv":hive:hive:-rwxrwx--x
[root@vmw208 home]# hdfs dfs -cat /user/hive/warehouse/db2.db/table1/events.csv
10.1.2.3,US,android,createNote
10.200.88.99,FR,windows,updateNote
10.1.2.3,US,android,updateNote
10.200.88.77,FR,ios,createNote
10.1.4.5,US,windows,updateTag
|
5.3. Spark測試
5.3.1. Spark讀hive表數據並列印到控制台
(1) 切換到user1用戶測試
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 |
[root@vmw209 xdf]# kinit user1
Password for user1@HADOOP.COM:
[root@vmw209 xdf]# spark-submit --class iie.hadoop.permission.QueryTable --master local /home/xdf/spark.jar db2 table1
……
Exception in thread "main" org.apache.hadoop.security.AccessControlException: Permission denied: user=user1, access=READ_EXECUTE, inode="/user/hive/warehouse/db2.db/table1":hive:hive:drwxrwx--x
[root@vmw209 xdf]# spark-submit --class iie.hadoop.permission.QueryTable --master local /home/xdf/spark.jar db1 table1
……
+------------+-------+-------+----------+
| ip|country| client| action|
+------------+-------+-------+----------+
|    10.1.2.3|     US|android|createNote|
|10.200.88.99|     FR|windows|updateNote|
|    10.1.2.3|     US|android|updateNote|
|10.200.88.77|     FR|    ios|createNote|
|    10.1.4.5|     US|windows| updateTag|
+------------+-------+-------+----------+
|
(2) 切換到user2用戶測試
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 |
[root@vmw209 xdf]# kinit user2
Password for user2@HADOOP.COM:
[root@vmw209 xdf]# spark-submit --class iie.hadoop.permission.QueryTable --master local /home/xdf/spark.jar db1 table1
……
Exception in thread "main" org.apache.hadoop.security.AccessControlException: Permission denied: user=user2, access=READ_EXECUTE, inode="/user/hive/warehouse/db1.db/table1":hive:hive:drwxrwx--x
[root@vmw209 xdf]# spark-submit --class iie.hadoop.permission.QueryTable --master local /home/xdf/spark.jar db2 table1
……
+------------+-------+-------+----------+
| ip|country| client| action|
+------------+-------+-------+----------+
|    10.1.2.3|     US|android|createNote|
|10.200.88.99|     FR|windows|updateNote|
|    10.1.2.3|     US|android|updateNote|
|10.200.88.77|     FR|    ios|createNote|
|    10.1.4.5|     US|windows| updateTag|
+------------+-------+-------+----------+
|
5.3.2. Spark讀文件數據寫入hive表中
調用工具程式spark.jar讀本地文件/home/xdf/events.csv數據寫到db2.table2
切換到user2用戶測試
1 2 3 4 5 6 7 |
kinit user2
beeline -u "jdbc:hive2://vmw208:10000/;principal=hive/[email protected]"
use db2;
create table table2 (
ip STRING, country STRING, client STRING, action STRING
) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ;
[root@vmw209 xdf]# spark-submit --class iie.hadoop.permission.HCatWriterTest --master local /home/xdf/spark.jar /home/xdf/events.csv db2 table2
|
成功!
寫到db1.table1報錯,沒有許可權!
1 |
Exception in thread "main" org.apache.hive.hcatalog.common.HCatException : 2004 : HCatOutputFormat not initialized, setOutput has to be called. Cause : org.apache.hadoop.security.AccessControlException: Permission denied: user=user2, access=WRITE, inode="/user/hive/warehouse/db1.db/table1":hive:hive:drwxrwx--x
|
上面只是測試環境,因為kinit + 密碼的方式有時效限制,不適合在生產環境運行,幸好spark提供了相關的參數:
1 2 3 4 |
spark-submit
……
--principal # 用戶對應的kerberos principal
--keytab # 對應用戶principal生成的密鑰文件
|
spark的許可權管理通過對hdfs/hive的文件目錄設置許可權來管理,不同的用戶擁有不同的許可權,用戶在提交spark任務時,指定對應用戶的kerberos principle和keytab來實現許可權管理。任務提交命令如下:
1 |
spark-submit --class iie.hadoop.permission.QueryTable --master yarn-cluster [email protected] --keytab=/home/user1/user1.keytab /home/user1/spark.jar db1 table1
|
其中--principal 和--keytab與用戶一一對應
註意:spark-submit只有在yarn-cluster模式下,--principal 和--keytab才有效
5.4. Kafka測試
5.4.1. 認證
用戶kafka為kafka許可權控制的超級管理員
1 |
[root@node10 iie]# kinit -kt /home/iie/kafka.keytab kafka
|
5.4.2. 創建topic
創建topic1和topic2
1 2 |
[root@node10 iie]# kafka-topics --zookeeper node11:2181/kafka --create --topic topic1 --partitions 2 --replication-factor 1
[root@node10 iie]# kafka-topics --zookeeper node11:2181/kafka --create --topic topic2 --partitions 2 --replication-factor 1
|
5.4.3. 賦權
給user1用戶賦予topic1的讀寫許可權
1 2 |
[root@node10 iie]# kafka-acls --authorizer-properties zookeeper.connect=node11:2181/kafka --add --allow-principal User:user1 --allow-host node10 --producer --topic topic1 --group console-consumer-9175
[root@node10 iie]# kafka-acls --authorizer-properties zookeeper.connect=node11:2181/kafka --add --allow-principal User:user1 --allow-host node10 --consumer --topic topic1 --group console-consumer-9175
|
給user2用戶賦予topic2的讀寫許可權
1 2 |
[root@node10 iie]# kafka-acls --authorizer-properties zookeeper.connect=node11:2181/kafka --add --allow-principal User:user2 --allow-host node10 --producer --topic topic2 --group console-consumer-9175
[root@node10 iie]# kafka-acls --authorizer-properties zookeeper.connect=node11:2181/kafka --add --allow-principal User:user2 --allow-host node10 --consumer --topic topic2 --group console-consumer-9175
|
5.4.4. 查看許可權
1 2 3 4 5 6 7 |
[root@node10 iie]# kafka-acls --authorizer-properties zookeeper.connect=node11:2181/kafka --list
Current ACLs for resource `Topic:topic1`:
User:user1 has Allow permission for operations: Write from hosts: node10
User:user1 has Allow permission for operations: Read from hosts: node10
Current ACLs for resource `Topic:topic2`:
User:user2 has Allow permission for operations: Read from hosts: node10
User:user2 has Allow permission for operations: Write from hosts: node10
|
5.4.5. 創建生產消費配置文件
創建consumer.properties
1 2 3 4 5 |
cat /etc/kafka/conf/consumer.properties
security.protocol=SASL_PLAINTEXT
sasl.mechanism=GSSAPI
sasl.kerberos.service.name=kafka
group.id=console-consumer-9175
|
創建producer.properties
1 2 3 4 |
cat /etc/kafka/conf/producer.properties
security.protocol=SASL_PLAINTEXT
sasl.mechanism=GSSAPI
sasl.kerberos.service.name=kafka
|
5.4.6. 生產數據
命令行生產數據
1 2 3 4 |
[root@node10 iie]# kinit user1
[root@node10 iie]# kafka-console-producer --broker-list node12:9092 --topic topic1 --producer.config /etc/kafka/conf/producer.properties
123123
123123
|
5.4.7. 消費數據
命令行消費數據