step1:
Add the jar to:
Production Hive client path: /user/lib/hive
Production Hive server path: /opt/cloudera/parcels/CDH/lib/hive/
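A minimal sketch of getting the jar in place, assuming the hook jar built from the HiveAdmin class at the end of this document is named hive-admin-hook.jar (the jar name and target host are placeholders; use the paths listed above):
# copy the hook jar to the production Hive server path from step1
scp hive-admin-hook.jar root@<hiveserver2-host>:/opt/cloudera/parcels/CDH/lib/hive/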
step2:
As the admin user, configure the following hive-site.xml parameters for the Hive service in Cloudera Manager (CM):
<1> Hive Service Advanced Configuration Snippet (Safety Valve) for hive-site.xml
name:hive.security.authorization.enabled value:true
name:hive.security.authorization.createtable.owner.grants value:ALL
name:hive.security.authorization.task.factory value:org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl
name:hive.warehouse.subdir.inherit.perms value:true
name:hive.semantic.analyzer.hook value:cn.jpush.HiveAdmin
name:hive.metastore.authorization.storage.checks value:true
<2> Hive Client Advanced Configuration Snippet (Safety Valve) for hive-site.xml
name:hive.security.authorization.enabled value:true
name:hive.security.authorization.createtable.owner.grants value:ALL
name:hive.security.authorization.task.factory value:org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactoryImpl
name:hive.warehouse.subdir.inherit.perms value:true
name:hive.semantic.analyzer.hook value:cn.jpush.HiveAdmin
name:hive.metastore.authorization.storage.checks value:true
<3> HiveServer2 Advanced Configuration Snippet (Safety Valve) for hive-site.xml
name:hive.semantic.analyzer.hook value:cn.jpush.HiveAdmin
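After saving the safety-valve entries, redeploy the client configuration and restart the Hive services in CM. The effective values can then be sanity-checked from any client, for example:
# print the effective value of a property inside a Hive session
hive -e "set hive.security.authorization.enabled;"
hive -e "set hive.semantic.analyzer.hook;"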
Step3:
<3.0> On both NameNode servers in the cluster, edit /etc/group and add every user (except ronghui) to the data group.
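This can be done by editing /etc/group directly or with usermod; a sketch for a single user (<username> is a placeholder, run on both NameNode hosts):
# append an existing user to the data group without changing its other groups
usermod -a -G data <username>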
<3.1> Grant the data group rwx permission on the kaikai database directory:
hdfs dfs -setfacl -m group:data:rwx /user/hive/warehouse/kaikai.db
(At the same time, remove the ACL entries currently scoped to the partner group and rely on plain 775 permissions: members of other groups still have read access in HDFS but cannot write, and GRANT-based authorization keeps the partner group from reading the data group's databases.)
hdfs dfs -setfacl -b /user/hive/warehouse/sed.db
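The resulting ACL entries can be verified with getfacl, for example:
# show the ACLs now attached to the two database directories
hdfs dfs -getfacl /user/hive/warehouse/kaikai.db
hdfs dfs -getfacl /user/hive/warehouse/sed.db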
<3.2> As the hdfs user, change everything under /user/hive/warehouse, including the warehouse directory itself, to user push and group data.
This makes it easier later, once a scheduling system is in place, to simply remove most users from the data group, go live with the push user, and tighten all files to 755 permissions, which is more secure.
(/user/hive/external is already push:data, so it needs no change.)
hdfs dfs -chown -R push:data /user/hive/warehouse
hdfs dfs -chown -R push:data /user/hive/external
The same applies to databases created later by the administrators, because Hive's default ownership for warehouse directories is hive:hive (user:group):
hdfs dfs -chown -R push:data /user/hive/warehouse/tmp.db
<3.3> Still as the hdfs user, change the permissions to 775.
hdfs dfs -chmod -R 775 /user/hive/warehouse/ads.db
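Ownership and permissions can be spot-checked afterwards, for example:
# list warehouse entries with their owner, group and mode
hdfs dfs -ls /user/hive/warehouse
hdfs dfs -ls -d /user/hive/warehouse/ads.db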
Step4: Update the Hive authorization metadata.
hive -f grant_order.hql
PS: You can query the ROLES, DBS, DB_PRIVS, GLOBAL_PRIVS, and ROLE_MAP tables in the Hive metastore to see the records written by the grant statements above.
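The contents of grant_order.hql are not reproduced here; as an illustration only, its statements would use Hive's legacy authorization GRANT syntax, roughly like the following (the database and group names are examples):
# grant the data group full access to a database; must be run as one of the admin users
hive -e "GRANT ALL ON DATABASE kaikai TO GROUP data;"
# grants can also be inspected from Hive itself, without querying the metastore tables directly
hive -e "SHOW GRANT GROUP data ON DATABASE kaikai;"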
HiveAdmin class source code:
package cn.jpush;
/**
* Created by wmky_kk on 2017-06-01.
* add test
*/
import org.apache.hadoop.hive.ql.parse.*;
import org.apache.hadoop.hive.ql.session.SessionState;
public class HiveAdmin extends AbstractSemanticAnalyzerHook {
    // Users allowed to run admin statements (database/role DDL, grant/revoke).
    // private static String admin = "youck";
    private static String[] admin = {"youck", "root", "xujun", "huangyq"};

    @Override
    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
        switch (ast.getToken().getType()) {
            // shows how a statement can have multiple case labels
            case HiveParser.TOK_CREATEDATABASE:
            case HiveParser.TOK_DROPDATABASE:
            case HiveParser.TOK_CREATEROLE:
            case HiveParser.TOK_DROPROLE:
            case HiveParser.TOK_GRANT:
            case HiveParser.TOK_REVOKE:
            case HiveParser.TOK_GRANT_ROLE:
            case HiveParser.TOK_REVOKE_ROLE:
                // Look up the user who submitted the current statement.
                String userName = null;
                if (SessionState.get() != null
                        && SessionState.get().getAuthenticator() != null) {
                    userName = SessionState.get().getAuthenticator().getUserName();
                }
                // Reject the statement unless the user is one of the configured admins.
                boolean isAdmin = false;
                for (String a : admin) {
                    if (a.equalsIgnoreCase(userName)) {
                        isAdmin = true;
                        break;
                    }
                }
                if (!isAdmin) {
                    throw new SemanticException(userName + " can't use ADMIN options, except "
                            + "\"youck\",\"root\",\"xujun\",\"huangyq\".");
                }
                break;
            default:
                break;
        }
        return ast;
    }

    /* public static void main(String[] args) throws SemanticException {
        String[] admin = { "admin", "root" };
        String userName = "root";
        for (String tmp : admin) {
            // System.out.println(tmp);
            if (!tmp.equalsIgnoreCase(userName)) {
                // System.out.print(userName + " can't use ADMIN options, except " + admin[0] + "," + admin[1] + ".");
                throw new SemanticException(userName
                        + " can't use ADMIN options, except " + admin[0] + ","
                        + admin[1] + ".");
            }
        }
    } */
}
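To produce the jar deployed in step1, the class can be compiled against the Hive and Hadoop jars shipped with the CDH parcel; a minimal sketch (the output jar name hive-admin-hook.jar is a placeholder matching the step1 sketch):
# compile the hook and package it into a jar
javac -cp "$(hadoop classpath):/opt/cloudera/parcels/CDH/lib/hive/lib/*" cn/jpush/HiveAdmin.java
jar cf hive-admin-hook.jar cn/jpush/HiveAdmin.class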