
Apache Ranger Series 9: Modifying the Source Code to Support URIs with the s3 Scheme


Problem description: in checkPrivileges (org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizer), when the object type is URI, Ranger checks the URI scheme, which by default must be hdfs: or file:.

if (hiveObjType == HiveObjectType.URI && isPathInFSScheme(path)) {
    FsAction permission = getURIAccessType(hiveOpType);

    if (!isURIAccessAllowed(user, permission, path, getHiveConf())) {
        throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
    }
    continue;
}

private boolean isPathInFSScheme(String uri) {
    // This is to find if HIVE URI operation done is for hdfs, file scheme
    // else it may be for s3 which needs another set of authorization calls.
    boolean ret = false;
    String[] fsScheme = hivePlugin.getFSScheme();
    if (fsScheme != null) {
        for (String scheme : fsScheme) {
            if (!uri.isEmpty() && uri.startsWith(scheme)) {
                ret = true;
                break;
            }
        }
    }
    return ret;
}

private static String RANGER_PLUGIN_HIVE_ULRAUTH_FILESYSTEM_SCHEMES_DEFAULT = "hdfs:,file:";
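To see concretely why an s3a location falls through this check, here is a small standalone sketch that mirrors the isPathInFSScheme logic above against the default scheme list (the SchemeCheckDemo class and the sample paths are throwaway illustration code, not part of Ranger):

public class SchemeCheckDemo {
    // mirrors RANGER_PLUGIN_HIVE_ULRAUTH_FILESYSTEM_SCHEMES_DEFAULT
    private static final String[] DEFAULT_FS_SCHEMES = "hdfs:,file:".split(",");

    // same logic as isPathInFSScheme(), with the scheme list passed in explicitly
    static boolean pathInFsScheme(String uri, String[] fsScheme) {
        if (fsScheme != null) {
            for (String scheme : fsScheme) {
                if (!uri.isEmpty() && uri.startsWith(scheme)) {
                    return true;
                }
            }
        }
        return false;
    }

    public static void main(String[] args) {
        System.out.println(pathInFsScheme("hdfs://nn:8020/warehouse/t", DEFAULT_FS_SCHEMES)); // true  -> URI branch is taken
        System.out.println(pathInFsScheme("s3a://bucket/path", DEFAULT_FS_SCHEMES));          // false -> URI branch is skipped
    }
}

Because an s3a:// path matches neither default scheme, S3-backed URI objects never enter the branch above, so Ranger's URI handling does not apply to them; this is what the modification below addresses.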

Enable DEBUG logging and inspect the request information in the execution log.

2022-12-12T06:44:33,683 DEBUG [b70562ff-5cbd-44e8-acb8-71b8799ed734 HiveServer2-Handler-Pool: Thread-63([])]: authorizer.RangerHiveAuthorizer (RangerHiveAuthorizer.java:checkPrivileges(788)) - 'checkPrivileges':{'hiveOpType':CREATETABLE,
'inputHObjs':['HivePrivilegeObject':{'type':DFS_URI, 'dbName':null, 'objectType':DFS_URI, 'objectName':s3a://xxxx/xxx/xxx/xxxx_daily_test, 'columns':[], 'partKeys':[], 'commandParams':[], 'actionType':OTHER}],

'outputHObjs':['HivePrivilegeObject':{'type':DATABASE, 'dbName':dev, 'objectType':DATABASE, 'objectName':null, 'columns':[], 'partKeys':[], 'commandParams':[], 'actionType':OTHER},'HivePrivilegeObject':{'type':TABLE_OR_VIEW, 'dbName':dev, 'objectType':TABLE_OR_VIEW, 'objectName':xxxx_daily_test, 'columns':[], 'partKeys':[], 'commandParams':[], 'actionType':OTHER}],
'context':{'clientType':HIVESERVER2, 'commandString':create table if not exists dev.xxxx_daily_test ( event_date string,
reason string)PARTITIONED BY
(
pdate string
)
STORED AS PARQUET
LOCATION 's3a://xxxx/xxxx/xxx/xxxx_daily_test', 'ipAddress':172.31.176.136, 'forwardedAddresses':null, 'sessionString':b70562ff-5cbd-44e8-acb8-71b8799ed734}, 'user':xxxxx, 'groups':[]}


Change the code to a custom validation:

// add s3 path check
if (hiveObjType == HiveObjectType.URI) {
	FsAction permission = getURIAccessType(hiveOpType);
	if (!isURIAccessAllowedForS3(user, path)) {
		throw new HiveAccessControlException(String.format("Permission denied: user [%s] does not have [%s] privilege on [%s]", user, permission.name(), path));
	}
	continue;
}

private boolean isURIAccessAllowedForS3(String userName, String uri) {
	boolean ret = false;

	try {
		// the hadoop user is allowed to access any URI
		if (userName.equalsIgnoreCase("hadoop")) {
			ret = true;
		} else {
			if (uri.startsWith("s3a://user-devs") || uri.startsWith("s3://user-devs")) {
				ret = true;
			} else {
				LOG.error("uri[" + uri + "] should start with s3a://user-devs or s3://user-devs for user[" + userName + "].");
				ret = false;
			}
		}
	} catch (Exception excp) {
		ret = false;
		LOG.error("Error getting permissions for " + uri, excp);
	}

	return ret;
}
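A quick way to sanity-check the rule before rebuilding is to copy it into a throwaway class and feed it a few combinations; a minimal sketch (the S3UriRuleCheck class and the user name alice are illustration only, not part of Ranger; the hadoop user and the user-devs bucket prefix come from the hard-coded rule above):

public class S3UriRuleCheck {
    // copy of the custom rule from isURIAccessAllowedForS3, without logging
    static boolean allowed(String userName, String uri) {
        if (userName.equalsIgnoreCase("hadoop")) {
            return true; // the hadoop user may access any URI
        }
        return uri.startsWith("s3a://user-devs") || uri.startsWith("s3://user-devs");
    }

    public static void main(String[] args) {
        System.out.println(allowed("hadoop", "s3a://other-bucket/x"));        // true
        System.out.println(allowed("alice", "s3a://user-devs/warehouse/t"));  // true
        System.out.println(allowed("alice", "s3a://other-bucket/x"));         // false -> HiveAccessControlException
        System.out.println(allowed("alice", "hdfs://nn:8020/tmp/x"));         // false -> see note below
    }
}

Note that if this branch replaces the original hdfs:/file: handling (the condition above no longer calls isPathInFSScheme), hdfs:// and file:// URIs will also be evaluated by this rule and denied for every user other than hadoop unless the rule is extended.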

For reference, the type mapping in RangerHiveAuthorizer (HivePrivilegeObject type to HiveObjectType): both DFS_URI and LOCAL_URI are mapped to HiveObjectType.URI, which is why the s3a location in the log above reaches the URI branch.

			case DATABASE:
				objType = HiveObjectType.DATABASE;
			break;

			case PARTITION:
				objType = HiveObjectType.PARTITION;
			break;

			case TABLE_OR_VIEW:
				if(hiveOpTypeName.contains("index")) {
					objType = HiveObjectType.INDEX;
				} else if(! StringUtil.isEmpty(hiveObj.getColumns())) {
					objType = HiveObjectType.COLUMN;
				} else if(hiveOpTypeName.contains("view")) {
					objType = HiveObjectType.VIEW;
				} else {
					objType = HiveObjectType.TABLE;
				}
			break;

			case FUNCTION:
				objType = HiveObjectType.FUNCTION;
				if (isTempUDFOperation(hiveOpTypeName, hiveObj)) {
					objType = HiveObjectType.GLOBAL;
				}
			break;

			case DFS_URI:
			case LOCAL_URI:
				objType = HiveObjectType.URI;
			break;

			case COMMAND_PARAMS:
			case GLOBAL:
				if ( "add".equals(hiveOpTypeName) || "compile".equals(hiveOpTypeName)) {
					objType = HiveObjectType.GLOBAL;
				}
			break;

			case SERVICE_NAME:
				objType = HiveObjectType.SERVICE_NAME;
			break;

			case COLUMN:
				// Thejas: this value is unused in Hive; the case should not be hit.
			break;


Rebuild the hive-agent module:

cd /Users/xxxx/sourceCodeProj/ranger/hive-agent
mvn -DskipTests=true clean package


From: https://www.cnblogs.com/aichihuluobo/p/17005769.html
