Nested UGIs, doAs, proxy users
// run a spark-shell as super_user_1
scala> AccessControlCheck.hdfsLs(sc.hadoopConfiguration)
about to try to hdfs ls
res0: String = success
scala> AccessControlCheck.doAs("super_user_2", true) { () => AccessControlCheck.hdfsLs(sc.hadoopConfiguration) }
about to try to hdfs ls
res1: String = success
scala> AccessControlCheck.doAs("regular_user", true) { () => AccessControlCheck.hdfsLs(sc.hadoopConfiguration) }
about to try to hdfs ls
res2: String =
"Permission denied: user=regular_user, access=READ_EXECUTE, inode="/restriced/path":hive:hive:drwxrwx---
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:561)
at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:399)
at org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer$RangerAccessControlEnforcer.checkDefaultEnforcer(RangerHdfsAuthorizer.java:763)
at org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer$RangerAccessControlEnforcer.checkRangerPermission(RangerHdfsAuthorizer.java:537)
at org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer$RangerAccessControlEnforcer.checkPermissionWithContext(RangerHdfsAuthorizer.java:353)
at org.apache.h..."
// when the nested ugi is created with the "current" user, we get an authentication error;
// regular_user cannot proxy as super_user_2 (see the UGI sketch after the transcript)
scala> AccessControlCheck.doAs("regular_user", true) { () =>
AccessControlCheck.doAs("super_user_2", false) { () => AccessControlCheck.hdfsLs(sc.hadoopConfiguration) }
}
about to try to hdfs ls
21/04/30 21:27:53 WARN ipc.Client: Exception encountered while connecting to the server : org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
res4: String = DestHost:destPort xxx.foo.com:8020 , LocalHost:localPort xxx.foo.com/123.123.123.123:0. Failed on local exception: java.io.IOException: org.apache.hadoop.security.AccessControlException: Client cannot authenticate via:[TOKEN, KERBEROS]
// but when the nested ugi is created with the "login" user, it works fine;
// super_user_1 IS allowed to proxy as super_user_2
scala> AccessControlCheck.doAs("regular_user", true) {
() => AccessControlCheck.doAs("super_user_2", true) {() => AccessControlCheck.hdfsLs(sc.hadoopConfiguration) }
}
about to try to hdfs ls
res5: String = success
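For reference, here is a small sketch (not part of the original gist) that prints the UGI state at the call site. Inside a doAs block, UserGroupInformation.getCurrentUser() returns the proxy UGI, whose authentication method is PROXY and which carries no kerberos credentials of its own, while getLoginUser() still returns the kerberos-authenticated super_user_1 from the original login; that is why chaining the nested proxy off the current user fails to authenticate. The UgiDebug name is made up for illustration:

import org.apache.hadoop.security.UserGroupInformation

object UgiDebug {
  // Print which UGIs are active at the point of the call.
  def show(label: String): Unit = {
    val current = UserGroupInformation.getCurrentUser()
    val login = UserGroupInformation.getLoginUser()
    println(s"$label: current=$current auth=${current.getAuthenticationMethod} " +
      s"realUser=${current.getRealUser} login=$login")
  }
}

The AccessControlCheck helper used in the shell session above: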
import java.security.PrivilegedExceptionAction
import org.apache.hadoop.security.UserGroupInformation
import org.apache.hadoop.fs.Path
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.conf.Configuration

object AccessControlCheck {

  val privilegedPath = "/some/path/with/limited/access"

  // Run `f` as `asUser` via a proxy UGI. The proxy is chained off either the login user
  // (the originally kerberos-authenticated user) or the current user (whatever UGI is
  // active at the call site), depending on `loginUser`.
  def doAs[T](asUser: String, loginUser: Boolean)(f: () => T): T = {
    val realUser =
      if (loginUser) UserGroupInformation.getLoginUser() else UserGroupInformation.getCurrentUser()
    val ugi = UserGroupInformation.createProxyUser(asUser, realUser)
    ugi.doAs(new PrivilegedExceptionAction[T] {
      def run(): T = f()
    })
  }

  def hdfsLs(conf: Configuration): String = {
    val fs = FileSystem.get(conf)
    println("about to try to hdfs ls")
    try {
      // Try to do something which only super-users can do, and regular users would be denied.
      // Nothing special about hdfs in particular, this is just an easy example.
      fs.listFiles(new Path(privilegedPath), false)
      "success"
    } catch {
      case e: Exception => e.getMessage()
    }
  }
}
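For the working case, the cluster also has to authorize the impersonation itself: the NameNode checks the hadoop.proxyuser.super_user_1.hosts / .users / .groups properties in core-site.xml before letting super_user_1 act on behalf of super_user_2. Below is a minimal sketch (not part of the original gist) of a standalone driver that logs in from a keytab and then reuses the helper above; the principal, realm, and keytab path are placeholders:

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.security.UserGroupInformation

object ProxyDriver {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()
    // Pick up the security settings (kerberos auth, etc.) before logging in.
    UserGroupInformation.setConfiguration(conf)
    // This becomes the "login user" that doAs(..., loginUser = true) chains off of.
    // Principal and keytab path are placeholders.
    UserGroupInformation.loginUserFromKeytab(
      "super_user_1@EXAMPLE.COM", "/etc/security/keytabs/super_user_1.keytab")

    // Impersonate super_user_2 for a single HDFS call, as in the shell session above.
    val result = AccessControlCheck.doAs("super_user_2", loginUser = true) { () =>
      AccessControlCheck.hdfsLs(conf)
    }
    println(result)
  }
}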