try {
  CommandProcessor proc = CommandProcessorFactory.get(tokens, (HiveConf) conf);
  // choose the concrete CommandProcessor implementation based on the command
  ret = processLocalCmd(cmd, proc, ss);
} catch (SQLException e) {
  console.printError("Failed processing command " + tokens[0] + " " + e.getLocalizedMessage(),
      org.apache.hadoop.util.StringUtils.stringifyException(e));
  ret = 1;
}
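This snippet is from CliDriver.processCmd, where the raw command line has already been split into whitespace-separated tokens. A minimal sketch of that step (the command string below is a made-up example, not taken from the Hive source):

// Sketch: how a CLI command is tokenized before CommandProcessorFactory.get is called.
String cmd = "add jar /tmp/my_udf.jar";     // hypothetical command typed at the hive> prompt
String[] tokens = cmd.trim().split("\\s+"); // ["add", "jar", "/tmp/my_udf.jar"]

tokens[0] is then matched against the HiveCommand enum, shown next, to decide which processor (if any) handles the command.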
public enum HiveCommand {
  SET(),
  RESET(),
  DFS(),
  ADD(),
  DELETE(),
  COMPILE();

  private static final Set<String> COMMANDS = new HashSet<String>();
  static {
    for (HiveCommand command : HiveCommand.values()) {
      COMMANDS.add(command.name());
    }
  }

  public static HiveCommand find(String[] command) {
    if (null == command) {
      return null;
    }
    String cmd = command[0];
    if (cmd != null) {
      cmd = cmd.trim().toUpperCase();
      if (command.length > 1 && "role".equalsIgnoreCase(command[1])) {
        // special handling for set role r1 statement
        return null;
      } else if (COMMANDS.contains(cmd)) {
        return HiveCommand.valueOf(cmd);
      }
    }
    return null;
  }
}
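A few examples of what find() returns (the input arrays are hypothetical, but the behavior follows directly from the code above):

HiveCommand.find(new String[]{"set", "hive.cli.print.header", "true"}); // SET
HiveCommand.find(new String[]{"set", "role", "admin"});                 // null: "set role" is special-cased
HiveCommand.find(new String[]{"select", "*", "from", "t"});             // null: plain SQL, handled by Driver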
In the getForHiveCommand method:

public static CommandProcessor getForHiveCommand(String[] cmd, HiveConf conf)
    throws SQLException {
  HiveCommand hiveCommand = HiveCommand.find(cmd);
  // for plain SQL statements find() returns null
  if (hiveCommand == null || isBlank(cmd[0])) {
    return null;
  }
  if (conf == null) {
    conf = new HiveConf();
  }
  Set<String> availableCommands = new HashSet<String>();
  for (String availableCommand : conf.getVar(HiveConf.ConfVars.HIVE_SECURITY_COMMAND_WHITELIST).split(",")) {
    availableCommands.add(availableCommand.toLowerCase().trim());
  }
  if (!availableCommands.contains(cmd[0].trim().toLowerCase())) {
    throw new SQLException("Insufficient privileges to execute " + cmd[0], "42000");
  }
  switch (hiveCommand) {
    // each command maps to a concrete processor class
    case SET:
      return new SetProcessor();
    case RESET:
      return new ResetProcessor();
    case DFS:
      SessionState ss = SessionState.get();
      return new DfsProcessor(ss.getConf());
    case ADD:
      return new AddResourceProcessor();
    case DELETE:
      return new DeleteResourceProcessor();
    case COMPILE:
      return new CompileProcessor();
    default:
      throw new AssertionError("Unknown HiveCommand " + hiveCommand);
  }
}

The get method:

public static CommandProcessor get(String[] cmd, HiveConf conf)
    throws SQLException {
  CommandProcessor result = getForHiveCommand(cmd, conf);
  if (result != null) {
    // the command is one of the HiveCommand values, so return its processor directly
    return result;
  }
  if (isBlank(cmd[0])) {
    return null;
  } else {
    // otherwise the command is a SQL statement, so return a Driver instance
    if (conf == null) {
      return new Driver();
    }
    Driver drv = mapDrivers.get(conf);
    if (drv == null) {
      drv = new Driver();
      mapDrivers.put(conf, drv);
    }
    drv.init();
    return drv;
  }
}
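Putting the two methods together, a sketch of the dispatch (the class and method names are the ones above; actually running this requires a full Hive client classpath and the default hive.security.command.whitelist, so treat it as illustrative):

import java.sql.SQLException;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;

public class ProcessorDispatchSketch {
  public static void main(String[] args) throws SQLException {
    HiveConf conf = new HiveConf();
    // "set" and "add" are Hive commands, so the factory returns their processors ...
    CommandProcessor set = CommandProcessorFactory.get(new String[]{"set"}, conf);
    CommandProcessor add = CommandProcessorFactory.get(new String[]{"add", "jar", "x.jar"}, conf);
    // ... while anything that is not a HiveCommand falls through to the SQL Driver.
    CommandProcessor sql = CommandProcessorFactory.get(new String[]{"select", "1"}, conf);
    System.out.println(set.getClass().getSimpleName()); // SetProcessor
    System.out.println(add.getClass().getSimpleName()); // AddResourceProcessor
    System.out.println(sql.getClass().getSimpleName()); // Driver
  }
}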
The concrete CommandProcessor implementations are: AddResourceProcessor, CompileProcessor, DeleteResourceProcessor, DfsProcessor, ResetProcessor, SetProcessor, and Driver.
The SessionState.add_resource method calls SessionState.downloadResource, which in turn calls FileSystem's copyToLocalFile method to download the file to the local machine.
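The download itself is just a Hadoop FileSystem call; a minimal standalone sketch of that idea (the paths are hypothetical, and the real logic, including choosing the local target directory, lives in SessionState.downloadResource):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ResourceDownloadSketch {
  public static void main(String[] args) throws IOException {
    Path src = new Path("hdfs:///tmp/my_udf.jar");           // hypothetical remote resource
    Path dst = new Path("/tmp/hive_resources/my_udf.jar");   // hypothetical local target
    FileSystem fs = src.getFileSystem(new Configuration());  // file system that owns the source path
    fs.copyToLocalFile(src, dst);                            // copy the resource to the local machine
  }
}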
The run method of AddResourceProcessor:

public CommandProcessorResponse run(String command) {
  SessionState ss = SessionState.get();
  command = new VariableSubstitution().substitute(ss.getConf(), command);
  String[] tokens = command.split("\\s+");
  SessionState.ResourceType t;
  if (tokens.length < 2
      || (t = SessionState.find_resource_type(tokens[0])) == null) {
    console.printError("Usage: add ["
        + StringUtils.join(SessionState.ResourceType.values(), "|") + "] <value> [<value>]*");
    return new CommandProcessorResponse(1);
  }
  for (int i = 1; i < tokens.length; i++) {
    String resourceFile = ss.add_resource(t, tokens[i]);
    if (resourceFile == null) {
      String errMsg = tokens[i] + " does not exist.";
      return new CommandProcessorResponse(1, errMsg, null);
    }
  }
  return new CommandProcessorResponse(0);
}
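As a usage sketch: the CLI strips the leading add keyword before dispatching, so run() only receives the resource type and the values (the jar path is made up, and a SessionState must already have been started for ss.getConf() to work):

// Roughly what happens for "add jar /tmp/my_udf.jar" typed at the hive> prompt.
AddResourceProcessor addProc = new AddResourceProcessor();
CommandProcessorResponse resp = addProc.run("jar /tmp/my_udf.jar");
System.out.println(resp.getResponseCode()); // 0 on success, 1 if the file does not exist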
The delete_resource method of SessionState:

public boolean delete_resource(ResourceType t, String value) {
  if (resource_map.get(t) == null) {
    return false;
  }
  if (t.hook != null) {
    if (!t.hook.postHook(resource_map.get(t), value)) {
      return false;
    }
  }
  return (resource_map.get(t).remove(value));
}
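So a CLI command such as delete jar /tmp/my_udf.jar ultimately removes the entry from the session's resource map, roughly like this (a sketch with a made-up path):

SessionState ss = SessionState.get();
// Returns true if the jar was registered in this session and has now been removed.
boolean removed = ss.delete_resource(SessionState.ResourceType.JAR, "/tmp/my_udf.jar");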
SetProcessor handles the set command; for example, a variable in the system: namespace can be written and then read back:

hive> set system:user.name=xxxx;
hive> set system:user.name;
system:user.name=xxxx
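Under the hood, the system: namespace maps onto JVM system properties; a minimal sketch of the idea (this is not the actual SetProcessor code, just the equivalent JVM calls):

// "set system:user.name=xxxx" is roughly equivalent to:
System.setProperty("user.name", "xxxx");
// and "set system:user.name" to reading it back:
System.out.println("system:user.name=" + System.getProperty("user.name"));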