AES encryption/decryption class:
package com.zmlearn.udf;

import java.security.SecureRandom;

import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;

/**
 * AES encrypter/decrypter.
 *
 * @author padnans
 */
public class AES {

    /**
     * AES encryption: derives a key from the seed, encrypts the plaintext
     * and returns the ciphertext hex-encoded.
     */
    public static String encrypt(String seed, String cleartext) throws Exception {
        byte[] rawKey = getRawKey(seed.getBytes());
        byte[] result = encrypt(rawKey, cleartext.getBytes());
        return toHex(result);
    }

    /**
     * AES decryption: derives the same key from the seed and decrypts
     * the hex-encoded ciphertext.
     */
    public static String decrypt(String seed, String encrypted) throws Exception {
        byte[] rawKey = getRawKey(seed.getBytes());
        byte[] enc = toByte(encrypted);
        byte[] result = decrypt(rawKey, enc);
        return new String(result);
    }

    /**
     * Derives a 256-bit raw AES key from the seed via a seeded SHA1PRNG SecureRandom.
     * Note: the 256-bit key size is what triggers "Illegal key size" on a JRE
     * that still has the restricted JCE policy files (see below).
     */
    private static byte[] getRawKey(byte[] seed) throws Exception {
        KeyGenerator kgen = KeyGenerator.getInstance("AES");
        SecureRandom sr = SecureRandom.getInstance("SHA1PRNG");
        sr.setSeed(seed);
        kgen.init(256, sr); // 192 and 256 bits may not be available
        SecretKey skey = kgen.generateKey();
        return skey.getEncoded();
    }

    private static byte[] encrypt(byte[] raw, byte[] clear) throws Exception {
        SecretKeySpec skeySpec = new SecretKeySpec(raw, "AES");
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.ENCRYPT_MODE, skeySpec);
        return cipher.doFinal(clear);
    }

    private static byte[] decrypt(byte[] raw, byte[] encrypted) throws Exception {
        SecretKeySpec skeySpec = new SecretKeySpec(raw, "AES");
        Cipher cipher = Cipher.getInstance("AES");
        cipher.init(Cipher.DECRYPT_MODE, skeySpec);
        return cipher.doFinal(encrypted);
    }

    public static String toHex(String txt) {
        return toHex(txt.getBytes());
    }

    public static String fromHex(String hex) {
        return new String(toByte(hex));
    }

    // Decodes a hex string back into raw bytes.
    public static byte[] toByte(String hexString) {
        int len = hexString.length() / 2;
        byte[] result = new byte[len];
        for (int i = 0; i < len; i++)
            result[i] = Integer.valueOf(hexString.substring(2 * i, 2 * i + 2), 16).byteValue();
        return result;
    }

    // Encodes raw bytes as an upper-case hex string.
    public static String toHex(byte[] buf) {
        if (buf == null)
            return "";
        StringBuffer result = new StringBuffer(2 * buf.length);
        for (int i = 0; i < buf.length; i++) {
            appendHex(result, buf[i]);
        }
        return result.toString();
    }

    private final static String HEX = "0123456789ABCDEF";

    private static void appendHex(StringBuffer sb, byte b) {
        sb.append(HEX.charAt((b >> 4) & 0x0f)).append(HEX.charAt(b & 0x0f));
    }

    public static void main(String[] args) {
        String content = "4646";
        //String password = "zsyy";
        String password = "zmbd";
        System.out.println("plaintext: " + content);
        System.out.println("key: " + password);
        try {
            String encryptResult = AES.encrypt(password, content);
            System.out.println("ciphertext: " + encryptResult);
            String decryptResult = decrypt(password, encryptResult);
            System.out.println("decrypted: " + decryptResult);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
UDF function (AES-256 encryption/decryption):
package com.zmlearn.udf;

import org.apache.hadoop.hive.ql.exec.UDF;

public class AESUDF extends UDF {

    String password = "zmbd";

    public String evaluate(String type, String content) throws Exception {
        if (content == null) return null;
        if (!type.equals("encode") && !type.equals("decode")) {
            throw new Exception("Parameter one must be encode/decode");
        }
        if (type.equals("encode")) {
            // encrypt
            return AES.encrypt(password, content);
        } else {
            // decrypt
            return AES.decrypt(password, content);
        }
    }

    public static void main(String[] args) {
        try {
            System.out.println(new AESUDF().evaluate("encode", "test"));
            System.out.println(new AESUDF().evaluate("decode", "65A074417B34314380F0B0EF2ED9B06C"));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
Exception: java.security.InvalidKeyException: Illegal key size
While testing the interface today I ran into this exception:
java.security.InvalidKeyException: Illegal key size
or
FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments ''2'': org.apache.hadoop.hive.ql.metadata.HiveException: Unable to execute method public java.lang.String com.zmlearn.udf.AESUDF.evaluate(java.lang.String,java.lang.String) throws java.lang.Exception on object com.zmlearn.udf.AESUDF@7d8cf9ac of class com.zmlearn.udf.AESUDF with arguments {encode:java.lang.String, 2:java.lang.String} of size 2
Cause: if the key size is greater than 128 bits, java.security.InvalidKeyException: Illegal key size is thrown. Key length is restricted because the Java runtime reads the restricted policy files located in ${java_home}/jre/lib/security; the restriction comes from U.S. export controls on cryptographic software.
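A quick way to confirm whether the restricted policy is in effect is to ask the JCE for the maximum allowed AES key length (a minimal sketch; the class name is only for illustration):
import javax.crypto.Cipher;

public class KeySizeCheck {
    public static void main(String[] args) throws Exception {
        // Prints 128 on a JRE with the default restricted policy files,
        // and 2147483647 (Integer.MAX_VALUE) once the unlimited-strength policy jars are installed.
        System.out.println("Max allowed AES key length: " + Cipher.getMaxAllowedKeyLength("AES"));
    }
}
If this prints 128, the 256-bit key generated in getRawKey will trigger the Illegal key size exception.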
Solution: download the JCE Unlimited Strength Jurisdiction Policy Files from Oracle.
JDK 5: http://www.oracle.com/technetwork/java/javasebusiness/downloads/java-archive-downloads-java-plat-419418.html#jce_policy-1.5.0-oth-JPR
JDK 6: http://www.oracle.com/technetwork/java/javase/downloads/jce-6-download-429243.html
JDK 7: http://www.oracle.com/technetwork/java/javase/downloads/jce-7-download-432124.html
JDK 8: http://www.oracle.com/technetwork/java/javase/downloads/jce8-download-2133166.html
After downloading and unzipping the archive you will find local_policy.jar, US_export_policy.jar and readme.txt.
If you have a JRE installed, copy the two jar files into %JRE_HOME%\lib\security, overwriting the existing files.
If you have a JDK installed, also copy the two jar files into %JDK_HOME%\jre\lib\security, overwriting the existing files.
For registering the UDF in Hive (the same applies to spark-sql), see: https://blog.csdn.net/yu0_zhang0/article/details/79060849
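For reference, registering and calling the UDF typically looks like the following (a sketch only; the jar path, the function name aes_udf, and the table/column names are placeholders for your own environment):
-- add the jar containing com.zmlearn.udf.AESUDF to the session
ADD JAR /path/to/aes-udf.jar;
-- register the UDF under a temporary function name
CREATE TEMPORARY FUNCTION aes_udf AS 'com.zmlearn.udf.AESUDF';
-- encrypt a column, or decrypt a previously encrypted value
SELECT aes_udf('encode', mobile) FROM user_table LIMIT 10;
SELECT aes_udf('decode', encrypted_mobile) FROM user_table LIMIT 10;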
On Linux you also need to add local_policy.jar and US_export_policy.jar to ${java_home}/jre/lib/security; otherwise the registered UDF will still fail with
java.security.InvalidKeyException: Illegal key size.
Key point: for Hive, the two jars only need to be added on the machine where the job is launched. For spark-sql, the job is submitted to the cluster and distributed to the workers, so every worker node that runs this UDF must have the two jars as well (or, to keep it simple, install them on every machine).