文章来源:http://blog.csdn.net/linlinv3/article/details/44963429
hadoop透明加密 kms
<!-- KMS Backend KeyProvider --> <property> <name>hadoop.kms.key.provider.uri</name> <value>jceks://file@/${user.home}/kms.keystore</value> <description> URI of the backing KeyProvider for the KMS. </description> </property> <property> <name>hadoop.security.keystore.java-keystore-provider.password-file</name> <value>kms.keystore</value> <description> If using the JavaKeyStoreProvider, the password for the keystore file. </description> </property>
<!-- KMS Cache --> <property> <name>hadoop.kms.cache.enable</name> <value>true</value> <description> Whether the KMS will act as a cache for the backing KeyProvider. When the cache is enabled, operations like getKeyVersion, getMetadata, and getCurrentKey will sometimes return cached data without consulting the backing KeyProvider. Cached values are flushed when keys are deleted or modified. </description> </property> <property> <name>hadoop.kms.cache.timeout.ms</name> <value>600000</value> <description> Expiry time for the KMS key version and key metadata cache, in milliseconds. This affects getKeyVersion and getMetadata. </description> </property> <property> <name>hadoop.kms.current.key.cache.timeout.ms</name> <value>30000</value> <description> Expiry time for the KMS current key cache, in milliseconds. This affects getCurrentKey operations. </description> </property>
Audit logs are aggregated for API accesses to the GET_KEY_VERSION, GET_CURRENT_KEY, DECRYPT_EEK, GENERATE_EEK operations.
<!-- KMS Audit --> <property> <name>hadoop.kms.audit.aggregation.window.ms</name> <value>10000</value> <description> Duplicate audit log events within the aggregation window (specified in ms) are quashed to reduce log traffic. A single message for aggregated events is printed at the end of the window, along with a count of the number of aggregated events. </description> </property>
<!-- KMS Security -->
<property>
<name>hadoop.kms.authentication.type</name>
<value>simple</value>
<description>
Authentication type for the KMS. Can be either "simple"
or "kerberos".
</description>
</property>
export KMS_LOG=${KMS_HOME}/logs/kms export KMS_HTTP_PORT=16000 export KMS_ADMIN_PORT=16001
/etc/hadoop/conf/hdfs-site.xml
<property> <name>dfs.encryption.key.provider.uri</name> <value>kms://http@localhost:16000/kms</value> </property>
/etc/hadoop/conf/core-site.xml
<property> <name>hadoop.security.key.provider.path</name> <value>kms://http@localhost:16000/kms</value> </property>
# su - hdfs # hadoop key create key1 # hadoop key list -metadata
# hdfs dfs -mkdir /secureweblogs # hdfs crypto -createZone -keyName key1 -path /secureweblogs # hdfs crypto -listZones
生成密钥:keytool -delete -alias 'kms.keystore'; —— 删除别名为 kms.keystore 的密钥。keytool -genkey -alias 'kms.keystore'; —— 生成别名为 kms.keystore 的密钥。使用生成密钥的命令后,会在用户根目录下生成相应的文件。
hdfs dfs -cat /.reserved/raw/zone1/localfile.dat —— 其中 zone1 是加密区,localfile.dat 是加密文件;通过 /.reserved/raw/ 路径查看该文件时,显示的是未经解密的原始密文(乱码)。