SpringBoot + Mybatis Plus
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.7.17</version>
        <relativePath/>
    </parent>
    <groupId>com.songshuang</groupId>
    <artifactId>dwmeta</artifactId>
    <version>1.0-SNAPSHOT</version>
    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-metastore</artifactId>
            <version>3.1.2</version>
        </dependency>
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.72</version>
        </dependency>
    </dependencies>
</project>
# hive.client.uri: address of the Hive metastore (metadata) service
# hdfs.admin.user: HDFS user
# hdfs.uri: HDFS NameNode RPC address
hive.client.uri=thrift://hadoop102:9083
hdfs.admin.user=hadoop
hdfs.uri=hdfs://hadoop102:9820
package com.songshuang.dga;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
//this class is the application's main startup class
@SpringBootApplication
public class MainApp {
public static void main(String[] args) {
//start the app
SpringApplication.run(MainApp.class, args);
}
}
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
import java.net.URI;
/*
    All clients should be created on demand and closed as soon as they have been used.
 */
@Configuration
public class DgaConfig {
@Value("${hive.client.uri}")
private String hiveUri;
@Bean
@Scope("prototype")
public HiveMetaStoreClient createHiveMetastoreClient(){
org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
//the client connects to the metastore server; set its address and port
conf.set("hive.metastore.uris",hiveUri);
try {
HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
return client;
} catch (MetaException e) {
throw new RuntimeException(e);
}
}
@Value("${hdfs.admin.user}")
private String hdfsAdmin;
@Value("${hdfs.uri}")
private String hdfsUri;
@Bean
@Scope("prototype")
public FileSystem createHDFSClient(){
try {
FileSystem hdfsClient = FileSystem.get(new URI(hdfsUri), new org.apache.hadoop.conf.Configuration(), hdfsAdmin);
return hdfsClient;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
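Both beans are prototype-scoped, so every getBean() call hands out a fresh client and the container never closes it for you. A minimal usage sketch, assuming an autowired ApplicationContext and a method that declares throws TException (getAllDatabases() is only an example call):

HiveMetaStoreClient client = context.getBean(HiveMetaStoreClient.class);
try {
    // any metastore call works here; getAllDatabases() is just an example
    System.out.println(client.getAllDatabases());
} finally {
    // prototype beans are not tracked by the container, so close them yourself
    client.close();
}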
Connect to the Metastore service to extract Hive metadata; connect to the NameNode to extract HDFS metadata.
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.thrift.TException;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.ApplicationContext;
import java.io.IOException;
/**
* @date 2024/1/29 16:27
*/
@SpringBootTest
public class MetaTest {
@Autowired
private ApplicationContext context;
@Test
public void testHiveClient() throws TException {
HiveMetaStoreClient client = context.getBean(HiveMetaStoreClient.class);
//list all tables in the database
System.out.println(client.getAllTables("dw_ods"));
//fetch the metadata of a single table
System.out.println(client.getTable("dw_ods", "ods_activity_info_full"));
client.close();
}
@Test
public void testHDFSClient() throws IOException {
//1. get an HDFS client
FileSystem hdfsClient = context.getBean(FileSystem.class);
//2. iterate over tableMetaInfos and enrich each TableMetaInfo with HDFS metadata (here we only read cluster-wide status)
FsStatus status = hdfsClient.getStatus();
long capacity = status.getCapacity();
long remaining = status.getRemaining();
long used = status.getUsed();
System.out.println("capacity:" + capacity + "remaining:" + remaining + "used:" + used );
}
}
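getStatus() only reports cluster-wide figures. To enrich a single table, as the comment in step 2 suggests, one option is FileSystem.getContentSummary() on the table's HDFS directory. A minimal sketch of an extra test method (imports org.apache.hadoop.fs.Path and org.apache.hadoop.fs.ContentSummary), assuming the default warehouse layout /user/hive/warehouse/<db>.db/<table>; in a real job, read the location from the table's StorageDescriptor instead:

@Test
public void testTableDirSummary() throws IOException {
    FileSystem hdfsClient = context.getBean(FileSystem.class);
    // assumed path for the default Hive warehouse layout
    Path tableDir = new Path("/user/hive/warehouse/dw_ods.db/ods_activity_info_full");
    ContentSummary summary = hdfsClient.getContentSummary(tableDir);
    System.out.println("size:" + summary.getLength()
            + ", files:" + summary.getFileCount()
            + ", dirs:" + summary.getDirectoryCount());
}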
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid-spring-boot-starter</artifactId>
    <version>1.2.15</version>
</dependency>
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>8.0.27</version>
</dependency>
<dependency>
    <groupId>com.baomidou</groupId>
    <artifactId>dynamic-datasource-spring-boot-starter</artifactId>
    <version>2.5.8</version>
</dependency>
<dependency>
    <groupId>com.baomidou</groupId>
    <artifactId>mybatis-plus-boot-starter</artifactId>
    <version>3.4.1</version>
</dependency>
<dependency>
    <groupId>com.baomidou</groupId>
    <artifactId>mybatis-plus-generator</artifactId>
    <version>3.5.3.1</version>
</dependency>
<dependency>
    <groupId>org.apache.velocity</groupId>
    <artifactId>velocity-engine-core</artifactId>
    <version>2.3</version>
</dependency>
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-freemarker</artifactId>
</dependency>
spring:
  datasource:
    dynamic:
      primary: dga # default data source (or data source group)
      strict: false # strict matching of data sources, default false; true throws when the named source is missing, false falls back to the default
      datasource:
        dga:
          url: jdbc:mysql://mall:3306/dga?useSSL=false&useUnicode=true&characterEncoding=UTF-8
          username: root
          password: "123456"
          driver-class-name: com.mysql.cj.jdbc.Driver
          druid:
            initial-size: 5
            max-active: 20
            max-wait: 60000
            min-idle: 5
            test-on-borrow: true
            test-on-return: false
            test-while-idle: true
  autoconfigure:
    exclude: com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceAutoConfigure
mybatis-plus:
  mapper-locations: classpath*:/sqls/*Mapper.xml
  configuration:
    mapUnderscoreToCamelCase: true
logging:
  level:
    com:
      songshuang:
        dga:
          meta:
            mapper: debug
server:
  port: 80
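The primary source "dga" serves all queries by default; with dynamic-datasource, a bean can opt into another configured source via the @DS annotation. A minimal sketch, where "report" is a hypothetical second source that would first have to be declared under spring.datasource.dynamic.datasource:

import com.baomidou.dynamic.datasource.annotation.DS;
import org.springframework.stereotype.Service;

@Service
@DS("report") // hypothetical source name; without @DS the primary "dga" is used
public class ReportService {
    // every query issued from this bean runs against the "report" data source
}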
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import java.io.Serializable;
import java.sql.Timestamp;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * Extra (auxiliary) metadata for a table.
 *
 * @since 2024-01-29
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@TableName("table_meta_info_extra")
public class TableMetaInfoExtra implements Serializable {
private static final long serialVersionUID = 1L;
/**
* id
*/
@TableId(value = "id", type = IdType.AUTO)
private Long id;
/**
 * Table name
 */
private String tableName;
/**
 * Database (schema) name
 */
private String schemaName;
/**
 * Technical owner
 */
private String tecOwnerUserName;
/**
 * Business owner
 */
private String busiOwnerUserName;
/**
 * Lifecycle type
 */
private String lifecycleType;
/**
 * Lifecycle in days
 */
private Long lifecycleDays;
/**
 * Security level
 */
private String securityLevel;
/**
 * Data warehouse layer
 */
private String dwLevel;
/**
 * Creation time (auto-generated)
 */
private Timestamp createTime;
/**
 * Update time (auto-generated)
 */
private Timestamp updateTime;
}
import com.baomidou.mybatisplus.extension.service.IService;
import com.songshuang.dga.meta.bean.TableMetaInfoExtra;
import org.apache.hadoop.hive.metastore.api.MetaException;
public interface TableMetaInfoExtraService extends IService<TableMetaInfoExtra> {
//generate the extra info for every table in the given database
void initMetaInfoExtra(String db) throws MetaException;
}
import com.songshuang.dga.config.MetaConstant;
import com.songshuang.dga.meta.bean.TableMetaInfoExtra;
import com.songshuang.dga.meta.mapper.TableMetaInfoExtraMapper;
import com.songshuang.dga.meta.service.TableMetaInfoExtraService;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Service;
import java.sql.Timestamp;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Service implementation for the extra table metadata.
 *
 * @since 2024-01-29
 */
@Service
public class TableMetaInfoExtraServiceImpl extends ServiceImpl<TableMetaInfoExtraMapper, TableMetaInfoExtra> implements TableMetaInfoExtraService {
@Autowired
private ApplicationContext context;
/*
    The extra info rarely changes.
    Only when a new table is created does its extra info need to be written to the database.
    If a table already has extra info, there is nothing to write.
    So initMetaInfoExtra() only needs to insert the new tables
    (tables whose metadata was created today).
 */
@Override
public void initMetaInfoExtra(String db) throws MetaException {
//find the new tables in the given db
//Step 1: query table_meta_info_extra for the tables in this db that already have info (the old tables)
Set<String> existsTableNames = list(new QueryWrapper<TableMetaInfoExtra>().eq("schema_name", db))
.stream()
.map(info -> info.getTableName())
.collect(Collectors.toSet());
//Step 2: list all tables in the db and filter out the old ones to get the new tables
HiveMetaStoreClient client = context.getBean(HiveMetaStoreClient.class);
List<String> allTables = client.getAllTables(db);
client.close(); //clients are created per use and must be closed after use
List<String> newTables = allTables.stream()
.filter(name -> !existsTableNames.contains(name))
.collect(Collectors.toList());
//generate extra info for each new table and save it to the database
List<TableMetaInfoExtra> infos = newTables.stream()
.map(name -> {
TableMetaInfoExtra extra = new TableMetaInfoExtra();
extra.setSchemaName(db);
extra.setTableName(name);
//the remaining fields should be entered manually by staff; for convenience, initialize defaults here as if they had already been entered
initExtraInfo(extra);
extra.setCreateTime(new Timestamp(System.currentTimeMillis()));
return extra;
})
.collect(Collectors.toList());
saveBatch(infos);
}
private void initExtraInfo(TableMetaInfoExtra extra) {
    //placeholder owner pools; in practice these fields would be entered by staff
    String[] bon = {"张三", "李四", "王五", "赵六"};
    String[] ton = {"张小三", "李中四", "王大五", "赵老六"};
    extra.setBusiOwnerUserName(bon[RandomUtils.nextInt(0, bon.length)]);
    extra.setTecOwnerUserName(ton[RandomUtils.nextInt(0, ton.length)]);
    extra.setLifecycleType(MetaConstant.LIFECYCLE_TYPE_UNSET);
    extra.setLifecycleDays(-1L);
    extra.setSecurityLevel(MetaConstant.SECURITY_LEVEL_UNSET);
    //derive the warehouse layer from the table-name prefix, e.g. "ods_..." -> "ODS"
    extra.setDwLevel(extra.getTableName().substring(0, 3).toUpperCase());
}
}
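The implementation above references MetaConstant, which is not shown in this post. A minimal sketch of what the class needs to compile; the literal values are assumptions, not the original project's:

package com.songshuang.dga.config;

public class MetaConstant {
    // assumed placeholders meaning "not yet set by staff"
    public static final String LIFECYCLE_TYPE_UNSET = "UNSET";
    public static final String SECURITY_LEVEL_UNSET = "UNSET";
}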
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.songshuang.dga.meta.bean.TableMetaInfoExtra;
import org.apache.ibatis.annotations.Mapper;
/**
* @date 2024/1/29 19:44
*/
@Mapper
public interface TableMetaInfoExtraMapper extends BaseMapper<TableMetaInfoExtra> {
}
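Because the mapper extends BaseMapper, it inherits single-table CRUD without any XML. A minimal sketch of calling it directly from a test (same schema name as used elsewhere in this post):

@Autowired
private TableMetaInfoExtraMapper extraMapper;

@Test
public void testSelectBySchema() {
    //query columns use the underscored names; results map to camelCase fields
    List<TableMetaInfoExtra> rows = extraMapper.selectList(
            new QueryWrapper<TableMetaInfoExtra>().eq("schema_name", "dw_ods"));
    rows.forEach(System.out::println);
}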
@Autowired
private TableMetaInfoExtraService extraService;
@Test
public void testExtraInfo() throws Exception {
extraService.initMetaInfoExtra("dw_ods");
}