@Component
public class ConsumerContainer {
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());
    /**
     * One polling thread per subscribed topic, keyed by topic name, so a
     * running consumer can be looked up, listed and stopped later.
     */
    private final Map<String, KafkaConsumerThread> kafkaConsumerThreadMap = new HashMap<>();
    /** Kafka client properties (bootstrap servers, deserializers, group id, ...). */
    @Resource(name = "kafkaPropsConfig")
    private Map props;

    /**
     * Subscribes a new KafkaConsumer to {@code topic} and starts a dedicated
     * polling thread that forwards every record value to {@code consumer}.
     *
     * @param topic    topic name to subscribe to
     * @param consumer callback invoked with each received record value
     */
    public synchronized void addConsumer(String topic, Consumer consumer){
        if (kafkaConsumerThreadMap.get(topic) != null) {
            // Fix: previously a duplicate registration still created a second
            // consumer and overwrote the map entry, leaking the old running thread.
            logger.warn("重复创建消费者:{}", topic);
            return;
        }
        KafkaConsumer kafkaConsumer = new KafkaConsumer<>(props);
        kafkaConsumer.subscribe(Arrays.asList(topic));
        // Fix: removed the eager poll(3000) that blocked the caller for up to
        // three seconds and silently discarded the records it fetched.
        KafkaConsumerThread kafkaConsumerThread = new KafkaConsumerThread(kafkaConsumer, consumer);
        kafkaConsumerThread.start();
        kafkaConsumerThreadMap.put(topic, kafkaConsumerThread);
        logger.info("创建消费者成功:{}", topic);
    }

    /**
     * Stops and deregisters the consumer thread for {@code topic}; a no-op
     * (apart from a warning) when no consumer is registered for it.
     *
     * @param topic topic whose consumer should be shut down
     */
    public synchronized void deleteConsumer(String topic){
        KafkaConsumerThread kafkaConsumerThread = kafkaConsumerThreadMap.get(topic);
        if (kafkaConsumerThread == null) {
            logger.warn("该消费者已经被删除:{}", topic);
            return;
        }
        // Interrupt the polling loop; the thread closes its KafkaConsumer on exit.
        kafkaConsumerThread.interrupt();
        kafkaConsumerThreadMap.remove(topic);
        logger.info("消费者删除成功:{}", topic);
    }

    /**
     * @return a snapshot of the topics that currently have a running consumer thread
     */
    public synchronized List listConsumer(){
        return new ArrayList<>(kafkaConsumerThreadMap.keySet());
    }
}
/**
 * Payload of a single device message pulled from a Kafka topic.
 * Lombok generates getters/setters, equals/hashCode/toString and both
 * constructors.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class DataAcquistion {
    // Message/device identifier as sent by the producer.
    String id;
    // NOTE(review): likely a typo for "version" -- confirm with the message producers.
    String vision;
    String method;
    // Property name -> value pairs ("properties" object of the JSON message) --
    // presumably channel readings; TODO confirm schema against a real message.
    Map properties;
}
/**
 * Read-only lookups backing the data-acquisition flow: equipment records,
 * the data submatrix bound to a device, and the submatrix's column definitions.
 */
public interface DataAcquistionDao {
    /** Returns all equipment entities. */
    List selectIEquipment();
    /** Returns the equipment whose {@code rscode} matches {@code code} (behavior on miss depends on BaseDao.get). */
    EquipmentEntity selectIEquipmentByCode(String code);
    /** Returns the equipment identified by primary key {@code gid}. */
    EquipmentEntity selectIEquipmentByGid(String gid);
    /** Returns the data submatrix bound to the given data object id. */
    DataSubmatrixEntity selectDataSubmatrixByDataObjId(String dataObjId);
    /** Returns the column definitions belonging to the given submatrix id. */
    List selectLocalColumnByDsId(String DsId);
}
/**
 * Thin DAO facade that delegates every query to generic BaseDao instances.
 */
@Repository
public class DataAcquistionDaoImpl implements DataAcquistionDao {
    // NOTE(review): three raw BaseDao injections distinguished only by field
    // name -- presumably resolved by-name to differently-typed beans; confirm
    // the Spring configuration actually wires three distinct beans here.
    @Autowired
    private BaseDao baseDao;
    @Autowired
    private BaseDao localColumnBaseDao;
    @Autowired
    private BaseDao dataSubmatrixBaseDao;

    /** Lists every equipment row. */
    @Override
    public List selectIEquipment() {
        return baseDao.listByCriterion(EquipmentEntity.class);
    }

    /** Looks up equipment by its "rscode" property. */
    @Override
    public EquipmentEntity selectIEquipmentByCode(String code) {
        return baseDao.get("rscode",code,EquipmentEntity.class);
    }

    /** Looks up equipment by primary key (GID). */
    @Override
    public EquipmentEntity selectIEquipmentByGid(String gid) {
        return baseDao.get(gid,EquipmentEntity.class);
    }

    /** Looks up the data submatrix bound to a data object id. */
    @Override
    public DataSubmatrixEntity selectDataSubmatrixByDataObjId(String dataObjId) {
        return dataSubmatrixBaseDao.get("dataObjId",dataObjId,DataSubmatrixEntity.class);
    }

    /** Lists the columns whose dataSubmatrixId equals the given id. */
    @Override
    public List selectLocalColumnByDsId(String DsId) {
        return localColumnBaseDao.listByCriterion(LocalColumnEntity.class, Restrictions.eq("dataSubmatrixId",DsId));
    }
}
/**
 * Service contract for managing per-device Kafka consumers and persisting
 * the messages they receive.
 */
public interface DataAcquistionService {
    /**
     * Subscribes to the topic derived from the device code.
     * @param code equipment rscode
     * @return true when the consumer was created
     */
    boolean createConsumerByCode(String code);
    /**
     * Subscribes to the topic of every known equipment.
     * @return true on completion
     */
    boolean createAllConsumer();
    /**
     * Lists topics of known equipment that have no active consumer yet.
     * @return topic names without a consumer
     */
    List listNoTopicConsumer();
    /** Creates consumers for all topics returned by {@link #listNoTopicConsumer()}. */
    boolean createOtherConsumer();
    /** Returns true when the given code is NOT in the unsubscribed list. */
    boolean getConsumerStaus(String code);
    /** Deletes every topic on the broker; stops at the first failure. */
    boolean deleteAllTopic();
    /** Deletes the single topic named {@code code}. */
    boolean deleteTopicbyCode(String code);
    /** Stops the consumer of every topic on the broker. */
    boolean deleteAllConsumer();
    /** Stops the consumer of one topic. */
    boolean deleteConsumerByTopic(String code);
    /**
     * Parses a JSON message from {@code topic} and writes its properties to
     * the device's storage table.
     */
    boolean saveDataAcquistion(String topic, String message);
}
/**
 * Default implementation: manages Kafka consumers per device topic and
 * persists incoming JSON messages into per-submatrix data tables.
 */
@Service
public class DataAcquistionServiceImpl implements DataAcquistionService {
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());
    // Resolved from the context rather than @Autowired -- presumably to avoid
    // a circular dependency with KafkaHelper; TODO confirm. May be null if the
    // context is not fully started when this bean is constructed.
    KafkaHelper kafkaHelper = ApplicationContextHelper.getBean(KafkaHelper.class);
    @Autowired
    private DataAcquistionDao dataAcquistionDao;
    @Autowired
    private IEquipmentService m_iEquipmentService;
    @Autowired
    private ITDMConfigService m_iTDMConfigService;
    // DAO IoC name -> product data-space DAO; saveData picks the backend
    // configured on the data submatrix from this map.
    @Autowired
    protected Map m_tProductDataSpaceDaoMap;
    @Autowired
    @Qualifier("ENTWareDeviceData")
    private IDataStorageService m_iDataStorageService;
    // Startup cache of equipment rscode -> GID. Static and mutable: shared
    // across instances and never refreshed after initKafkaConfig().
    public static Map dictMap =new HashMap();
    // @Autowired
    // private IDataSubmatrixService m_iDataSubmatrixService;

    /**
     * Startup hook: fills the rscode -> GID cache from the equipment table
     * and, when the "DefaultStart" system flag is "true", subscribes a
     * consumer to every topic currently on the broker.
     */
    @PostConstruct
    protected void initKafkaConfig() {
        /*List tEquipment = m_iEquipmentService.getBusinessObj();
        for (IEquipment iEquipment : tEquipment) {
            if (iEquipment.getEntity().getRscode() == null){
                continue;
            }
            dictMap.put(iEquipment.getEntity().getRscode(),iEquipment.getEntity().getGID());
        }*/
        List equipmentEntities = dataAcquistionDao.selectIEquipment();
        for (EquipmentEntity equipmentEntity : equipmentEntities) {
            // Devices without an rscode cannot be mapped to a topic; skip them.
            if (equipmentEntity.getRscode() == null){
                continue;
            }
            dictMap.put(equipmentEntity.getRscode(),equipmentEntity.getGID());
        }
        String strTemp = m_iTDMConfigService.getSysSignItem("RfSoft.TDMProduct.EquipmentKafka", "DefaultStart").getValue();
        // NOTE(review): NPEs if the sign item or its value is absent --
        // consider "true".equals(strTemp).
        if (strTemp.equals("true")){
            try {
                List list = kafkaHelper.list();
                for (String topic : list) {
                    kafkaHelper.addConsumer(topic,new PTVMateConsumer());
                }
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Subscribes a consumer to the device topic "thing_&lt;code&gt;_property".
     *
     * @param code equipment rscode; must be non-blank
     * @return false when code is blank, true otherwise
     */
    @Override
    public boolean createConsumerByCode(String code) {
        if (StringUtils.isBlank(code)){
            logger.error("code为空,创建消费者失败");
            return false;
        }
        kafkaHelper.addConsumer("thing_"+code+"_property",new PTVMateConsumer());
        return true;
    }

    /**
     * Subscribes a consumer to the topic of every equipment that has an rscode.
     *
     * @return always true
     */
    @Override
    public boolean createAllConsumer() {
        List tEquipment = m_iEquipmentService.getBusinessObj();
        for (IEquipment iEquipment : tEquipment) {
            String rscode = iEquipment.getEntity().getRscode();
            if (StringUtils.isBlank(rscode)){
                continue;
            }
            kafkaHelper.addConsumer("thing_"+rscode+"_property",new PTVMateConsumer());
        }
        return true;
    }

    /**
     * Lists device topics that currently have no running consumer.
     *
     * @return topic names ("thing_&lt;rscode&gt;_property") without a consumer
     */
    @Override
    public List listNoTopicConsumer() {
        // Topics that already have a running consumer thread.
        List list = kafkaHelper.listConsumer();
        List noConsumer = new ArrayList<>();
        List tEquipment = m_iEquipmentService.getBusinessObj();
        for (IEquipment iEquipment : tEquipment) {
            String rscode = iEquipment.getEntity().getRscode();
            if (StringUtils.isBlank(rscode)){
                continue;
            }
            // NOTE(review): `list` holds full topic names ("thing_X_property",
            // see ConsumerContainer.listConsumer) but the bare rscode is tested
            // here, so this check never matches and every device is reported as
            // unsubscribed. Likely should be
            // list.contains("thing_"+rscode+"_property") -- confirm and fix.
            if (!list.contains(rscode)){
                noConsumer.add("thing_"+rscode+"_property");
            }
        }
        return noConsumer;
    }

    /**
     * Creates a consumer for every topic still missing one.
     *
     * @return always true
     */
    @Override
    public boolean createOtherConsumer() {
        List list = listNoTopicConsumer();
        for (String s : list) {
            kafkaHelper.addConsumer(s,new PTVMateConsumer());
        }
        return true;
    }

    /**
     * Reports whether the given code is considered subscribed.
     *
     * @param code equipment rscode
     * @return true when code is not in the unsubscribed list
     */
    @Override
    public boolean getConsumerStaus(String code) {
        List list = listNoTopicConsumer();
        // NOTE(review): listNoTopicConsumer returns full topic names, so a bare
        // rscode can never be contained and this always returns true -- confirm
        // the intended comparison ("thing_"+code+"_property"?).
        if (list.contains(code)){
            return false;
        }
        return true;
    }

    /**
     * Deletes every topic on the broker, stopping at the first failure.
     *
     * @return false as soon as one deletion fails, true otherwise
     */
    @Override
    public boolean deleteAllTopic() {
        try {
            List list = kafkaHelper.list();
            for (String s : list) {
                boolean b = kafkaHelper.deleteTopic(s);
                if (b == false){
                    return false;
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return true;
    }

    /**
     * Deletes one topic.
     *
     * @param code topic name passed through verbatim -- NOTE(review): despite
     *             the name this is NOT expanded to "thing_&lt;code&gt;_property";
     *             confirm callers pass the full topic name.
     */
    @Override
    public boolean deleteTopicbyCode(String code) {
        return kafkaHelper.deleteTopic(code);
    }

    /**
     * Stops the consumer of every topic currently on the broker (topics with
     * no registered consumer only trigger a warning in ConsumerContainer).
     *
     * @return always true
     */
    @Override
    public boolean deleteAllConsumer() {
        try {
            List list = kafkaHelper.list();
            for (String s : list) {
                kafkaHelper.deleteConsumer(s);
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return true;
    }

    /**
     * Stops the consumer of a single topic.
     *
     * @return always true
     */
    @Override
    public boolean deleteConsumerByTopic(String topic) {
        kafkaHelper.deleteConsumer(topic);
        return true;
    }

    /**
     * Parses one JSON message received on {@code topic}, resolves the device
     * GID from the topic name, stamps the current time into the property map
     * and writes it to the device's data table via {@link #saveData}.
     *
     * @param topic   topic name of the form "thing_&lt;rscode&gt;_property"
     * @param message JSON message whose "properties" object holds the readings
     * @return true when the write succeeded
     */
    @Override
    public boolean saveDataAcquistion(String topic,String message) {
        Object parse = JSONObject.parse(message);
        Map dataAcquistion = (Map) parse;
        Map map = (Map) dataAcquistion.get("properties");
        // List equipments = m_iEquipmentService.getBusinessObjByProperty("rscode",topic);
        // Extract the rscode from "thing_<rscode>_property".
        String[] s = topic.split("_");
        String rscode = null;
        if (s.length>1){
            rscode = s[1];
        }else {
            // Topic does not follow the expected naming scheme; nothing to save.
            return false;
        }
        String gid = dictMap.get(rscode);
        if (StringUtils.isBlank(gid)){
            // Cache miss: fall back to the database.
            // NOTE(review): this passes the full topic, not the rscode, to a
            // lookup keyed on "rscode" -- almost certainly should be
            // selectIEquipmentByCode(rscode); confirm. Also NPEs when no row
            // is found.
            EquipmentEntity equipmentEntity = dataAcquistionDao.selectIEquipmentByCode(topic);
            // if (equipments.size()!=1){
            //     return false;
            // }
            // String gid = equipments.get(0).getEntity().getGID();
            gid = equipmentEntity.getGID();
        }
        // m_iDataStorageService.writeDataToDB();
        /* IDataSubmatrix iDataSubmatrix = m_iDataSubmatrixService.getBusinessObjByProperty("dataObjId", gid).stream().findFirst().orElse(null);
        List tLocalColumn = iDataSubmatrix.getChannel();
        List tField = tLocalColumn.stream().filter(t -> t.getEntity() != null && org.springframework.util.StringUtils.hasText(t.getEntity().getFieldName()) == true).
        map(t -> t.getEntity().getFieldName()).collect(Collectors.toList());*/
        List> tData = new ArrayList<>();
        // Map properties = dataAcquistion.getProperties();
        // Stamp the (server-side) receive time as the primary-key column value.
        String strCurrTime = DateTime.getCurrDateTime("yyyy-MM-dd HH:mm:ss.SSS");
        Long lngCurrTime = DateTime.convertStringToLongTime(strCurrTime, "yyyy-MM-dd HH:mm:ss.SSS");
        map.put("DATETIMES", String.valueOf(lngCurrTime));
        /*
        for (String strField : tField) {
            if (StringUtils.isBlank(properties.get(strField))){
                properties.put(strField,null);
            }
        }*/
        tData.add(map);
        String strData = JSONString.object2Json(tData);
        boolean blnResult = false;
        try {
            // blnResult = m_iDataStorageService.writeDataToDB(strData, gid);
            blnResult = saveData(strData, gid,map);
            //super.printString(response, String.valueOf(blnResult));
        } catch (Exception ex) {
            // Best-effort: a failed write is reported via the false return value.
            ex.printStackTrace();
            // super.printString(response, ex.getMessage());
        }
        return blnResult;
        /*String strData = "";
        Map map = JSON.parseObject(dataAcquistion);
        Map map1 = new LinkedHashMap<>();
        for (Map.Entry stringObjectEntry : map.entrySet()) {
            if (!stringObjectEntry.getKey().equals("properties")) {
                map1.put(stringObjectEntry.getKey(), stringObjectEntry.getValue());
            } else {
                Map map2 = JSON.parseObject(stringObjectEntry.getValue().toString());
                for (Map.Entry objectEntry : map2.entrySet()) {
                    map1.put(objectEntry.getKey(), objectEntry.getValue());
                }
            }
        }
        strData = JSONString.object2Json(map1);
        boolean blnResult;
        try {
            blnResult = m_iDataStorageService.writeDataToDB(strData, map.get("id").toString());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return blnResult;*/
    }

    // Name of the primary-key column every data table carries (a millisecond
    // timestamp) and its SQL type.
    protected static String m_strPKFieldName = "DATETIMES";
    protected static String m_strPKFieldType = "BIGINT";

    /**
     * Writes one message's values into the data table configured on the
     * device's data submatrix: keeps only the fields that exist as columns,
     * converts values (PK column to Long, everything else to Double), builds
     * the table description and inserts through the configured
     * IProductDataSpaceDao.
     *
     * @param strData         the message serialized as a one-element JSON array
     * @param strDataObjectId GID of the device (data object)
     * @param map             the same property map, used to enumerate the keys
     * @return true when the insert succeeded
     */
    public boolean saveData(String strData, String strDataObjectId,Map map){
        // EquipmentEntity equipmentEntity = dataAcquistionDao.selectIEquipmentByGid(strDataObjectId);
        // NOTE(review): NPEs when no submatrix is bound to this data object id.
        DataSubmatrixEntity dataSubmatrixEntity = dataAcquistionDao.selectDataSubmatrixByDataObjId(strDataObjectId);
        List localColumnEntities = dataAcquistionDao.selectLocalColumnByDsId(dataSubmatrixEntity.getGID());
        // Map parse = (Map) JSONObject.parse(strData);
        List keys = new ArrayList<>();
        for (String s : map.keySet()) {
            keys.add(s);
        }
        // Keep only the incoming keys that exist as columns in the submatrix.
        List tField = new ArrayList<>();
        for (LocalColumnEntity localColumnEntity : localColumnEntities) {
            if (keys.contains(localColumnEntity.getName())){
                tField.add(localColumnEntity.getName());
            }
        }
        List> tData = JSONString.json2MapList(strData);
        if (tData == null || tData.size() < 1)
        {
            logger.error("传入的JSON格式不正确,解析时出现问题。");
            return false;
        }
        // NOTE(review): intDataRow counts field values (not rows) and tDataKey
        // accumulates duplicates across rows; both are currently unused below.
        Integer intDataRow = 0;
        List tDataKey = new ArrayList<>();
        // Convert the incoming rows into column-oriented value lists.
        Map> iDataMap = new LinkedCaseInsensitiveMap<>();
        // Only take values for fields that already exist in the database.
        for (Map iItemMap: tData)
        {
            for (String strField : tField)
            {
                // Check whether the incoming row carries this database field.
                if (iItemMap.containsKey(strField))
                {
                    String strValue = iItemMap.get(strField).toString();
                    List tValue = null;
                    // Reuse the existing value list for this field if one was
                    // already created (must not be recreated); otherwise create it.
                    if (iDataMap.containsKey(strField))
                    {
                        tValue = iDataMap.get(strField);
                    }
                    else// no list instance yet
                    {
                        tValue = new ArrayList<>();
                        iDataMap.put(strField, tValue);
                    }
                    // Primary-key column vs. regular data column.
                    if (strField.toLowerCase().equals(m_strPKFieldName.toLowerCase()))
                    {
                        // Primary-key field: millisecond timestamp as Long.
                        tValue.add(Long.valueOf(strValue));
                    }
                    else
                    {
                        // Regular field: numeric reading as Double.
                        tValue.add(Double.valueOf(strValue));
                    }
                    tDataKey.add(strField);
                    intDataRow++;
                }
            }
        }
        // The incoming data may only cover a subset of the channels.
        //boolean blnResult = this.saveData(iDataMap, iDataSubmatrix, tField);
        // boolean blnResult = this.saveData(iDataMap, iDataSubmatrix, tDataKey);
        if (iDataMap == null || iDataMap.size() < 1)
        {
            logger.error("传入的数据是null,数据入库失败。");
            return false;
        }
        String strDaoIoCName = dataSubmatrixEntity.getDaoIoCName();
        String strTableName = dataSubmatrixEntity.getTableName();
        if (tField == null || tField.size() < 1)
        {
            logger.error("没有字段信息,写入数据失败。");
            return false;
        }
        boolean blnResult = false;
        IProductDataSpaceDao iProductDataSpaceDao = null;
        String strMessage = "";
        // Pick the storage backend configured on the submatrix.
        if(m_tProductDataSpaceDaoMap.containsKey(strDaoIoCName))
        {
            iProductDataSpaceDao = m_tProductDataSpaceDaoMap.get(strDaoIoCName);
        }
        else
        {
            strMessage = "创建数据解析接口(IProductDataSpaceDao)对象失败。使用的创建标识:【"+ strDaoIoCName +"】";
            logger.error(strMessage);
            return false;
        }
        // Build the table description (name + typed columns) for the insert.
        TableAttrEntity oTableAttrEntity = EntityBean.getEntityInstance(TableAttrEntity.class);
        oTableAttrEntity.setTableName(strTableName);
        List tTableColumnEntity = new ArrayList<>();
        for (String strFiled : tField)
        {
            TableColumnEntity oTableColumnEntity = EntityBean.getEntityInstance(TableColumnEntity.class);
            // Primary-key column: first column, typically the time value.
            if (strFiled.toUpperCase().equals(m_strPKFieldName.toUpperCase()))
            {
                oTableColumnEntity.setName(strFiled);
                oTableColumnEntity.setType(m_strPKFieldType);
                oTableColumnEntity.setPKField(true);
                oTableColumnEntity.setLength(19);
            }
            else
            {
                oTableColumnEntity.setName(strFiled);
                oTableColumnEntity.setType("DOUBLE");
                oTableColumnEntity.setPKField(false);
                oTableColumnEntity.setLength(53);
            }
            tTableColumnEntity.add(oTableColumnEntity);
        }
        oTableAttrEntity.setField(tTableColumnEntity);
        // TableAttrEntity oTableAttrEntity = super.generateTableEntity(strTableName, tField);
        // Pre-insert data correction is optional and skipped here
        // (environmental-test data is not validated).
        //tDataMap = iDataRuleService.reviseData(strTableName, tDataMap, strDaoName);
        // Write the data to the database.
        try {
            blnResult = iProductDataSpaceDao.insert(oTableAttrEntity, iDataMap);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        // Clear the per-column value lists after the write.
        for (String strField : tField)
        {
            if (iDataMap.containsKey(strField))
            {
                iDataMap.get(strField).clear();
            }
        }
        return blnResult;
    }
}
public class KafkaConsumerThread extends Thread {
protected final Logger logger = LoggerFactory.getLogger(this.getClass());
DataAcquistionService dataAcquistionService = ApplicationContextHelper.getBean(DataAcquistionService.class);
private KafkaConsumer kafkaConsumer;
private Consumer consumer;
KafkaConsumerThread(KafkaConsumer kafkaConsumer, Consumer consumer){
this.kafkaConsumer=kafkaConsumer;
this.consumer=consumer;
}
@Override
public void run() {
try {
while (true){
if (isInterrupted()) {
throw new InterruptedException();
}
//拉取topic消息
ConsumerRecords poll = kafkaConsumer.poll(Duration.ofMillis(1000));
for (ConsumerRecord stringStringConsumerRecord : poll) {
String value = stringStringConsumerRecord.value();
String topic = stringStringConsumerRecord.topic();
boolean b = dataAcquistionService.saveDataAcquistion(topic, value);
// Boolean b = restTemplate.postForObject("http:localhost:2000/kafka/save/"+topic+"/"+value,null, Boolean.class);
if (b==false){
logger.info("topic格式不正确");
}
// dataAcquistionService.saveDataAcquistion()
consumer.accept(stringStringConsumerRecord.value());
}
}
} catch (InterruptedException e) {
Set subscription = kafkaConsumer.subscription();
logger.info("topic:{} 已停止监听,线程停止!", StringUtils.join(subscription,","),e);
}catch (Exception e){
Set subscription = kafkaConsumer.subscription();
logger.info("topic:{} 消费者运行异常!", StringUtils.join(subscription,","),e);
}finally {
//关闭消费者
try {
kafkaConsumer.close();
} catch (Exception ex) {
}
}
}
}
/**
 * REST facade over the Kafka consumer/topic management service.
 * All endpoints delegate directly to DataAcquistionService / KafkaHelper.
 */
@RestController
@RequestMapping("/kafka")
public class KafkaController {
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());
    // Resolved from the context rather than @Autowired -- presumably to avoid
    // a circular dependency; TODO confirm.
    KafkaHelper kafkaHelper = ApplicationContextHelper.getBean(KafkaHelper.class);
    @Autowired
    private DataAcquistionService dataAcquistionService;
    @Autowired
    private KafkaTemplate kafkaTemplate;

    /** Subscribes a consumer to the topic derived from a device code. */
    @PostMapping("/createConsumerByCode/{code}")
    public boolean createConsumerConfigByCode(@PathVariable String code){
        return dataAcquistionService.createConsumerByCode(code);
    }

    /** Subscribes consumers for all known equipment. */
    @PostMapping("/createAllConsumer")
    public void createAllConsumer(){
        dataAcquistionService.createAllConsumer();
    }

    /** Lists device topics that have no running consumer. */
    @GetMapping("/listNoTopicConsumer")
    public List listNoTopicConsumer(){
        return dataAcquistionService.listNoTopicConsumer();
    }

    /** Creates consumers for all topics still missing one. */
    @PostMapping("/createOtherConsumer")
    public boolean createOtherConsumer(){
        return dataAcquistionService.createOtherConsumer();
    }

    /** Reports whether a device code is currently subscribed. */
    @GetMapping("/getConsumerStaus/{code}")
    public boolean getConsumerStaus(@PathVariable String code){
        return dataAcquistionService.getConsumerStaus(code);
    }

    /** Publishes a raw message to a topic (manual testing hook). */
    @PostMapping("/send")
    public void send(@RequestParam String topic,@RequestParam String message){
        kafkaTemplate.send(topic,message);
    }

    /** Creates a topic on the broker. */
    @PostMapping("/createTopic/{topic}")
    public boolean createTopic(@PathVariable String topic){
        return kafkaHelper.createTopic(topic);
    }

    /** Stops the consumer of one topic. */
    @PostMapping("/deleteConsumerByTopic/{topic}")
    public boolean deleteConsumerByTopic(@PathVariable String topic){
        return dataAcquistionService.deleteConsumerByTopic(topic);
    }

    /** Stops all consumers. */
    @PostMapping("/deleteAllConsumer")
    public boolean deleteAllConsumer(){
        return dataAcquistionService.deleteAllConsumer();
    }

    // NOTE(review): debugging leftover -- always returns false and only pokes
    // the request context / a bean; consider removing before release.
    @GetMapping("/test")
    public boolean test(){
        ServletRequestAttributes sra = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
        RequestContextHolder.setRequestAttributes(sra, true);
        SpringIoCBean.getBean("EquipmentModel");
        return false;
    }

    /** Persists one message as if it had been consumed from {@code topic}. */
    @GetMapping("/save/{topic}/{value}")
    public boolean save(@PathVariable String topic,@PathVariable String value){
        return dataAcquistionService.saveDataAcquistion(topic, value);
    }
}
@Configuration/*(value = "KafkaUtils")*/
public class KafkaHelper {
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());
    // Lazily defaulted in createTopic: 4 partitions, replication factor 1.
    private static Integer partitions;
    private static short fetchers;
    @Autowired
    private KafkaTemplate kafkaTemplate;
    @Autowired
    private AdminClient adminClient;
    @Autowired
    private ConsumerContainer consumerContainer;

    /**
     * Creates a topic on the broker, defaulting to 4 partitions and a
     * replication factor of 1 when none have been configured.
     *
     * @param name topic name
     * @return true when the broker confirmed creation, false on any failure
     */
    public boolean createTopic(String name){
        logger.info("kafka创建topic:{}",name);
        if (partitions == null) {
            partitions = 4;
        }
        if (fetchers == 0) {
            fetchers = 1;
        }
        NewTopic newTopic = new NewTopic(name, partitions, fetchers);
        CreateTopicsResult creation = adminClient.createTopics(Arrays.asList(newTopic));
        try {
            creation.all().get();
        } catch (Exception e) {
            logger.error("kafka创建topic失败",e);
            return false;
        }
        return true;
    }

    /**
     * Lists every topic name known to the broker.
     *
     * @return all topic names
     * @throws Exception when the admin request fails
     */
    public List list() throws Exception {
        Set topicNames = adminClient.listTopics().names().get();
        return new ArrayList<>(topicNames);
    }

    /**
     * Deletes a topic from the broker.
     *
     * @param name topic name
     * @return true when the broker confirmed deletion, false on any failure
     */
    public boolean deleteTopic(String name){
        logger.info("kafka删除topic:{}",name);
        DeleteTopicsResult deletion = adminClient.deleteTopics(Arrays.asList(name));
        try {
            deletion.all().get();
        } catch (Exception e) {
            logger.error("kafka删除topic失败",e);
            return false;
        }
        return true;
    }

    /**
     * Fetches the broker-side description of one topic.
     *
     * @param name topic name
     * @return the description, or null when the topic is unknown or the
     *         request fails
     */
    public TopicDescription describeTopic(String name){
        DescribeTopicsResult details = adminClient.describeTopics(Arrays.asList(name));
        try {
            Map descriptions = details.all().get();
            TopicDescription description = (TopicDescription) descriptions.get(name);
            if (description != null) {
                return description;
            }
        } catch (Exception e) {
            logger.error(" 获取topic详情异常:",e);
        }
        return null;
    }

    /**
     * Publishes an event's info as JSON to the event's topic; events without
     * a topic are silently dropped.
     *
     * @param e event carrying topic and payload
     */
    public void pushEvent(IEvent e) {
        if(StringUtils.isEmpty(e.getTopic())) {
            return;
        }
        logger.info("发送kafka消息: topic = {}, info = {}", e.getTopic(), e.getInfo());
        kafkaTemplate.send(e.getTopic(), JSONObject.toJSONString(e.getInfo()));
    }

    /**
     * Registers and starts a consumer for a topic.
     *
     * @param topic    topic to subscribe
     * @param consumer callback invoked with each record value
     */
    public void addConsumer(String topic, Consumer consumer){
        logger.info("将为topic:{} 创建消费者",topic);
        consumerContainer.addConsumer(topic,consumer);
    }

    /**
     * Stops the consumer registered for a topic.
     *
     * @param topic topic whose consumer is stopped
     */
    public void deleteConsumer(String topic){
        logger.info("将删除topic:{} 消费者",topic);
        consumerContainer.deleteConsumer(topic);
    }

    /**
     * @return the topics that currently have a running consumer
     */
    public List listConsumer(){
        return consumerContainer.listConsumer();
    }
}
@Configuration
public class KafkaProps {
    /** Comma-separated broker list, from spring.kafka.bootstrap-servers. */
    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;

    /**
     * Shared client property map reused by consumers, producers and the admin
     * client (each client type ignores the keys it does not know).
     *
     * @return the combined Kafka client configuration
     */
    @Bean("kafkaPropsConfig")
    public Map getProps(){
        Map config = new HashMap<>(5);
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, "test");
        config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,"latest");
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,StringSerializer.class);
        return config;
    }

    /** Admin client built from the shared property map. */
    @Bean
    public AdminClient adminClient(){
        return AdminClient.create(getProps());
    }

    /** Producer factory backing the KafkaTemplate bean below (not itself a bean). */
    public ProducerFactory producerFactory() {
        return new DefaultKafkaProducerFactory<>(getProps());
    }

    /** Template used to publish messages. */
    @Bean
    public KafkaTemplate kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
/**
 * Default per-record callback wired into each consumer thread: simply logs
 * every metadata message received from Kafka.
 */
public class PTVMateConsumer implements Consumer {
    protected final Logger logger = LoggerFactory.getLogger(this.getClass());

    /**
     * Logs the received record value.
     *
     * @param command raw record value as consumed from the topic
     */
    @Override
    public void accept(String command) {
        // Fix: removed a stray empty statement (double semicolon).
        logger.info("监听元数据, command = {}", command);
    }
}