hadoop@ubuntu:~/app/kafka$ ls
bin config kafka_log libs LICENSE logs NOTICE site-docs start.sh
hadoop@ubuntu:~/app/kafka$ bin/kafka-topics.sh --create --zookeeper 192.168.0.131:2181 --partitions 2 --replication-factor 1 --topic avro_topic
WARNING: Due to limitations in metric names, topics with a period ('.') or underscore ('_') could collide. To avoid issues it is best to use either, but not both.
Created topic "avro_topic".
3.2 生产者端的话单序列化类
package com.dhhy.avro;import org.apache.avro.io.BinaryEncoder;import org.apache.avro.io.DatumWriter;import org.apache.avro.io.EncoderFactory;import org.apache.avro.specific.SpecificDatumWriter;import org.apache.kafka.common.errors.SerializationException;import org.apache.kafka.common.serialization.Serializer;import java.io.ByteArrayOutputStream;import java.io.IOException;import java.util.Map;/**
* 序列化类
* Created by JayLai on 2020-03-24 22:17:40
*/publicclassAvroSerializerimplementsSerializer<User>{@Overridepublicvoidconfigure(Map<String,?> map,boolean b){}@Overridepublicbyte[]serialize(String topic, User data){if(data == null){return null;}
DatumWriter<User> writer =newSpecificDatumWriter<>(data.getSchema());
ByteArrayOutputStream out =newByteArrayOutputStream();
BinaryEncoder encoder = EncoderFactory.get().directBinaryEncoder(out, null);try{
writer.write(data, encoder);}catch(IOException e){thrownewSerializationException(e.getMessage());}return out.toByteArray();}@Overridepublicvoidclose(){}}
3.3 生产者
package com.dhhy.avro;import com.google.gson.Gson;import org.apache.kafka.clients.producer.Callback;import org.apache.kafka.clients.producer.KafkaProducer;import org.apache.kafka.clients.producer.ProducerRecord;import org.apache.kafka.clients.producer.RecordMetadata;import java.util.HashMap;import java.util.Map;import java.util.Properties;/**
* Created by JayLai on 2020-02-24 19:55:29
*/publicclassAvroProducer{publicstaticfinal String brokerList ="192.168.0.131:9092,192.168.0.132:9092,192.168.0.133:9092";publicstaticfinal String topic ="avro_topic";staticint count =0;/**
* 创建User对象
* @return
*/publicstatic User createUser(){
User user = User.newBuilder().setName("Jay").setId(++count).setPhonenum("18814123456").build();return user;}publicstaticvoidmain(String[] args){
User[] users =newUser[10];for(int i =0; i <10; i++){
users[i]=createUser();}
Properties properties =newProperties();
properties.put("bootstrap.servers", brokerList);
properties.put("key.serializer","org.apache.kafka.common.serialization.StringSerializer");
properties.put("value.serializer","com.dhhy.avro.AvroSerializer");//自己直生产者客户端参数并创建KafkaProducer实例
KafkaProducer<String, User> producer =newKafkaProducer<>(properties);//发送消息
Map<String, Object> map =newHashMap<>();
Gson gson=newGson();try{for(User user : users){
ProducerRecord<String, User> record =newProducerRecord<>(topic, user);
producer.send(record,newCallback(){@OverridepublicvoidonCompletion(RecordMetadata metadata, Exception exception){if(exception != null){
exception.printStackTrace();}else{
map.put("topic", metadata.topic());
map.put("partition", metadata.partition());
map.put("offset", metadata.offset());
map.put("user", user);
System.out.println(gson.toJson(map));}}});}}catch(Exception e){
e.printStackTrace();}finally{//关闭生产着客户端实例if(producer != null){
producer.close();}}}}
3.4 消费者端的话单反序列化类
package com.dhhy.avro;import org.apache.avro.io.BinaryDecoder;import org.apache.avro.io.DatumReader;import org.apache.avro.io.DecoderFactory;import org.apache.avro.specific.SpecificDatumReader;import org.apache.kafka.common.serialization.Deserializer;import java.io.ByteArrayInputStream;import java.io.IOException;import java.util.Map;/**
* 反序列化类
* Created by JayLai on 2020-03-24 22:30:03
*/publicclassAvroDeserializerimplementsDeserializer<User>{@Overridepublicvoidconfigure(Map<String,?> map,boolean b){}@Overridepublicvoidclose(){}@Overridepublic User deserialize(String topic,byte[] data){if(data == null){return null;}
User user =newUser();
ByteArrayInputStream in =newByteArrayInputStream(data);
DatumReader<User> userDatumReader =newSpecificDatumReader<>(User.getClassSchema());
BinaryDecoder decoder = DecoderFactory.get().directBinaryDecoder(in, null);try{
user = userDatumReader.read(null, decoder);}catch(IOException e){
e.printStackTrace();}return user;}}3.4 消费者
package com.dhhy.avro;import org.apache.kafka.clients.consumer.ConsumerRecord;import org.apache.kafka.clients.consumer.ConsumerRecords;import org.apache.kafka.clients.consumer.KafkaConsumer;import java.time.Duration;import java.util.Collections;import java.util.Properties;import java.util.concurrent.atomic.AtomicBoolean;/**
* Created by JayLai on 2020-03-24 23:34:17
*/publicclassAvroConsumer{publicstaticfinal String brokerList ="192.168.0.131:9092,192.168.0.132:9092,192.168.0.133:9092";publicstaticfinal String topic ="avro_topic";publicstaticfinal String groupId ="avro_group_001";publicstaticfinal AtomicBoolean isRunning =newAtomicBoolean(true);publicstaticvoidmain(String[] args){
Properties properties =newProperties();
properties.put("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
properties.put("value.deserializer","com.dhhy.avro.AvroDeserializer");
properties.put("bootstrap.servers", brokerList);
properties.put ("group.id", groupId);
properties.put ("auto.offset.reset","earliest");//创建一个消费者客户端实例
KafkaConsumer<String, User> consumer =newKafkaConsumer(properties);//订阅主题
consumer.subscribe(Collections.singletonList(topic));
consumer.partitionsFor(topic);//循环消货消息while(isRunning.get()){
ConsumerRecords<String, User> records =
consumer.poll(5000);
System.out.println(records.count());for(ConsumerRecord<String, User> record :records ){
System.out.println("{topic:"+ record.topic()+" ,partition:"+ record.partition()+" ,offset:"+ record.offset()+" ,key:"+ record.topic()+" ,value:"+record.value().toString()+"}");}}}}
//关键字的使用探讨
/*访问关键词:
  private 只能在本类中访问
  public 在任何位置都可以访问(跨包亦可)
  protected 只能在包中和子类中访问
  默认的(无修饰符) 只能在包中访问 */
/*final 类 / 方法 / 变量:
  final 类 不能被继承
  final 方法 不能被子类覆盖, 但可以继承
  final 变量 只能有一次赋值, 赋值后不能改变
  final 不能用来修饰构造方法 */
//this()
What’s new in Zabbix 2.0?
去年开始使用Zabbix的时候,是1.8.X的版本,今年Zabbix已经跨入了2.0的时代。看了2.0的release notes,和performance相关的有下面几个:
:: Performance improvements::Trigger related da
修改jboss端口
%JBOSS_HOME%\server\{服务实例名}\conf\bindingservice.beans\META-INF\bindings-jboss-beans.xml
中找到
<!-- The ports-default bindings are obtained by taking the base bindin
@echo off
::Demo: delete files under a given path that are older than a given number of days
::(age is judged by the date string embedded in the file name, not the file timestamp).
::If the dry-run output looks correct, remove the "echo" in front of "del" to actually delete.
::This example assumes file names contain a date string (e.g. bak-2009-12-25.log).
rem Path holding the files to be deleted.
set SrcDir=C:/Test/BatHome
rem Number of days to keep.
set DaysAgo=1
HTML5的video和audio标签是用来在网页中加入视频和音频的标签,在支持html5的浏览器中不需要预先加载Adobe Flash浏览器插件就能轻松快速的播放视频和音频文件。而html5media.js可以在不支持html5的浏览器上使video和audio标签生效。 How to enable <video> and <audio> tags in