如今,许多网站都有交流区,该项目即是完成一个实用的提供交流讨论的地方。
comment
-- User comments/replies. entity_type/entity_id identify what is commented on
-- (e.g. ENTITY_TYPE_POST — see addComment below); target_id is presumably the
-- user being replied to — confirm against the application constants.
-- utf8mb4 instead of utf8: MySQL's `utf8` is 3 bytes/char and cannot store
-- emoji or other supplementary-plane characters in user-generated `content`.
CREATE TABLE `comment` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL,
`entity_type` int(11) DEFAULT NULL,
`entity_id` int(11) DEFAULT NULL,
`target_id` int(11) DEFAULT NULL,
`content` text,
`status` int(11) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `index_user_id` (`user_id`),
KEY `index_entity_id` (`entity_id`)
) ENGINE=InnoDB AUTO_INCREMENT=240 DEFAULT CHARSET=utf8mb4;
discuss_post
-- Forum posts. `user_id` changed from varchar(45) to int(11): every other
-- table stores user ids as int(11) and the DiscussPost entity maps it as a
-- Java int (see the entity near the end of these notes); comparing a varchar
-- column against an int value defeats the index via implicit coercion.
-- utf8mb4 so emoji in title/content can be stored (MySQL `utf8` is 3-byte).
CREATE TABLE `discuss_post` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL,
`title` varchar(100) DEFAULT NULL,
`content` text,
`type` int(11) DEFAULT NULL COMMENT '0-普通; 1-置顶;',
`status` int(11) DEFAULT NULL COMMENT '0-正常; 1-精华; 2-拉黑;',
`create_time` timestamp NULL DEFAULT NULL,
`comment_count` int(11) DEFAULT NULL,
`score` double DEFAULT NULL,
`post_url` varchar(200) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `index_user_id` (`user_id`)
) ENGINE=InnoDB AUTO_INCREMENT=300285 DEFAULT CHARSET=utf8mb4;
user_id:表示发帖人的id
title:表示标题
content:表示帖子内容
type:表示帖子类型 0表示普通帖子 1表示置顶帖子
status:表示帖子的状态 0表示正常帖子 1表示精华帖 2表示拉黑帖
create_time:表示发帖日期
comment_count:表示帖子的评论数,是一个冗余字段,目的是为了提高查询效率
score:帖子权重
post_url:帖子携带的图片
login_ticket
-- Login tickets (later superseded by Redis storage, see the login section).
-- utf8mb4 for consistency with the other tables.
CREATE TABLE `login_ticket` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`user_id` int(11) NOT NULL,
`ticket` varchar(45) NOT NULL,
`status` int(11) DEFAULT '0' COMMENT '0-有效; 1-无效;',
-- NOTE(review): ON UPDATE CURRENT_TIMESTAMP silently resets `expired` on ANY
-- row update (e.g. flipping `status` on logout) — confirm this is intended,
-- since the application also sets `expired` explicitly.
`expired` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `index_ticket` (`ticket`(20))
) ENGINE=InnoDB AUTO_INCREMENT=15 DEFAULT CHARSET=utf8mb4;
user_id:用户id
ticket:凭证编号
status:凭证状态 0表示有效凭证 1表示无效凭证
expired:用于检测这个凭证是否已经过期
message
-- Private letters AND system notices share this table: for letters
-- conversation_id is "smallerId_biggerId"; for notices from_id is the system
-- user and conversation_id is the event topic (see the Kafka consumer below).
-- utf8mb4: MySQL's `utf8` cannot store 4-byte characters in `content`.
CREATE TABLE `message` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`from_id` int(11) DEFAULT NULL,
`to_id` int(11) DEFAULT NULL,
`conversation_id` varchar(45) NOT NULL,
`content` text,
`status` int(11) DEFAULT NULL COMMENT '0-未读;1-已读;2-删除;',
`create_time` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `index_from_id` (`from_id`),
KEY `index_to_id` (`to_id`),
KEY `index_conversation_id` (`conversation_id`)
) ENGINE=InnoDB AUTO_INCREMENT=368 DEFAULT CHARSET=utf8mb4;
from_id:发消息的人的id 1-系统
to_id:发送目标的人的id
conversation_id:规则以from_id和to_id用_拼接且小的在前大的在后
content:消息内容
status:消息的状态 0表示未读 1表示已读 2表示删除
create_time:消息的发送时间
user
-- Accounts. `password` stores md5(plaintext + salt) — 32 hex chars (see the
-- updatePassword controller below), so varchar(50) suffices.
-- utf8mb4 for full Unicode; the (20) prefix indexes keep index keys short.
CREATE TABLE `user` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`username` varchar(50) DEFAULT NULL,
`password` varchar(50) DEFAULT NULL,
`salt` varchar(50) DEFAULT NULL,
`email` varchar(100) DEFAULT NULL,
`type` int(11) DEFAULT NULL COMMENT '0-普通用户; 1-超级管理员; 2-版主;',
`status` int(11) DEFAULT NULL COMMENT '0-未激活; 1-已激活;',
`activation_code` varchar(100) DEFAULT NULL,
`header_url` varchar(200) DEFAULT NULL,
`create_time` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `index_username` (`username`(20)),
KEY `index_email` (`email`(20))
) ENGINE=InnoDB AUTO_INCREMENT=151 DEFAULT CHARSET=utf8mb4;
username:用户名称
password:用户密码
salt:用来加密
email:用户的邮箱
type:用户的类型 0表示普通用户 1表示超级管理员 2表示版主
status: 0表示未激活 1表示激活
activation_code:激活码
header_url:头像地址
create_time:用户注册时间
实现首页最新帖子的功能比较简单,我们只需要通过操作数据库获取所有帖子即可。
当然,首页中每个帖子显示点赞量和回帖数的逻辑也较为简单,只需要操作数据库即可
对于每一个帖子都进行分数计算的话要求更多资源,我们可以通过任务调度器统计某一时间段内发生变化的帖子再进行分数统计,以免浪费资源。
最热榜单需要定时更新,定时功能可以使用Spring Quartz来实现这一功能。最热通过计算分数排序得到。计算分数在发帖、点赞、加精中进行,因为这也是一个频繁的操作,所以要存入Redis。
添加依赖
<!-- quartz依赖 -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-quartz</artifactId>
</dependency>
配置
# Quartz: persist jobs in the database (JDBC job store) so scheduled tasks
# survive restarts and can be shared across a cluster of instances.
spring.quartz.job-store-type=jdbc
spring.quartz.scheduler-name=communityScheduler
# Auto-generate a unique instance id per node (required for clustering).
spring.quartz.properties.org.quartz.scheduler.instanceId=AUTO
spring.quartz.properties.org.quartz.jobStore.class=org.springframework.scheduling.quartz.LocalDataSourceJobStore
# Standard JDBC delegate for the Quartz job-store tables.
spring.quartz.properties.org.quartz.jobStore.driverDelegateClass=org.quartz.impl.jdbcjobstore.StdJDBCDelegate
spring.quartz.properties.org.quartz.jobStore.isClustered=true
spring.quartz.properties.org.quartz.threadPool.class=org.quartz.simpl.SimpleThreadPool
# Worker threads available to the scheduler.
spring.quartz.properties.org.quartz.threadPool.threadCount=5
设置定时任务
1.定义执行任务的Job,要实现Quartz提供的Job接口。Job接口里面只有一个execute()
方法。
/**
 * Quartz job that recomputes the score of every post that changed recently.
 * Post ids are collected in a Redis set (RedisKeyUtil.getPostScoreKey());
 * this job drains that set and rescores each post, so only changed posts
 * are touched instead of the whole table.
 */
public class PostScoreRefreshJob implements Job, CommunityConstant {
private static final Logger logger = LoggerFactory.getLogger(PostScoreRefreshJob.class);
@Autowired
private RedisTemplate redisTemplate;
@Autowired
private DiscussPostService discussPostService;
@Autowired
private LikeService likeService;
@Autowired
private ElasticsearchService elasticsearchService;
// Fixed site "epoch": a post's score grows with whole days since this date.
private static final Date epoch;
static {
try {
epoch = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse("2014-08-01 00:00:00");
} catch (ParseException e) {
throw new RuntimeException("初始化牛客纪元失败!", e);
}
}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
String redisKey = RedisKeyUtil.getPostScoreKey();
BoundSetOperations operations = redisTemplate.boundSetOps(redisKey);
// Nothing queued: skip the run entirely.
if (operations.size() == 0) {
logger.info("[任务取消] 没有需要刷新的帖子!");
return;
}
logger.info("[任务开始] 正在刷新帖子分数: " + operations.size());
// pop() removes one id at a time, so producers can keep adding concurrently.
while (operations.size() > 0) {
this.refresh((Integer) operations.pop());
}
logger.info("[任务结束] 帖子分数刷新完毕!");
}
/**
 * Recomputes and persists one post's score, then syncs the post to
 * Elasticsearch so search results rank by the fresh value.
 */
private void refresh(int postId) {
DiscussPost post = discussPostService.findDiscussPostById(postId);
if (post == null) {
logger.error("该帖子不存在: id = " + postId);
return;
}
// "Wonderful" (highlighted) posts have status == 1 (see the DDL above).
boolean wonderful = post.getStatus() == 1;
// Comment count is denormalized onto the post row.
int commentCount = post.getCommentCount();
// Likes are counted from Redis.
long likeCount = likeService.findEntityLikeCount(ENTITY_TYPE_POST, postId);
// Weight: 75 for wonderful + 10 per comment + 2 per like.
double w = (wonderful ? 75 : 0) + commentCount * 10 + likeCount * 2;
// score = log10(weight, floored at 1) + days since the epoch.
// The long/int division truncates to whole days — appears intentional.
double score = Math.log10(Math.max(w, 1))
+ (post.getCreateTime().getTime() - epoch.getTime()) / (1000 * 3600 * 24);
// Persist the new score.
discussPostService.updateScore(postId, score);
// Keep the search index in step with the database.
post.setScore(score);
elasticsearchService.saveDiscussPost(post);
}
}
2.创建Schedule和Trigger,并执行定时任务。
/**
 * Declares the Quartz JobDetail and Trigger beans for the post-score
 * refresh task; Spring registers them with the scheduler configured in
 * application.properties.
 */
@Configuration
public class QuartzConfig {
/** Durable, recoverable JobDetail wrapping PostScoreRefreshJob. */
@Bean
public JobDetailFactoryBean postScoreRefreshJobDetail() {
JobDetailFactoryBean jobDetail = new JobDetailFactoryBean();
jobDetail.setName("postScoreRefreshJob");
jobDetail.setGroup("communityJobGroup");
jobDetail.setJobClass(PostScoreRefreshJob.class);
// Keep the job stored even without triggers, and re-run it after a
// failed or interrupted execution.
jobDetail.setDurability(true);
jobDetail.setRequestsRecovery(true);
return jobDetail;
}
/** Simple trigger that fires the refresh job every five minutes. */
@Bean
public SimpleTriggerFactoryBean postScoreRefreshTrigger(JobDetail postScoreRefreshJobDetail) {
SimpleTriggerFactoryBean trigger = new SimpleTriggerFactoryBean();
trigger.setName("postScoreRefreshTrigger");
trigger.setGroup("communityTriggerGroup");
trigger.setJobDetail(postScoreRefreshJobDetail);
trigger.setJobDataMap(new JobDataMap());
// Five minutes, expressed in milliseconds.
trigger.setRepeatInterval(5 * 60 * 1000);
return trigger;
}
}
注册功能使用到发送邮件的方式:
首先将用户信息插入到数据库表中,此时该用户的账号还未激活,必须提供激活码,激活码如何获取?通过发送邮件,邮件中存在超链接可以通过点击它来跳转到新地址,新地址携带了激活码和用户Id,再通过后端验证这两个激活码是否相同来激活。
# MailProperties
spring.mail.host=smtp.qq.com
spring.mail.port=465
spring.mail.username=1021634732@qq.com
spring.mail.password=xxxxxxxx
spring.mail.protocol=smtp
spring.mail.properties.mail.smtp.socketFactory.class=javax.net.ssl.SSLSocketFactory
spring.mail.properties.mail.smtp.ssl.enable=true
发送邮件步骤:
/**
 * Sends an HTML e-mail via JavaMail.
 *
 * @param to      recipient address
 * @param subject mail subject
 * @param content mail body, rendered as HTML (setText's second arg is true)
 */
public void sendMail(String to ,String subject, String content){
try {
MimeMessage message = mailSender.createMimeMessage();
MimeMessageHelper helper = new MimeMessageHelper(message);
helper.setFrom(from);
helper.setTo(to);
helper.setSubject(subject);
helper.setText(content,true);
mailSender.send(helper.getMimeMessage());
} catch (MessagingException e) {
// NOTE(review): printStackTrace swallows the failure silently — prefer
// logging through the class logger and/or signalling the caller.
e.printStackTrace();
}
}
在登录阶段,服务器端生成token凭证存放到Cookie中,当大量用户登录时,为了节省资源使用Redis存放token。
public static String getTicketKey(String ticket) {
return PREFIX_TICKET + SPLIT + ticket;
}
LoginTicket loginTicket = new LoginTicket();
loginTicket.setUserId(user.getId());
loginTicket.setTicket(CommunityUtil.generateUUID());
loginTicket.setStatus(0);
loginTicket.setExpired(new Date(System.currentTimeMillis() + expiredSeconds * 1000));
String redisKey = RedisKeyUtil.getTicketKey(loginTicket.getTicket());
redisTemplate.opsForValue().set(redisKey,loginTicket);
//成功登录后返回ticket给map
map.put("ticket",loginTicket.getTicket());
多个浏览器(客户端)同时向服务器发送携带Cookie的请求,服务器端以多线程方式处理,要避免单例Bean带来的线程安全问题。为每个线程单独存放一个User对象,这样就能更好地避免线程不安全
项目中的HostHolder
类就是用于替换Session,将User对象存入ThreadLocal
里面。
/**
 * Thread-scoped holder for the currently logged-in user, used in place of
 * the HTTP session: each request-handling thread sees only its own User.
 */
@Component
public class HostHolder {
// One User per thread; cleared when request handling finishes.
private ThreadLocal<User> currentUser = new ThreadLocal<>();
/** Binds the given user to the current thread. */
public void setUsers(User user){
this.currentUser.set(user);
}
/** Returns the user bound to the current thread, or null when anonymous. */
public User getUser(){
return this.currentUser.get();
}
/** Unbinds the user — must run at request completion to avoid leaks. */
public void clear(){
this.currentUser.remove();
}
}
设置拦截器:有些功能只有登录后才能访问,此时就要设置拦截器来拦截请求。拦截器的设置应该都很熟练了,不再赘述:
@Component
public class LoginTicketInterceptor implements HandlerInterceptor {
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
//
//
}
@Override
public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
/
}
@Override
public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
}
}
/**
 * Registers the login-ticket interceptor for all requests except static
 * resources (css/js/images), which need no authentication handling.
 */
@Configuration
public class WebMvcConfig implements WebMvcConfigurer {
@Autowired
private LoginTicketInterceptor loginTicketInterceptor;
@Override
public void addInterceptors(InterceptorRegistry registry) {
registry.addInterceptor(loginTicketInterceptor).excludePathPatterns("/**/*.css", "/**/*.js", "/**/*.png", "/**/*.jpg", "/**/*.jpeg");
}
}
添加配置
/**
 * Kaptcha (captcha image) producer: 100x40 px image, 4 characters drawn
 * from the configured alphabet, black text, no noise.
 */
@Configuration
public class KaptchaConfig {
@Bean
public Producer kaptchaProducer(){
Properties properties = new Properties();
properties.setProperty("kaptcha.image.width","100");
properties.setProperty("kaptcha.image.height","40");
properties.setProperty("kaptcha.textproducer.font.size","40");
properties.setProperty("kaptcha.textproducer.font.color","0,0,0");
// Alphabet the random characters are drawn from.
properties.setProperty("kaptcha.textproducer.char.string","0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijkmnoqrtuvwxyzlps");
properties.setProperty("kaptcha.textproducer.char.length","4");
// No distortion/noise applied to the image.
properties.setProperty("kaptcha.noise.impl","com.google.code.kaptcha.impl.NoNoise");
DefaultKaptcha kaptcha = new DefaultKaptcha();
Config config = new Config(properties);
kaptcha.setConfig(config);
return kaptcha;
}
}
存放验证码,由于验证码是大量生成的,并且变化很快。所以我们为了节省资源,就将它存入Redis里面。
设置验证码存放在Redis中的key
//登录验证码
public static String getKaptchaKey(String owner) {
return PREFIX_KAPTCHA + SPLIT + owner;
}
/**
 * Generates a captcha image, stores its text in Redis for 60 seconds under
 * a random "owner" key, hands that owner id to the client in a cookie, and
 * streams the PNG to the response. The captcha text itself never leaves
 * the server.
 */
@GetMapping(path = "/kaptcha")
public void getKaptcha(HttpServletResponse response){
String text = kaptchaProducer.createText();
BufferedImage image = kaptchaProducer.createImage(text);
// Random owner id: the client only ever sees this opaque token.
String kaptchaOwner = CommunityUtil.generateUUID();
String redisKey = RedisKeyUtil.getKaptchaKey(kaptchaOwner);
redisTemplate.opsForValue().set(redisKey, text, 60, TimeUnit.SECONDS);
// Cookie lifetime matches the Redis TTL.
Cookie cookie = new Cookie("kaptchaOwner", kaptchaOwner);
cookie.setMaxAge(60);
response.addCookie(cookie);
response.setContentType("image/png");
try {
OutputStream os = response.getOutputStream();
ImageIO.write(image,"png",os);
} catch (IOException e) {
// Bug fix: the original passed e.getMessage() as a dangling argument
// (no {} placeholder), losing the stack trace; pass the exception itself.
logger.error("响应验证码失败!", e);
}
}
处理每个请求时,都要根据凭证查询用户信息,访问频率较高。
private static final String PREFIX_USER = "user";
public static String getUserKey(int userId) {
return PREFIX_USER + SPLIT + userId;
}
UserService
方法
// 1. Read the user from the Redis cache; returns null on a miss.
private User getCache(int userId) {
String redisKey = RedisKeyUtil.getUserKey(userId);
return (User) redisTemplate.opsForValue().get(redisKey);
}
// 2. Cache miss: load the user from MySQL and cache it for one hour.
private User initCache(int userId) {
User user = userMapper.selectById(userId);
String redisKey = RedisKeyUtil.getUserKey(userId);
// NOTE(review): if userId is unknown, `user` is null and set(key, null)
// will fail — confirm callers only pass existing ids.
redisTemplate.opsForValue().set(redisKey, user, 3600, TimeUnit.SECONDS);
return user;
}
// 3. Data changed: drop the cached copy so the next read reloads it fresh.
private void clearCache(int userId) {
String redisKey = RedisKeyUtil.getUserKey(userId);
redisTemplate.delete(redisKey);
}
设置自定义注解:为了判断用户是否已经登录,同时不需要给每一个需要检验用户是否已经登录的方法进行判断,我们就可以使用注解。
/**
 * Marks a controller method that requires a logged-in user; enforced by
 * LoginRequireInterceptor, which redirects anonymous visitors to /login.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface LoginRequired {
}
设置拦截器
/**
 * Enforces @LoginRequired: when the target handler method carries the
 * annotation and no user is bound to the current thread, redirects to /login.
 */
@Component
public class LoginRequireInterceptor implements HandlerInterceptor {
@Autowired
private HostHolder hostHolder;
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
// Only method handlers can carry the annotation; other handlers pass through.
if(handler instanceof HandlerMethod){
HandlerMethod handlerMethod = (HandlerMethod) handler;
Method method = handlerMethod.getMethod();
LoginRequired loginRequired = method.getAnnotation(LoginRequired.class);
// Annotated method + anonymous visitor -> redirect and abort handling.
if(loginRequired != null && hostHolder.getUser() == null){
response.sendRedirect(request.getContextPath() + "/login");
return false;
}
}
return true;
}
}
文件上传
ublic String uploadHeader(MultipartFile headerImage, Model model){
if(headerImage == null){
model.addAttribute("error","你还没有选择图片");
return "/site/setting";
}
String fileName = headerImage.getOriginalFilename();
String suffix = fileName.substring(fileName.lastIndexOf("."));
if (StringUtils.isBlank(suffix)){
model.addAttribute("error","文件的格式不正确");
return "/site/setting";
}
fileName = CommunityUtil.generateUUID() + suffix;
File des = new File(uploadPath + "/" + fileName);
try {
headerImage.transferTo(des);
} catch (IOException e) {
logger.error("上传文件失败:" + e.getMessage());
throw new RuntimeException("上传文件失败,服务器发生异常!",e);
}
User user = hostHolder.getUser();
String headerUrl = domain + "/user/header/" + fileName;
userService.updateHeader(user.getId(),headerUrl);
return "redirect:/index";
}
/**
 * Changes the logged-in user's password after validating the inputs and the
 * original password, then invalidates the login ticket so the user must log
 * in again. View name unified to "site/setting" — one branch returned
 * "/site/setting" while every other branch omitted the leading slash.
 */
@LoginRequired
@PostMapping("/updatePassword")
public String updatePassword(String originalPassword, String newPassword, String confirmPassword, Model model, @CookieValue("ticket") String ticket) {
if (originalPassword == null) {
model.addAttribute("originalPasswordMsg", "请输入原始密码!");
return "site/setting";
}
if (newPassword == null) {
model.addAttribute("newPasswordMsg", "请输入新密码!");
return "site/setting";
}
if (confirmPassword == null) {
model.addAttribute("confirmPasswordMsg", "请输入新密码!");
return "site/setting";
}
User user = hostHolder.getUser();
// Stored password is md5(plaintext + salt).
if (!CommunityUtil.md5(originalPassword + user.getSalt()).equals(user.getPassword())) {
model.addAttribute("originalPasswordMsg", "密码错误!");
return "site/setting";
}
if (!confirmPassword.equals(newPassword)) {
model.addAttribute("confirmPasswordMsg", "两次输入的密码不一致!");
return "site/setting";
}
userService.updatePassword(user.getId(), CommunityUtil.md5(newPassword + user.getSalt()));
// Invalidate the current ticket and force a fresh login.
userService.logout(ticket);
return "redirect:/login";
}
关注的逻辑和点赞逻辑相似。
Redis key设计
//followee:userId:entityType -> zset(entityId,now)
public static String getFolloweeKey(int userId,int entityType){
return PREFIX_FOLLOWEE + SPLIT + userId + SPLIT + entityType;
}
//follower:entityType:entityId -> zset(userId,now)
public static String getFollowerKey(int entityType,int entityId){
return PREFIX_FOLLOWER + SPLIT + entityType + SPLIT + entityId;
}
Spring Security是一个专注于为Java应用程序提供身份认证和授权的框架,它的强大之处在于它可以轻松扩展以满足自定义的需求。
/**
 * Maps the user's numeric type to a Spring Security authority:
 * 1 -> admin, 2 -> moderator, anything else -> ordinary user.
 */
public Collection<? extends GrantedAuthority> getAuthorities(int userId){
User user = this.findUserById(userId);
List<GrantedAuthority> list = new ArrayList<>();
list.add(new GrantedAuthority() {
@Override
public String getAuthority() {
switch (user.getType()) {
case 1:
return AUTHORITY_ADMIN;
case 2:
return AUTHORITY_MODERATOR;
default:
return AUTHORITY_USER;
}
}
});
return list;
}
SecurityConfig
配置类添加配置信息
手动存入SpringSecurity
//封装用户请求信息
Authentication authentication = new UsernamePasswordAuthenticationToken(
user,user.getPassword(),userService.getAuthorities(user.getId()));
//存入SecurityContext
SecurityContextHolder.setContext(new SecurityContextImpl(authentication));
// Pin a post to the top (type = 1); moderator-only per the security rules below.
@RequestMapping(path = "/top", method = RequestMethod.POST)
@ResponseBody
public String setTop(int id) {
discussPostService.updateType(id, 1);
// Re-publish the post so the search index picks up the new type.
Event event = new Event()
.setTopic(TOPIC_PUBLISH)
.setUserId(hostHolder.getUser().getId())
.setEntityType(ENTITY_TYPE_POST)
.setEntityId(id);
eventProducer.fireEvent(event);
return CommunityUtil.getJSONString(0);
}
// Highlight a post (status = 1, "wonderful"); moderator-only per the security rules below.
@RequestMapping(path = "/wonderful", method = RequestMethod.POST)
@ResponseBody
public String setWonderful(int id) {
discussPostService.updateStatus(id, 1);
// Re-publish the post so the search index picks up the new status.
Event event = new Event()
.setTopic(TOPIC_PUBLISH)
.setUserId(hostHolder.getUser().getId())
.setEntityType(ENTITY_TYPE_POST)
.setEntityId(id);
eventProducer.fireEvent(event);
return CommunityUtil.getJSONString(0);
}
// Blacklist/delete a post (status = 2); admin-only per the security rules below.
@RequestMapping(path = "/delete", method = RequestMethod.POST)
@ResponseBody
public String setDelete(int id) {
discussPostService.updateStatus(id, 2);
// Fire a delete event so the post is removed from the search index.
Event event = new Event()
.setTopic(TOPIC_DELETE)
.setUserId(hostHolder.getUser().getId())
.setEntityType(ENTITY_TYPE_POST)
.setEntityId(id);
eventProducer.fireEvent(event);
return CommunityUtil.getJSONString(0);
}
.antMatchers(
"/discuss/top",
"/discuss/wonderful"
)
.hasAnyAuthority(
AUTHORITY_MODERATOR
)
.antMatchers(
"/discuss/delete"
)
.hasAnyAuthority(
AUTHORITY_ADMIN
)
使用Redis统计
根据用户IP计算访问量
使用HyperLogLog,性能好,且存储空间小(统计结果是近似值)。
// 合并这些数据
String redisKey = RedisKeyUtil.getUVKey(df.format(start), df.format(end));
redisTemplate.opsForHyperLogLog().union(redisKey, keyList.toArray());
// 返回统计的结果
return redisTemplate.opsForHyperLogLog().size(redisKey);
根据用户ID计算访问量
使用Bitmap,性能好、且可以统计精确的结果。
// 进行OR运算
return (long) redisTemplate.execute(new RedisCallback() {
@Override
public Object doInRedis(RedisConnection connection) throws DataAccessException {
String redisKey = RedisKeyUtil.getDAUKey(df.format(start), df.format(end));
connection.bitOp(RedisStringCommands.BitOperation.OR,
redisKey.getBytes(), keyList.toArray(new byte[0][0]));
return connection.bitCount(redisKey.getBytes());
}
});
在拦截器中进行统计
// Records site statistics on every request: UV keyed by client IP and DAU
// keyed by user id (see the HyperLogLog / bitmap notes above).
@Component
public class DataInterceptor implements HandlerInterceptor {
@Autowired
private DataService dataService;
@Autowired
private HostHolder hostHolder;
@Override
public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
// UV: every visitor counts, logged in or not.
String ip=request.getRemoteHost();
dataService.recordUV(ip);
// DAU: only logged-in users can be identified by id.
User user=hostHolder.getUser();
if(user!=null){
dataService.recordDAU(user.getId());
}
return true;
}
}
发布帖子用到了异步发送,通过AJAX使得网页能够增量更新呈现在页面,而不需要刷新整个页面。
异步请求:通过[jQuery]写,三个参数:访问路径,发送的JSONString字符串,回调函数(参数是服务器返回值)
将对象转化为Json字符串(字节流)是序列化方式的一种。
/**
 * Builds a JSON response body with a status code, optional message, and
 * optional extra key/value pairs.
 *
 * @param code status code (0 = success by convention in these notes)
 * @param msg  human-readable message, may be null
 * @param map  extra fields merged into the top-level object, may be null
 */
public static String getJSONString(int code, String msg, Map<String,Object> map){
JSONObject json = new JSONObject();
json.put("code",code);
json.put("msg",msg);
if(map != null){
// Iterate entries directly instead of keySet()+get() (one lookup, not two).
for (Map.Entry<String,Object> entry : map.entrySet()) {
json.put(entry.getKey(), entry.getValue());
}
}
return json.toJSONString();
}
/** Overload: JSON body with a code and message but no extra data. */
public static String getJSONString(int code,String msg){
return getJSONString(code,msg,null);
}
/** Overload: JSON body carrying only a status code. */
public static String getJSONString(int code){
return getJSONString(code,null,null);
}
jQuery写法示例:
定义前缀树
过滤敏感词使用树的结构,因为查找效率高。
/**
 * Node of the sensitive-word prefix tree (trie). Each node maps a character
 * to its child subtree; isKeyEnd marks that the path from the root to this
 * node spells a complete sensitive word.
 */
private class TrieNode{
// True when this node terminates a sensitive word.
private boolean isKeyEnd = false;
// Children keyed by the next character; each value is a subtree.
private Map<Character,TrieNode> childNode = new HashMap<>();
public boolean isKeyEnd(){
return isKeyEnd;
}
public void setKeyEnd(boolean keyEnd){
isKeyEnd = keyEnd;
}
public void addChildNode(Character c,TrieNode node){
childNode.put(c,node);
}
public TrieNode getChildNode(Character c){
return childNode.get(c);
}
}
其中,private Map
是形成树的关键。使用Map结构,value存放的是子树。
根据敏感词,初始化前缀树
敏感词通过网上获取sensitive-words.txt
/**
 * Loads sensitive words from sensitive-words.txt on the classpath (one word
 * per line) and inserts each into the prefix tree. Runs once after
 * dependency injection (@PostConstruct).
 */
@PostConstruct
public void init(){
InputStream is = this.getClass().getClassLoader().getResourceAsStream("sensitive-words.txt");
// getResourceAsStream returns null when the file is missing; the original
// NPE'd inside the try instead of reporting the real problem (the catch
// only covered IOException).
if (is == null) {
logger.error("加载敏感词失败:" + "sensitive-words.txt not found on classpath");
return;
}
try(
BufferedReader reader = new BufferedReader(new InputStreamReader(is));
) {
String keyword;
while ((keyword = reader.readLine()) != null){
this.addKeyWord(keyword);
}
} catch (IOException e) {
logger.error("加载敏感词失败:" + e.getMessage());
}
}
@PostConstruct
:Java提供的注解,被该注解修饰的方法会在服务器加载Servlet时运行,并且只会被服务器执行一次,在构造函数之后执行,init()方法之前执行
在Bean中初始化执行顺序:
Constructor(构造方法) --> @Autowired(依赖注入) --> @PostConstruct(注释的方法)
将敏感词添加到前缀树中
/**
 * Inserts one sensitive word into the prefix tree, creating nodes on demand
 * and marking the last character's node as a word end.
 */
private void addKeyWord(String keyword) {
TrieNode tempNode = rootNode;
for (int i = 0; i < keyword.length(); i++) {
char c = keyword.charAt(i);
TrieNode childNode = tempNode.getChildNode(c);
// Create the child for this character if it does not exist yet.
if(childNode == null){
childNode = new TrieNode();
tempNode.addChildNode(c,childNode);
}
// Descend for the next character.
tempNode = childNode;
// Last character: mark the end of a complete sensitive word.
if (i == keyword.length() - 1){
tempNode.setKeyEnd(true);
}
}
}
开始过滤敏感词,使用指针来遍历每一个字符。
第一个指针:指向树
第二个指针:指向要判断字符串的头
第三个指针:指向这个字符串的结尾
/**
 * Replaces every sensitive word in {@code text} with REPLACEMENT, skipping
 * interleaved symbols (e.g. "f*o*o"). Returns null for blank input.
 *
 * Fix: the original called tempNode.getSubNode(c) and read the field
 * tempNode.isKeywordEnd, but the TrieNode class defined above declares
 * getChildNode(c) and isKeyEnd() — the snippet could not have compiled.
 *
 * Three cursors: the trie node being matched, the start of the candidate
 * word (begin), and the character under examination (position).
 */
public String filter(String text){
if(StringUtils.isBlank(text)){
return null;
}
// Cursor 1: current trie node (starts at the root).
TrieNode tempNode = rootNode;
// Cursor 2: start of the current candidate word.
int begin = 0;
// Cursor 3: character currently examined.
int position = 0;
// Filtered output.
StringBuilder sb = new StringBuilder();
while(position < text.length()){
char c = text.charAt(position);
if(isSymbol(c)){
// Symbol before any match starts: keep it verbatim and advance begin.
if(tempNode == rootNode){
sb.append(c);
begin++;
}
// Leading or embedded, the scan cursor always steps past a symbol.
position++;
continue;
}
// Descend into the child node for c.
tempNode = tempNode.getChildNode(c);
if(tempNode == null){
// No sensitive word starts at begin: keep that char, restart after it.
sb.append(text.charAt(begin));
position = ++begin;
// Back to the root for the next candidate.
tempNode = rootNode;
}else if(tempNode.isKeyEnd()){
// Sensitive word found: replace text[begin..position] wholesale.
sb.append(REPLACEMENT);
begin = ++position;
// Back to the root for the next candidate.
tempNode = rootNode;
}else{
// Partial match so far: examine the next character.
position++;
}
}
// Append the tail that never completed a match.
sb.append(text.substring(begin));
return sb.toString();
}
/** Shows a post's detail page together with its author. */
@RequestMapping(path = "/detail/{discussPostId}", method = RequestMethod.GET)
public String getDiscussPost(@PathVariable("discussPostId") int discussPostId, Model model) {
// The post itself.
DiscussPost post = discussPostService.findDiscussPostById(discussPostId);
model.addAttribute("post", post);
// NOTE(review): post is not null-checked — an unknown id NPEs on getUserId().
// The post's author.
User user = userService.findUserById(post.getUserId());
model.addAttribute("user", user);
return "/site/discuss-detail";
}
声明式事务通过使用注解
@Transactional(isolation = Isolation.READ_COMMITTED, propagation = Propagation.REQUIRED)
显示评论
@GetMapping("/detail/{discussPostId}")
public String getDiscussPost(@PathVariable("discussPostId") int discussPostId, Model model, Page page){
}
发布评论,使用事务保证其一致性,同时还要过滤敏感词
// READ_COMMITTED + REQUIRED: insert the comment and refresh the post's
// denormalized comment_count inside one transaction so they stay consistent.
@Transactional(isolation = Isolation.READ_COMMITTED,propagation = Propagation.REQUIRED)
public int addComment(Comment comment){
if(comment == null){
throw new IllegalArgumentException("参数不能为空!");
}
// Escape HTML first, then strip sensitive words.
comment.setContent(HtmlUtils.htmlEscape(comment.getContent()));
comment.setContent(sensitiveFilter.filter(comment.getContent()));
int rows = commentMapper.insertComment(comment);
// Only comments on posts affect discuss_post.comment_count.
if(comment.getEntityType() == ENTITY_TYPE_POST){
int count = commentMapper.selectCountByEntity(comment.getEntityType(),comment.getEntityId());
discussPostService.updateCommentCount(comment.getEntityId(),count);
}
return rows;
}
点赞属于一个频繁的操作,我们将点赞的数据记录进Redis中。
引入依赖
<!-- redis starter: no explicit <version> — let Spring Boot's dependency
     management choose the version matching the Boot release. Pinning
     1.5.7.RELEASE conflicts with the Boot 2.x starters used elsewhere in
     these notes (spring-boot-starter-quartz exists only since Boot 2.0). -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
配置Redis
#redis
spring.redis.database=11
spring.redis.host=localhost
spring.redis.port=6379
将对象的状态信息转为存储或传输的形式需要序列化。
redis还是nosql 其数据类型在Java中没有对应 所以将其序列化
// Redis template with String keys and JSON-serialized values so Java objects
// can be stored and read back without native Java serialization.
@Configuration
public class RedisConfig {
@Bean
// NOTE(review): @ConditionalOnSingleCandidate is unusual on a plain @Bean
// method — it normally guards auto-configuration classes; confirm it is needed.
@ConditionalOnSingleCandidate
public RedisTemplate<String,Object> redisTemplate(RedisConnectionFactory factory){
RedisTemplate<String,Object> template = new RedisTemplate<>();
template.setConnectionFactory(factory);
// Keys (and hash keys) as plain strings; values (and hash values) as JSON.
template.setKeySerializer(RedisSerializer.string());
template.setValueSerializer(RedisSerializer.json());
template.setHashKeySerializer(RedisSerializer.string());
template.setHashValueSerializer(RedisSerializer.json());
template.afterPropertiesSet();
return template;
}
}
RedisKey的设计是Redis中的关键!应尽量设计复用性高的key。
private static final String SPLIT=":";
private static final String PREFIX_ENTITY_LIKE = "like:entity";
//like:entity:entityType:entityId -> set(userId)
public static String getEntityLikeKey(int entityType,int entityId){
return PREFIX_ENTITY_LIKE + SPLIT + entityType + SPLIT + entityId;
}
点赞数量
// Toggle a like: membership of userId in the entity's Redis set records it.
// NOTE(review): the isMember check and the remove/add are not atomic — this
// version is superseded by the transactional rewrite later in these notes.
public void like(int userId, int entityType, int entityId) {
String entityLikeKey = RedisKeyUtil.getEntityLikeKey(entityType, entityId);
boolean isMember = redisTemplate.opsForSet().isMember(entityLikeKey, userId);
if (isMember) {
redisTemplate.opsForSet().remove(entityLikeKey, userId);
} else {
redisTemplate.opsForSet().add(entityLikeKey, userId);
}
}
// 查询某实体点赞的数量
public long findEntityLikeCount(int entityType, int entityId) {
String entityLikeKey = RedisKeyUtil.getEntityLikeKey(entityType, entityId);
return redisTemplate.opsForSet().size(entityLikeKey);
}
// 查询某人对某实体的点赞状态
public int findEntityLikeStatus(int userId, int entityType, int entityId) {
String entityLikeKey = RedisKeyUtil.getEntityLikeKey(entityType, entityId);
return redisTemplate.opsForSet().isMember(entityLikeKey, userId) ? 1 : 0;
}
private static final String PREFIX_USER_LIKE = "like:user";
// like:user:userId -> int
public static String getUserLikeKey(int userId) {
return PREFIX_USER_LIKE + SPLIT + userId;
}
重构like()方法
/**
 * Toggles userId's like on an entity and keeps the entity owner's total
 * like counter in step, inside one Redis MULTI/EXEC transaction.
 */
public void like(int userId,int entityType,int entityId ,int entityUserId){
// Redis transaction via SessionCallback.
redisTemplate.execute(new SessionCallback() {
@Override
public Object execute(RedisOperations operations) throws DataAccessException {
String entityLikeKey = RedisKeyUtil.getEntityLikeKey(entityType,entityId);
String userLikeKey = RedisKeyUtil.getUserLikeKey(entityUserId);
// Must be read BEFORE multi(): commands queued inside a Redis
// transaction return no values until exec().
boolean isMember = operations.opsForSet().isMember(entityLikeKey,userId);
operations.multi();
if(isMember){
// Already liked -> unlike and decrement the owner's counter.
operations.opsForSet().remove(entityLikeKey,userId);
operations.opsForValue().decrement(userLikeKey);
}else {
// Not yet liked -> like and increment the owner's counter.
operations.opsForSet().add(entityLikeKey,userId);
operations.opsForValue().increment(userLikeKey);
}
return operations.exec();
}
});
}
设置拦截器,在用户登录后才能查看未读消息数量、访问私信列表。
// After each request, injects the logged-in user's total unread count
// (private letters + notices) into the model for the page header.
@Component
public class MessageInterceptor implements HandlerInterceptor {
@Autowired
private HostHolder hostHolder;
@Autowired
private MessageService messageService;
@Override
public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
User user = hostHolder.getUser();
// Only when logged in and a view will actually be rendered.
if (user != null && modelAndView != null) {
int letterUnreadCount = messageService.findLetterUnreadCount(user.getId(), null);
int noticeUnreadCount = messageService.findNoticeUnreadCount(user.getId(), null);
modelAndView.addObject("allUnreadCount", letterUnreadCount + noticeUnreadCount);
}
}
}
/**
 * Shows one private conversation, paginated (5 letters per page), bundling
 * each letter with its sender, plus the conversation partner.
 */
@RequestMapping(path = "/letter/detail/{conversationId}",method = RequestMethod.GET)
public String getLetterDetail(@PathVariable("conversationId") String conversationId,Page page,Model model){
// Paging setup.
page.setLimit(5);
page.setPath("/letter/detail/"+conversationId);
page.setRows(messageService.findLetterCount(conversationId));
// Letters of this conversation, each paired with its sender.
List<Message> letterList=messageService.findLetters(conversationId,page.getOffset(),page.getLimit());
List<Map<String,Object>> letters=new ArrayList<>();
if(letterList!=null){
for (Message message:letterList){
Map<String,Object> map=new HashMap<>();
map.put("letter",message);
map.put("fromUser",userService.findUserById(message.getFromId()));
letters.add(map);
}
}
model.addAttribute("letters",letters);
// The other participant of the conversation.
model.addAttribute("target",getLetterTarget(conversationId));
return "/site/letter-detail";
}
/**
 * Resolves the conversation partner: of the two ids in "id0_id1", returns
 * the user who is NOT the current user.
 */
private User getLetterTarget(String conversationId){
String[] ids=conversationId.split("_");
int id0=Integer.parseInt(ids[0]);
int id1=Integer.parseInt(ids[1]);
if (hostHolder.getUser().getId()==id0){
return userService.findUserById(id1);
}else {
return userService.findUserById(id0);
}
}
发送私信同时也要使用异步的方式。
创建message对象,补充相关内容,拼接conversationId的时候把小的拼在前面
/**
 * Sends a private letter (AJAX). The conversation id is "smallerId_biggerId"
 * so both directions of a chat map to the same conversation.
 * Returns {"code":0} on success, {"code":1,...} for an unknown recipient.
 */
@PostMapping("/letter/send")
@ResponseBody
public String sendLetter(String toName, String content) {
User target = userService.findUserByName(toName);
if (target == null) {
return CommunityUtil.getJSONString(1, "目标用户不存在!");
}
Message message = new Message();
message.setFromId(hostHolder.getUser().getId());
message.setToId(target.getId());
// Smaller id first, so A->B and B->A share one conversation id.
if (message.getFromId() < message.getToId()) {
message.setConversationId(message.getFromId() + "_" + message.getToId());
} else {
message.setConversationId(message.getToId() + "_" + message.getFromId());
}
message.setContent(content);
message.setCreateTime(new Date());
// NOTE(review): status is left unset and content is stored as-is here —
// confirm unread queries treat the default as unread and that filtering/
// escaping happens inside messageService.addMessage.
messageService.addMessage(message);
return CommunityUtil.getJSONString(0);
}
系统通知并发量过大会导致队列阻塞,同时还存在这样的情况:生产者产生的消息分发给多个消费者,并且每个消费者都要接收到完整的消息内容。这种情况下我们就可以使用Kafka
消息队列来处理。
Kafka是一种发布-订阅模型。使用主题(Topic)作为消息通信载体,类似广播模式。
需要发送通知的事件:评论、点赞、关注。
启动 Zookeeper 服务, 默认端口 2181
bin\windows\zookeeper-server-start.bat config\zookeeper.properties
启动 Kafka 服务,默认端口 9092
bin\windows\kafka-server-start.bat config\server.properties
创建 Topic
bin\windows\kafka-topics.bat --create --bootstrap-server localhost:9092 --replication-factor 1 --partitions 1 --topic test
该命令 表示创建一个 名为 test 的主题,1 个副本,1个分区
启动生产者
bin\windows\kafka-console-producer.bat --broker-list localhost:9092 --topic test
该命令可以从控制台获取输入,将其作为 消息 存储至 kafka, 再由消费者读取,一次回车代表一条消息
启动消费者
bin\windows\kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test --from-beginning
配置
# kafka
spring.kafka.bootstrap-servers=localhost:9092
spring.kafka.consumer.group-id=test-consumer-group
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-commit-interval=3000
生产者
// Kafka producer: serializes an Event to JSON and publishes it to the
// event's own topic (comment / like / follow, per the constants below).
@Component
public class EventProducer {
@Resource
private KafkaTemplate kafkaTemplate;
/** Publishes the event to its topic as a JSON payload. */
public void fireEvent(Event event){
kafkaTemplate.send(event.getTopic(), JSONObject.toJSONString(event));
}
}
消费者
/**
* 主题: 评论
*/
String TOPIC_COMMENT = "comment";
/**
* 主题: 点赞
*/
String TOPIC_LIKE = "like";
/**
* 主题: 关注
*/
String TOPIC_FOLLOW = "follow";
============================================================================
/**
 * Consumes comment/like/follow events and turns each into a system notice
 * in the message table: from SYSTEM_USER_ID, conversation id = topic name,
 * content = the event payload as JSON.
 */
@KafkaListener(topics = {TOPIC_COMMENT, TOPIC_LIKE, TOPIC_FOLLOW})
public void handleCommentMessage(ConsumerRecord record){
if(record == null || record.value() == null){
logger.error("消息内容为空!");
return;
}
Event event = JSONObject.parseObject(record.value().toString(),Event.class);
if(event == null){
logger.error("消息格式错误!");
// Bug fix: the original fell through and dereferenced the null event below.
return;
}
// Build the in-site notification.
Message message = new Message();
message.setFromId(SYSTEM_USER_ID);
message.setToId(event.getEntityUserId());
message.setConversationId(event.getTopic());
message.setCreateTime(new Date());
// Payload shown by the notice page: who did what to which entity.
Map<String, Object> content = new HashMap<>();
content.put("userId", event.getUserId());
content.put("entityType", event.getEntityType());
content.put("entityId", event.getEntityId());
// Copy any extra data carried by the event.
if (!event.getData().isEmpty()) {
for (Map.Entry<String, Object> entry : event.getData().entrySet()) {
content.put(entry.getKey(), entry.getValue());
}
}
message.setContent(JSONObject.toJSONString(content));
messageService.addMessage(message);
}
在项目中使用到了@ControllerAdvice
,@ExceptionHandler
两个注解
// Global exception handler for all @Controller beans: logs the failure,
// answers AJAX calls with a JSON error body, and redirects ordinary page
// navigations to /error.
@ControllerAdvice(annotations = Controller.class)
public class ExceptionAdvice {
private static final Logger logger = LoggerFactory.getLogger(ExceptionAdvice.class);
@ExceptionHandler({Exception.class})
public void handleException(Exception e, HttpServletRequest request, HttpServletResponse response) throws IOException{
logger.error("服务器发生异常:" + e.getMessage());
for (StackTraceElement element: e.getStackTrace()) {
logger.error(element.toString());
}
// AJAX requests identify themselves via this header.
String xRequestedWith = request.getHeader("x-requested-with");
if("XMLHttpRequest".equals(xRequestedWith)){
// NOTE(review): "application/plain" is not a registered MIME type —
// "text/plain" (or application/json) is likely intended; confirm the
// front-end parses the body manually before changing it.
response.setContentType("application/plain;charset=utf-8");
PrintWriter writer = response.getWriter();
writer.write(CommunityUtil.getJSONString(1,"服务器异常!"));
}else {
response.sendRedirect(request.getContextPath() + "/error");
}
}
}
统一日志管理并不是业务功能,而是系统功能。业务功能在想对记录日志的位置进行改变时,需要一个一个修改业务bean,比较麻烦。
因此我们引入AOP
(面向切面),只要声明切点的位置,再去通知要做什么事。这样,我们只需要面向切面编程即可。
Aspect切面:
注解:
@Component
、@Aspect
声明切点的位置:
pointcut()
切入时间:
@Before
@After AfterReturning
@AfterThrowing``@Around
// AOP aspect: logs every call into the service layer (who, when, what),
// keeping logging concerns out of the business beans themselves.
@Component
@Aspect
public class ServiceLogAspect {
private static final Logger logger = LoggerFactory.getLogger(ServiceLogAspect.class);
// Pointcut: every method of every class in the service package.
@Pointcut("execution(* com.example.community.service.*.*(..))")
public void pointcut(){}
@Before("pointcut()")
public void before(JoinPoint joinPoint){
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
// Null when the call did not come through a web request (e.g. a Kafka
// consumer invoking a service) — nothing useful to log in that case.
if(attributes == null){
return;
}
HttpServletRequest request = attributes.getRequest();
String ip = request.getRemoteHost();
String now = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date());
String target = joinPoint.getSignature().getDeclaringTypeName() + "." + joinPoint.getSignature().getName();
logger.info(String.format("[用户:%s],在[%s],访问了[%s]",ip,now,target));
}
}
es是一个开源的高扩展的分布式全文检索引擎,它可以近乎实时的存储、检索数据;本
身扩展性很好,可以扩展到上百台服务器,处理PB级别的数据。
在本项目中,整合es来实现搜索功能。
引入依赖
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
配置Elasticsearch
// Runs once after bean construction, before Elasticsearch clients start.
@PostConstruct
public void init(){
// Work around the Netty startup conflict between Redis (Lettuce) and the
// Elasticsearch transport client, which both try to set this processor count.
System.setProperty("es.set.netty.runtime.available.processors", "false");
}
# es
elasticSearch.url=127.0.0.1:9200
对DiscussPost实体类作处理
// Forum post entity, doubly mapped: MyBatis-Plus table entity and Spring Data
// Elasticsearch document (index "discusspost", 6 shards, 3 replicas).
// NOTE(review): Spring Data ES normally expects an @Id-annotated field for the
// document id; only the MyBatis-Plus @TableId is present here - confirm the id
// mapping works as intended.
@Document(indexName = "discusspost", shards = 6, replicas = 3)
public class DiscussPost {
@TableId(value = "id",type = IdType.AUTO)
private Integer id;
// Id of the posting user.
@Field(type = FieldType.Integer)
private int userId;
// Title: indexed with the fine-grained ik_max_word analyzer, searched with
// the coarser ik_smart analyzer.
@Field(type = FieldType.Text, analyzer = "ik_max_word", searchAnalyzer = "ik_smart")
private String title;
// Post body, analyzed the same way as the title.
@Field(type = FieldType.Text, analyzer = "ik_max_word", searchAnalyzer = "ik_smart")
private String content;
// 0 = normal post, 1 = pinned (per the table DDL comment).
@Field(type = FieldType.Integer)
private int type;
// 0 = normal, 1 = highlighted ("essence"), 2 = blocked (per the table DDL comment).
@Field(type = FieldType.Integer)
private int status;
// NOTE(review): basic_date is the date-only yyyyMMdd format, so the time of
// day is lost in the index - confirm this is acceptable for sorting.
@Field(type = FieldType.Date,format = DateFormat.basic_date)
private Date createTime;
// Denormalized comment count kept on the post for cheap list queries.
@Field(type = FieldType.Integer)
private int commentCount;
// Ranking weight used to order hot posts.
@Field(type = FieldType.Double)
private double score;
}
创建一个Repository接口用于操作,继承ElasticsearchRepository,由Spring Data自动提供CRUD实现。
/**
 * Spring Data Elasticsearch repository for {@code DiscussPost} documents
 * (Integer id). CRUD implementations are generated automatically.
 */
@Repository
public interface DiscussPostRepository extends ElasticsearchRepository<DiscussPost,Integer> {
}
在发布事件中也需要同步到es服务器。
发布帖子时,将帖子异步提交到Elasticsearch服务器。
新建ElasticsearchService类,定义CRUD和搜索方法。
// Service wrapping all Elasticsearch access: index/delete of posts and
// highlighted full-text search over title and content.
@Service
public class ElasticsearchService {
    @Autowired
    private DiscussPostRepository discussRepository;
    @Resource
    private ElasticsearchOperations elasticsearchOperations;

    /** Indexes (or re-indexes) one post document. */
    public void saveDiscussPost(DiscussPost post) {
        discussRepository.save(post);
    }

    /** Removes the post document with the given id from the index. */
    public void deleteDiscussPost(int id) {
        discussRepository.deleteById(id);
    }

    /**
     * Full-text search over post title and content with highlighting.
     *
     * @param keyword the search terms
     * @param current zero-based page number
     * @param limit   page size
     * @return one page of matching posts, title/content replaced by their
     *         highlighted fragments where available
     */
    public SearchPage<DiscussPost> searchDiscussPost(String keyword, int current, int limit) {
        NativeSearchQuery searchQueryBuilder = new NativeSearchQueryBuilder()
                .withQuery(QueryBuilders.multiMatchQuery(keyword, "title", "content"))
                // Pinned first, then by weight, then newest.
                .withSorts(SortBuilders.fieldSort("type").order(SortOrder.DESC),
                        (SortBuilders.fieldSort("score").order(SortOrder.DESC)),
                        (SortBuilders.fieldSort("createTime").order(SortOrder.DESC)))
                .withPageable(PageRequest.of(current, limit))
                .withHighlightFields(
                        new HighlightBuilder.Field("title").preTags("").postTags(""),
                        new HighlightBuilder.Field("content").preTags("").postTags("")
                ).build();
        // Run the query.
        SearchHits<DiscussPost> search = elasticsearchOperations.search(searchQueryBuilder, DiscussPost.class);
        // Wrap the hits in a page. BUGFIX: use the pageable the query was built
        // with - the original passed Page.empty().getPageable() (unpaged), so
        // the returned SearchPage ignored the requested page/size.
        SearchPage<DiscussPost> page = SearchHitSupport.searchPageFor(search, searchQueryBuilder.getPageable());
        if (!page.isEmpty()) {
            for (SearchHit<DiscussPost> discussPostSearch : page) {
                DiscussPost discussPost = discussPostSearch.getContent();
                // Substitute the highlighted fragment for the raw field, if any.
                List<String> title = discussPostSearch.getHighlightFields().get("title");
                if (title != null) {
                    discussPost.setTitle(title.get(0));
                }
                List<String> content = discussPostSearch.getHighlightFields().get("content");
                if (content != null) {
                    discussPost.setContent(content.get(0));
                }
            }
        }
        return page;
    }
}
在DiscussPostController类发帖时,定义和触发发帖事件(Event、eventProducer.fireEvent(event))
发帖 --> 发送帖子Id到队列 --> 队列处理放入es
// Fire a post-publish event so the new post is asynchronously indexed in Elasticsearch.
Event event = new Event()
        .setTopic(TOPIC_PUBLISH)
        .setUserId(user.getId())
        // BUGFIX: the entity TYPE must go through setEntityType. The original
        // called setEntityId(ENTITY_TYPE_POST), which left the type unset and
        // was immediately overwritten by the next setEntityId call.
        .setEntityType(ENTITY_TYPE_POST)
        .setEntityId(post.getId());
eventProducer.fireEvent(event);
增加评论时,将帖子异步提交到Elasticsearch服务器。
在CommentController类发表评论时,定义和触发发帖事件
// Fire a comment event: the owner of the commented entity gets a system notification.
Event event = new Event()
        .setTopic(TOPIC_COMMENT)
        .setUserId(hostHolder.getUser().getId())
        .setEntityType(comment.getEntityType())
        .setEntityId(comment.getEntityId())
        .setData("postId", discussPostId);
// Resolve the notification target: the post author or the parent comment author.
int entityType = comment.getEntityType();
if (entityType == ENTITY_TYPE_POST) {
    DiscussPost target = discussPostService.findDiscussPostById(comment.getEntityId());
    event.setEntityUserId(target.getUserId());
} else if (entityType == ENTITY_TYPE_COMMENT) {
    Comment target = commentService.findCommentById(comment.getEntityId());
    event.setEntityUserId(target.getUserId());
}
eventProducer.fireEvent(event);
// A comment on a post changes the post's comment count, so re-publish the
// post to keep the Elasticsearch index in sync.
if (entityType == ENTITY_TYPE_POST) {
    eventProducer.fireEvent(new Event()
            .setTopic(TOPIC_PUBLISH)
            .setUserId(comment.getUserId())
            .setEntityType(ENTITY_TYPE_POST)
            .setEntityId(discussPostId));
}
在消费组件中增加一个方法,消费帖子发布事件。
在EventConsumer类增加消费发帖事件的方法
/**
 * Kafka consumer for the publish topic: loads the referenced post from the
 * database and (re-)indexes it in Elasticsearch.
 *
 * @param record the consumed Kafka record carrying a JSON-serialized Event
 */
@KafkaListener(topics = {TOPIC_PUBLISH})
public void handlePublishMessage(ConsumerRecord record){
    if(record == null || record.value() == null){
        logger.error("消息的内容为空!");
        return;
    }
    Event event = JSONObject.parseObject(record.value().toString(),Event.class);
    if(event == null){
        logger.error("消息格式错误!");
        // BUGFIX: must bail out here; the original fell through and
        // dereferenced the null event below, throwing a NullPointerException.
        return;
    }
    DiscussPost post = discussPostService.findDiscussPostById(event.getEntityId());
    elasticsearchService.saveDiscussPost(post);
}
在事件中查询帖子,存到Es服务器
添加配置
# wk
wk.image.command=D:/developer_tools/wkhtmltopdf/bin/wkhtmltoimage
wk.image.storage=d:/Java/data/wk-images
// Creates the wkhtmltoimage output directory at startup so image generation
// never fails on a missing target folder.
@Configuration
public class WkConfig {
    private static final Logger logger = LoggerFactory.getLogger(WkConfig.class);

    // Directory where generated share images are written (from application properties).
    @Value("${wk.image.storage}")
    private String wkImageStorage;

    /**
     * Ensures the WK image directory exists before the app serves requests.
     */
    @PostConstruct
    public void init() {
        // 创建WK图片目录
        File file = new File(wkImageStorage);
        if (!file.exists()) {
            // mkdirs() (not mkdir()) so missing parent directories are created
            // too, and the boolean result is checked instead of assuming success.
            if (file.mkdirs()) {
                logger.info("创建WK图片目录: " + wkImageStorage);
            } else {
                logger.error("创建WK图片目录失败: " + wkImageStorage);
            }
        }
    }
}
编写逻辑
// Kicks off asynchronous long-image generation for the given page and
// immediately returns the URL the rendered image will be served from.
@RequestMapping(path = "/share", method = RequestMethod.GET)
@ResponseBody
public String share(String htmlUrl) {
    // Random name under which the generated image file will be stored.
    String fileName = CommunityUtil.generateUUID();

    // Publish a share event; a Kafka consumer renders the image later.
    Event shareEvent = new Event()
            .setTopic(TOPIC_SHARE)
            .setData("htmlUrl", htmlUrl)
            .setData("fileName", fileName)
            .setData("suffix", ".png");
    eventProducer.fireEvent(shareEvent);

    // Hand the caller the future location of the image.
    Map<String, Object> result = new HashMap<>();
    result.put("shareUrl", domain + contextPath + "/share/image/" + fileName);
    return CommunityUtil.getJSONString(0, null, result);
}
/**
 * Streams a previously generated share image to the client as image/png.
 *
 * @param fileName image file name (UUID issued by {@code share}), no suffix
 * @param response response the PNG bytes are written to
 * @throws IllegalArgumentException if fileName is blank
 */
@RequestMapping(path = "/share/image/{fileName}", method = RequestMethod.GET)
public void getShareImage(@PathVariable("fileName") String fileName, HttpServletResponse response) {
    if (StringUtils.isBlank(fileName)) {
        throw new IllegalArgumentException("文件名不能为空!");
    }
    response.setContentType("image/png");
    File file = new File(wkImageStorage + "/" + fileName + ".png");
    // BUGFIX: try-with-resources closes the streams even on error; the
    // original leaked the FileInputStream on every request.
    try (OutputStream os = response.getOutputStream();
         FileInputStream fis = new FileInputStream(file)) {
        byte[] buffer = new byte[1024];
        int b;
        while ((b = fis.read(buffer)) != -1) {
            os.write(buffer, 0, b);
        }
    } catch (IOException e) {
        logger.error("获取长图失败: " + e.getMessage());
    }
}
加入消费队列
/**
 * Kafka consumer for the share topic: invokes wkhtmltoimage to render the
 * given page into a PNG in the configured storage directory.
 *
 * @param record the consumed Kafka record carrying a JSON-serialized Event
 */
@KafkaListener(topics = TOPIC_SHARE)
public void handleShareMessage(ConsumerRecord record) {
    if (record == null || record.value() == null) {
        logger.error("消息的内容为空!");
        return;
    }
    Event event = JSONObject.parseObject(record.value().toString(), Event.class);
    if (event == null) {
        logger.error("消息格式错误!");
        return;
    }
    String htmlUrl = (String) event.getData().get("htmlUrl");
    String fileName = (String) event.getData().get("fileName");
    String suffix = (String) event.getData().get("suffix");
    // Pass the command as an argument vector: the original single-string
    // concatenation broke on paths containing spaces and allowed argument
    // injection through the event-supplied htmlUrl.
    String[] cmd = {wkImageCommand, "--quality", "75",
            htmlUrl, wkImageStorage + "/" + fileName + suffix};
    try {
        // exec() returns immediately; wait for completion so the success log
        // below is truthful (the original logged success unconditionally).
        Process process = Runtime.getRuntime().exec(cmd);
        if (process.waitFor() == 0) {
            logger.info("生成长图成功: " + String.join(" ", cmd));
        } else {
            logger.error("生成长图失败: " + String.join(" ", cmd));
        }
    } catch (IOException e) {
        logger.error("生成长图失败: " + e.getMessage());
    } catch (InterruptedException e) {
        // Restore the interrupt flag so the consumer thread can shut down cleanly.
        Thread.currentThread().interrupt();
        logger.error("生成长图失败: " + e.getMessage());
    }
}
为了节省服务器的资源,同时也可以记录用户的历史记录头像,可以将上传头像和上传截图这两个资源放到七牛云服务器中。
添加依赖
添加配置
# qiniu
qiniu.key.ak=
qiniu.key.sk=
qiniu.bucket.header.name=lupeishi-community-header
qiniu.bucket.header.url=http://rs9z1cnc1.hb-bkt.clouddn.com
qiniu.bucket.share.name=lupeishi-community-share
qiniu.bucket.share.url=http://rsa0c2v6h.hb-bkt.clouddn.com
### 上传头像(客户端上传)
@LoginRequired
@GetMapping("/setting")
public String getSettingPage(Model model) {
//上传文件名称
String fileName = CommunityUtil.generateUUID();
//设置响应信息
StringMap policy = new StringMap();
policy.put("returnBody", CommunityUtil.getJSONString(0));
//生成上传凭证
Auth auth = Auth.create(accessKey, secretKey);
String uploadToken = auth.uploadToken(headerBucketName, fileName, 3600, policy);
model.addAttribute("uploadToken", uploadToken);
model.addAttribute("fileName", fileName);
return "/site/setting";
}
由于SpringSecurity不支持带有双斜线的访问路径,所以要先配置。
// Spring Security's default StrictHttpFirewall rejects URLs containing an
// encoded slash; relax that single rule so such paths can still be served.
// NOTE(review): the surrounding text mentions double slashes, but this setter
// controls encoded single slashes (%2F) - confirm which was intended.
@Configuration
public class FirewallConfig {

    @Bean
    public HttpFirewall allowUrlEncodedSlashHttpFirewall() {
        StrictHttpFirewall relaxedFirewall = new StrictHttpFirewall();
        relaxedFirewall.setAllowUrlEncodedSlash(true);
        return relaxedFirewall;
    }
}
// Kicks off asynchronous long-image generation and returns the URL the
// rendered image will be served from.
@GetMapping("/share")
@ResponseBody
public String share(String htmlUrl) {
// 文件名
String fileName = CommunityUtil.generateUUID();
// 异步生成长图
Event event = new Event()
.setTopic(TOPIC_SHARE)
.setData("htmlUrl", htmlUrl)
.setData("fileName", fileName)
.setData("suffix", ".png");
eventProducer.fireEvent(event);
// 返回访问路径
Map<String, Object> map = new HashMap<>();
// NOTE(review): unlike the earlier version of this handler, the URL omits
// contextPath (domain + "/share/image/"); if the app runs under a non-root
// context path this link is wrong - confirm which form is intended.
map.put("shareUrl", domain + "/share/image/" + fileName);
return CommunityUtil.getJSONString(0, null, map);
}
同样,要将保存截图加入消费者队列中。
添加依赖
修改配置文件
# caffeine
caffeine.posts.max-size=15
caffeine.posts.expire-seconds=180
初始化
/**
 * Initializes the two Caffeine caches after bean construction: a cache of hot
 * post pages (keyed "offset:limit") and a cache of the total post count, both
 * loading from the database on miss and expiring a fixed time after write.
 */
@PostConstruct
public void init() {
    // Post-list cache: on a miss the loader parses the key and queries the DB.
    postListCache = Caffeine.newBuilder()
            .maximumSize(maxSize)
            .expireAfterWrite(expiredSeconds, TimeUnit.SECONDS)
            .build(new CacheLoader<String, List<DiscussPost>>() {
                @Nullable
                @Override
                public List<DiscussPost> load(@NonNull String key) throws Exception {
                    if (key == null || key.length() == 0) {
                        throw new IllegalArgumentException("参数错误!");
                    }
                    // String.split never returns null, so only the shape of the
                    // key ("offset:limit") needs validating.
                    String[] params = key.split(":");
                    if (params.length != 2) {
                        throw new IllegalArgumentException("参数错误!");
                    }
                    // parseInt avoids the needless Integer boxing of Integer.valueOf.
                    int offset = Integer.parseInt(params[0]);
                    int limit = Integer.parseInt(params[1]);
                    // TODO: add a second-level cache: Redis -> MySQL
                    logger.debug("load post list from DB.");
                    // NOTE(review): assumes the mapper args are
                    // (userId=0, offset, limit, orderMode=1) so that only the
                    // hot-ordered front page is cached - confirm against the mapper.
                    return discussPostMapper.selectDiscussPosts(0, offset, limit, 1);
                }
            });
    // Post-count cache, keyed by userId.
    postRowsCache = Caffeine.newBuilder()
            .maximumSize(maxSize)
            .expireAfterWrite(expiredSeconds, TimeUnit.SECONDS)
            .build(new CacheLoader<Integer, Integer>() {
                @Nullable
                @Override
                public Integer load(@NonNull Integer key) throws Exception {
                    logger.debug("load post rows from DB.");
                    return discussPostMapper.selectDiscussPostRows(key);
                }
            });
}
使用Redis缓存
Jmeter测试