黑马头条1
Login logic
@Override
public ResponseResult login(LoginDto loginDto) {
    // normal login requires both phone and password
    if (!StringUtils.isAnyBlank(loginDto.getPhone(), loginDto.getPassword())) {
        ApUser apUser = this.lambdaQuery()
                .eq(ApUser::getPhone, loginDto.getPhone())
                .one();
        if (apUser == null) {
            return ResponseResult.errorResult(AppHttpCodeEnum.DATA_NOT_EXIST, "用户不存在");
        }
        // verify the salted MD5 password
        String salt = apUser.getSalt();
        String password = loginDto.getPassword();
        String encryptPassword = DigestUtils.md5DigestAsHex((password + salt).getBytes());
        if (!encryptPassword.equals(apUser.getPassword())) {
            return ResponseResult.errorResult(AppHttpCodeEnum.LOGIN_PASSWORD_ERROR);
        }
        // credentials are correct: return the JWT plus the user without sensitive fields
        HashMap<String, Object> map = new HashMap<>();
        String token = AppJwtUtil.getToken(apUser.getId().longValue());
        map.put("token", token);
        apUser.setPassword("");
        apUser.setSalt("");
        map.put("user", apUser);
        return ResponseResult.okResult(map);
    }
    // anonymous login: no credentials supplied, issue a token for user id 0
    HashMap<String, Object> map = new HashMap<>();
    String token = AppJwtUtil.getToken(0L);
    map.put("token", token);
    return ResponseResult.okResult(map);
}
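The DTO consumed above only needs the two credential fields. A minimal sketch, with the field names taken from the getters used in the service; Lombok's @Data (already used elsewhere in the project via @Slf4j) is an assumption here:

import lombok.Data;

@Data
public class LoginDto {
    // phone number; may be blank for anonymous login
    private String phone;
    // plain-text password; may be blank for anonymous login
    private String password;
}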
Integrating Swagger
Add the following dependencies to the heima-leadnews-model and heima-leadnews-common modules:
<dependency>
    <groupId>io.springfox</groupId>
    <artifactId>springfox-swagger2</artifactId>
</dependency>
<dependency>
    <groupId>io.springfox</groupId>
    <artifactId>springfox-swagger-ui</artifactId>
</dependency>
Configuration class:
@Configuration
@EnableSwagger2
public class SwaggerConfiguration {

    @Bean
    public Docket buildDocket() {
        return new Docket(DocumentationType.SWAGGER_2)
                .apiInfo(buildApiInfo())
                .select()
                // base package of the controllers (API) to scan
                .apis(RequestHandlerSelectors.basePackage("com.heima"))
                .paths(PathSelectors.any())
                .build();
    }

    private ApiInfo buildApiInfo() {
        Contact contact = new Contact("黑马程序员", "", "");
        return new ApiInfoBuilder()
                .title("黑马头条-平台管理API文档")
                .description("黑马头条后台api")
                .contact(contact)
                .version("1.0.0").build();
    }
}
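With the Docket registered, the generated documentation can be enriched with the standard springfox annotations on a controller. A short illustration; the controller name, request path and injected service are placeholders, only the @Api and @ApiOperation annotations are the point:

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/api/v1/login")
@Api(value = "App user login", tags = "login")
public class ApUserLoginController {

    @Autowired
    private ApUserService apUserService;

    // @ApiOperation describes a single endpoint in the generated docs
    @ApiOperation("App user login")
    @PostMapping("/login_auth")
    public ResponseResult login(@RequestBody LoginDto dto) {
        return apUserService.login(dto);
    }
}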
In the heima-leadnews-common module, create the file resources/META-INF/spring.factories with the following content:
org.springframework.boot.autoconfigure.EnableAutoConfiguration=\
com.heima.common.swagger.SwaggerConfiguration
Swagger UI address: http://localhost:51801/swagger-ui.html
Integrating knife4j
Add the dependency:
<dependency>
    <groupId>com.github.xiaoymin</groupId>
    <artifactId>knife4j-spring-boot-starter</artifactId>
</dependency>
Configuration class:
package com.heima.common.knife4j;

import com.github.xiaoymin.knife4j.spring.annotations.EnableKnife4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import springfox.bean.validators.configuration.BeanValidatorPluginsConfiguration;
import springfox.documentation.builders.ApiInfoBuilder;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.swagger2.annotations.EnableSwagger2;

@Configuration
@EnableSwagger2
@EnableKnife4j
@Import(BeanValidatorPluginsConfiguration.class)
public class Swagger2Configuration {

    @Bean(value = "defaultApi2")
    public Docket defaultApi2() {
        Docket docket = new Docket(DocumentationType.SWAGGER_2)
                .apiInfo(apiInfo())
                // group name
                .groupName("1.0")
                .select()
                // base package of the controllers to scan
                .apis(RequestHandlerSelectors.basePackage("com.heima"))
                .paths(PathSelectors.any())
                .build();
        return docket;
    }

    private ApiInfo apiInfo() {
        return new ApiInfoBuilder()
                .title("黑马头条API文档")
                .description("黑马头条API文档")
                .version("1.0")
                .build();
    }
}
As with Swagger, register the configuration class in spring.factories (the class shown above lives in com.heima.common.knife4j):
com.heima.common.knife4j.Swagger2Configuration
API docs address: http://localhost:51801/doc.html#/home
Gateway
Add the following dependencies to heima-leadnews-gateway:
<dependencies>
    <dependency>
        <groupId>org.springframework.cloud</groupId>
        <artifactId>spring-cloud-starter-gateway</artifactId>
    </dependency>
    <dependency>
        <groupId>com.alibaba.cloud</groupId>
        <artifactId>spring-cloud-starter-alibaba-nacos-discovery</artifactId>
    </dependency>
    <dependency>
        <groupId>com.alibaba.cloud</groupId>
        <artifactId>spring-cloud-starter-alibaba-nacos-config</artifactId>
    </dependency>
    <dependency>
        <groupId>io.jsonwebtoken</groupId>
        <artifactId>jjwt</artifactId>
    </dependency>
</dependencies>
bootstrap.yml
server:
  port: 51601
spring:
  application:
    name: leadnews-app-gateway
  cloud:
    nacos:
      discovery:
        server-addr: 192.168.200.130:8848
      config:
        server-addr: 192.168.200.130:8848
        file-extension: yml
Add the following configuration in Nacos (data id: leadnews-app-gateway):
spring:
  cloud:
    gateway:
      globalcors:
        add-to-simple-url-handler-mapping: true
        corsConfigurations:
          '[/**]':
            allowedHeaders: "*"
            allowedOrigins: "*"
            allowedMethods:
              - GET
              - POST
              - DELETE
              - PUT
              - OPTIONS
      routes:
        # platform management
        - id: user
          uri: lb://leadnews-user
          predicates:
            - Path=/user/**
          filters:
            - StripPrefix=1
Global JWT filter:
@Component
@Slf4j
public class AuthorizeFilter implements Ordered, GlobalFilter {

    @Override
    public Mono<Void> filter(ServerWebExchange exchange, GatewayFilterChain chain) {
        // 1. get the request and response
        ServerHttpRequest request = exchange.getRequest();
        ServerHttpResponse response = exchange.getResponse();
        // 2. let the login endpoint through without a token
        if (request.getURI().getPath().contains("/login")) {
            return chain.filter(exchange);
        }
        // 3. get the token from the request headers
        String token = request.getHeaders().getFirst("token");
        // 4. reject requests without a token
        if (StringUtils.isBlank(token)) {
            response.setStatusCode(HttpStatus.UNAUTHORIZED);
            return response.setComplete();
        }
        // 5. verify the token and check whether it has expired
        try {
            Claims claimsBody = AppJwtUtil.getClaimsBody(token);
            int result = AppJwtUtil.verifyToken(claimsBody);
            if (result == 1 || result == 2) {
                response.setStatusCode(HttpStatus.UNAUTHORIZED);
                return response.setComplete();
            }
        } catch (Exception e) {
            response.setStatusCode(HttpStatus.UNAUTHORIZED);
            return response.setComplete();
        }
        // 6. pass the request on
        return chain.filter(exchange);
    }

    /**
     * The lower the order value, the earlier the filter is invoked.
     */
    @Override
    public int getOrder() {
        return 0;
    }
}
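AppJwtUtil itself is not reproduced in these notes. A rough sketch of what it might look like on top of the jjwt dependency imported above; the signing key, the one-day expiry and the return-code convention (0 = valid, 2 = expired, matching the check in the filter) are assumptions:

import io.jsonwebtoken.Claims;
import io.jsonwebtoken.ExpiredJwtException;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;

import java.util.Date;

public class AppJwtUtil {

    // assumed HS512 signing key and one-day expiry; the real utility may differ
    private static final String SECRET = "change-me";
    private static final long EXPIRE_MILLIS = 24 * 60 * 60 * 1000L;

    // issue a token whose claims carry the user id
    public static String getToken(Long userId) {
        return Jwts.builder()
                .claim("id", userId)
                .setExpiration(new Date(System.currentTimeMillis() + EXPIRE_MILLIS))
                .signWith(SignatureAlgorithm.HS512, SECRET.getBytes())
                .compact();
    }

    // parse the claims; an expired token still yields its claims so the caller can decide
    public static Claims getClaimsBody(String token) {
        try {
            return Jwts.parser().setSigningKey(SECRET.getBytes()).parseClaimsJws(token).getBody();
        } catch (ExpiredJwtException e) {
            return e.getClaims();
        }
    }

    // assumed convention: 0 = still valid, 2 = expired
    public static int verifyToken(Claims claims) {
        return claims.getExpiration().before(new Date()) ? 2 : 0;
    }
}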
Article list: splitting the database tables
Articles are stored across three tables:

| Table | Description |
| --- | --- |
| ap_article | basic article information |
| ap_article_config | article configuration |
| ap_article_content | article content |
Table splitting: vertical partitioning
Vertical partitioning spreads the columns of one table across several tables, each storing a subset of the columns.
Benefits:
1. Less I/O contention and a lower chance of table-level locks: reading an article summary and reading the article detail no longer interfere with each other.
2. Hot data keeps its efficiency: frequent operations on the summary columns are not dragged down by slow operations on the large detail columns.
Splitting rules:
1. Put rarely used columns in a separate table.
2. Move large columns such as text and blob into a separate table.
3. Keep columns that are frequently queried together in the same table.
Article list queries
# first 10 articles ordered by publish time, descending
select *
from ap_article aa
order by aa.publish_time desc
limit 10;

# filter by channel
select *
from ap_article aa
where aa.channel_id = 1
  and aa.publish_time < '2063-04-10 11:00:00'
order by aa.publish_time desc
limit 10;

# load more, e.g. when the current time is 2020-04-10 11:00:00
select *
from ap_article aa
where aa.channel_id = 1
  and aa.publish_time < '2020-04-10 11:00:00'
order by aa.publish_time desc
limit 10;

# load latest
select *
from ap_article aa
left join ap_article_config aac on aa.id = aac.article_id
where aac.is_down != 1
  and aac.is_delete != 1
  and aa.channel_id = 1
  and aa.publish_time > '2020-04-10 11:00:00'
order by aa.publish_time desc
limit 10;
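These queries end up in a MyBatis mapper that the service below calls as apArticleMapper.loadArticleList(dto, type). A minimal sketch of what that interface could look like; the @Mapper/@Param annotations are standard MyBatis, the rest is an assumption based on how the method is used:

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

import java.util.List;

@Mapper
public interface ApArticleMapper extends BaseMapper<ApArticle> {

    /**
     * Load a page of articles. The corresponding XML statement is assumed to switch between
     * "publish_time < #{dto.minBehotTime}" (load more) and
     * "publish_time > #{dto.maxBehotTime}" (load latest) depending on type.
     */
    List<ApArticle> loadArticleList(@Param("dto") ArticleHomeDto dto,
                                    @Param("type") Short type);
}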
Service code:
/**
 * Load the article list according to the parameters.
 *
 * @param type 1 = load more, 2 = load latest
 * @param dto  paging parameters (size, tag, min/max behot time)
 * @return     article list
 */
@Override
public ResponseResult load(ArticleHomeDto dto, Short type) {
    // validate the page size
    Integer size = dto.getSize();
    if (size == null || size <= 0) {
        size = 10;
    }
    size = Math.min(size, MAX_PAGE_SIZE);
    // fall back to "load more" for unknown load types
    if (!type.equals(ArticleConstants.LOADTYPE_LOAD_MORE) && !type.equals(ArticleConstants.LOADTYPE_LOAD_NEW)) {
        type = ArticleConstants.LOADTYPE_LOAD_MORE;
    }
    // default channel tag
    if (StringUtils.isBlank(dto.getTag())) {
        dto.setTag(ArticleConstants.DEFAULT_TAG);
    }
    // default time boundaries
    if (dto.getMinBehotTime() == null) dto.setMinBehotTime(new Date());
    if (dto.getMaxBehotTime() == null) dto.setMaxBehotTime(new Date());
    // query the article list
    List<ApArticle> apArticles = apArticleMapper.loadArticleList(dto, type);
    return ResponseResult.okResult(apArticles);
}
Freemarker templates + MinIO file upload
Add the dependencies:
<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-freemarker</artifactId>
    </dependency>
    <dependency>
        <groupId>com.heima</groupId>
        <artifactId>heima-file-starter</artifactId>
        <version>1.0-SNAPSHOT</version>
    </dependency>
</dependencies>
Add the configuration (it can also be placed in Nacos):
minio:
  accessKey: minio
  secretKey: minio123
  bucket: leadnews
  endpoint: http://localhost:9000
  readPath: http://localhost:9000
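heima-file-starter exposes a FileStorageService that is injected in the test further below. Judging only from that usage, its interface is roughly the following; any methods beyond uploadHtmlFile are not shown here:

import java.io.InputStream;

public interface FileStorageService {

    /**
     * Upload an html page under the given prefix and file name
     * and return the public read path of the stored object.
     */
    String uploadHtmlFile(String prefix, String filename, InputStream inputStream);
}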
Manually upload the css and js files to MinIO:
public static void main(String[] args) throws Exception {
    FileInputStream fileInputStream = new FileInputStream("/tmp/plugins/css/index.css");
    MinioClient minioClient = MinioClient.builder().credentials("minio", "minio123")
            .endpoint("http://localhost:9000").build();
    PutObjectArgs putObjectArgs = PutObjectArgs.builder()
            .object("plugins/css/index.css")                          // object name
            .contentType("text/css")                                  // content type
            .bucket("leadnews")                                       // bucket name, must match the bucket created in MinIO
            .stream(fileInputStream, fileInputStream.available(), -1) // file stream
            .build();
    minioClient.putObject(putObjectArgs);
}
public static void main(String[] args) throws Exception {
    FileInputStream fileInputStream = new FileInputStream("/tmp/plugins/js/index.js");
    MinioClient minioClient = MinioClient.builder().credentials("minio", "minio123")
            .endpoint("http://localhost:9000").build();
    PutObjectArgs putObjectArgs = PutObjectArgs.builder()
            .object("plugins/js/index.js")                            // object name
            .contentType("text/javascript")                           // content type (text/javascript instead of the invalid text/js)
            .bucket("leadnews")                                       // bucket name, must match the bucket created in MinIO
            .stream(fileInputStream, fileInputStream.available(), -1) // file stream
            .build();
    minioClient.putObject(putObjectArgs);
}
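The two methods above differ only in the file path and content type, so they could be collapsed into one small helper. A sketch under the same assumptions as the examples above (bucket, credentials, endpoint); the class name and method signature are illustrative:

import io.minio.MinioClient;
import io.minio.PutObjectArgs;

import java.io.FileInputStream;

public class MinioUploadUtil {

    // upload a local file into the "leadnews" bucket under the given object name
    public static void upload(String localPath, String objectName, String contentType) throws Exception {
        MinioClient minioClient = MinioClient.builder()
                .credentials("minio", "minio123")
                .endpoint("http://localhost:9000")
                .build();
        try (FileInputStream in = new FileInputStream(localPath)) {
            PutObjectArgs args = PutObjectArgs.builder()
                    .bucket("leadnews")
                    .object(objectName)
                    .contentType(contentType)
                    .stream(in, in.available(), -1)
                    .build();
            minioClient.putObject(args);
        }
    }

    public static void main(String[] args) throws Exception {
        upload("/tmp/plugins/css/index.css", "plugins/css/index.css", "text/css");
        upload("/tmp/plugins/js/index.js", "plugins/js/index.js", "text/javascript");
    }
}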
- Render the article template and upload the generated static page:
@SpringBootTest
class ArticleApplicationTest {

    @Autowired
    private ApArticleContentMapper apArticleContentMapper;

    @Autowired
    private FileStorageService fileStorageService;

    @Autowired
    private Configuration configuration;

    @Autowired
    private ApArticleMapper apArticleMapper;

    @Test
    void test() throws Exception {
        // 1. query the article content
        ApArticleContent apArticleContent = apArticleContentMapper.selectOne(
                Wrappers.<ApArticleContent>lambdaQuery()
                        .eq(ApArticleContent::getArticleId, 1390536764510310401L));
        if (apArticleContent != null && StringUtils.isNotBlank(apArticleContent.getContent())) {
            // 2. render the Freemarker template with the article content
            StringWriter out = new StringWriter();
            Template template = configuration.getTemplate("article.ftl");
            HashMap<String, Object> map = new HashMap<>();
            map.put("content", JSON.parseArray(apArticleContent.getContent()));
            template.process(map, out);
            // 3. upload the generated html to MinIO
            InputStream is = new ByteArrayInputStream(out.toString().getBytes());
            String path = fileStorageService.uploadHtmlFile("",
                    apArticleContent.getArticleId() + ".html", is);
            System.out.println(path);
            // 4. save the static page url back to the article
            ApArticle apArticle = new ApArticle();
            apArticle.setId(apArticleContent.getArticleId());
            apArticle.setStaticUrl(path);
            apArticleMapper.updateById(apArticle);
        }
    }
}