Compare commits

...

42 Commits

Author SHA1 Message Date
9d621edaec update readme 2020-07-03 09:31:57 +08:00
56c0a81c1b update readme 2020-07-03 08:57:38 +08:00
6b3b88c147 Merge branch 'release_v2.6.1' into release_v2.6.2 2020-07-02 23:17:14 +08:00
894ee67f56 Improve browser detection on macOS 2020-07-02 23:15:21 +08:00
a88891b72f Release v2.6.2 2020-06-29 16:45:51 +08:00
5c7724c813 Adjust the home page cache duration 2020-06-29 16:43:10 +08:00
79c9f3e0f1 Merge branch 'release_v2.6.0' into release_v2.6.1 2020-06-25 10:49:15 +08:00
575142f9f3 Fix admin module build failures on some Maven versions 2020-06-25 10:46:27 +08:00
83c2460c83 Release v2.6.1 2020-06-22 11:06:52 +08:00
c7642ac2ef Fix the update-time filter bug in search engine mode 2020-06-22 11:01:23 +08:00
5af570a514 Release v2.6.0 2020-06-15 18:08:09 +08:00
0d96ff81b6 Release v2.6.0 2020-06-15 17:47:12 +08:00
7733cf1e3f Bump the version number 2020-06-15 15:13:37 +08:00
c9c714e71e Add single-book crawl tasks 2020-06-15 15:08:15 +08:00
3cbb6bf3fb Skip erroneous chapters instead of aborting, so sites with many broken chapters no longer leave large numbers of books only partially crawled 2020-06-15 10:06:35 +08:00
750e234f60 Update 2020-06-09 04:26:32 +08:00
c1d485c42b Update 2020-06-09 04:19:10 +08:00
0e2e6229cd Add a new source, filter special characters out of book descriptions, improve crawler compatibility 2020-06-08 16:54:58 +08:00
f8a669eb01 Close the table-sharding upgrade path 2020-06-01 17:33:46 +08:00
6d56399d0f Documentation update 2020-06-01 16:12:46 +08:00
1a0c1f864b Release v2.5.0 2020-06-01 15:40:37 +08:00
b193aca288 Enable table sharding by default (for new users); disable it by adding the configuration spring.autoconfigure.exclude=io.shardingsphere.shardingjdbc.spring.boot.SpringBootConfiguration (for existing users upgrading; see the snippet after this list) 2020-06-01 11:31:05 +08:00
7a6d7a4442 Temporarily disable table sharding; enable it yourself if needed 2020-06-01 09:24:37 +08:00
5505840b6e Temporarily disable table sharding; enable it yourself if needed 2020-06-01 09:22:00 +08:00
xxy 8eb3048b91 Documentation update 2020-05-29 10:10:25 +08:00
xxy 1b138c2aad Documentation update 2020-05-29 10:06:45 +08:00
f5e2dbaa6b Move the Redis configuration within the config file 2020-05-26 18:52:38 +08:00
xxy b6cb9aaedc Documentation update 2020-05-26 18:17:34 +08:00
f9f1e474ec Introduce Sharding-JDBC and split the book content table into 10 smaller tables 2020-05-26 18:09:19 +08:00
xxy 06bf848e99 Documentation update 2020-05-26 13:17:23 +08:00
6e171d20ed Integrate FastDFS 2020-05-26 13:07:53 +08:00
c62acc288e Integrate OSS object storage 2020-05-26 11:31:30 +08:00
xxy 9975faed01 Optimize visit-count calculation 2020-05-26 02:27:39 +08:00
xxy 8fde3a3725 Optimize visit-count calculation 2020-05-26 02:13:28 +08:00
xxy 430504ee28 Optimize visit-count calculation 2020-05-26 02:10:37 +08:00
xxy 16447c60ec Introduce Redisson 2020-05-26 00:28:12 +08:00
xxy be7cbe2d6f Prepare to introduce the Redisson framework for distributed locks 2020-05-26 00:04:00 +08:00
xxy 8f1ed88b07 Optimize ES ingestion 2020-05-25 23:38:46 +08:00
xxy 9b9851e7ab Documentation update 2020-05-25 23:16:08 +08:00
a55edf0408 ES optimization 2020-05-25 21:37:00 +08:00
856c4c0667 ES optimization 2020-05-25 21:26:47 +08:00
e4dd5bcb71 Introduce RabbitMQ for traffic smoothing: accumulate visit counts and apply them in batched updates 2020-05-25 21:06:51 +08:00
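Commit b193aca288 above turns table sharding on by default and notes that upgrading users can switch it off with a single property. A minimal sketch of that setting, shown here in YAML; placing it in the module's application.yml is an assumption, and the equivalent application.properties line is the one quoted in the commit message:

```yaml
spring:
  autoconfigure:
    # Excludes Sharding-JDBC auto-configuration, i.e. disables table sharding
    # (same effect as the properties form quoted in commit b193aca288)
    exclude: io.shardingsphere.shardingjdbc.spring.boot.SpringBootConfiguration
```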
58 changed files with 1874 additions and 283 deletions

View File

@ -1,7 +1,15 @@
[![index](./assets/热门云产品1040.100.jpg)](https://cloud.tencent.com/act/cps/redirect?redirect=1052&cps_key=736e609d66e0ac4e57813316cec6fd0b&from=console)
[![index]( https://s1.ax1x.com/2020/07/03/NOSA5q.jpg )]( https://cloud.tencent.com/act/cps/redirect?redirect=1052&cps_key=736e609d66e0ac4e57813316cec6fd0b&from=console )
# 小说精品屋-plus
#### New project: 小说精品屋 microservice edition
A Spring Cloud microservice novel portal platform built on top of 小说精品屋-plus
Gitee repository: https://gitee.com/xiongxyang/novel-cloud
GitHub repository: https://github.com/201206030/novel-cloud
#### Demo
[Visit the demo](http://www.java2nb.com) (front-end portal)
@ -40,7 +48,31 @@ novel-plus -- 父工程
```
#### Technology stack
Springboot+Mybatis+Mysql+ElasticSearch+Ehcache+Thymeleaf+Layui
| Technology | Description
| -------------------- | ---------------------------
| SpringBoot | Rapid development scaffold for Spring applications
| MyBatis | Persistence-layer ORM framework
| MyBatis Dynamic SQL | Dynamic SQL for MyBatis
| PageHelper | MyBatis pagination plugin
| MyBatisGenerator | Persistence-layer code generation plugin
| Sharding-Jdbc | Application-level database and table sharding middleware
| JJWT | JWT login support
| SpringSecurity | Security framework
| Shiro | Security framework
| Ehcache | In-process Java cache framework (default cache)
| Redis | Distributed cache (alternative caching scheme, off by default, enabled with one line of configuration)
| ElasticSearch | Search engine (search enhancement scheme, off by default, enabled with one line of configuration)
| RabbitMq | Message queue (traffic smoothing, off by default, enabled with one line of configuration)
| OSS | Alibaba Cloud Object Storage Service (one of the image storage options, switchable with one line of configuration)
| FastDfs | Open-source lightweight distributed file system (one of the image storage options, switchable with one line of configuration)
| Redisson | Distributed locks
| Lombok | Tool that reduces object boilerplate
| Docker | Application container engine
| Mysql | Database service
| Thymeleaf | Template engine
| Layui | Front-end UI
#### PC site screenshots
@ -171,4 +203,4 @@ docker安装教程[点击前往](https://my.oschina.net/java2nb/blog/4271989)
All related 精品小说屋 projects are public on OSChina; if interested, go to [OSChina](https://www.oschina.net/p/fiction_house) and search for the keyword `精品小说屋`.
[![index](./assets/120060.jpg)](https://www.aliyun.com/minisite/goods?userCode=uf4nasee )
[![index](https://s1.ax1x.com/2020/07/03/NOSuMF.jpg)](https://www.aliyun.com/minisite/goods?userCode=uf4nasee )

Binary file not shown. Before: 8.1 KiB | After: 185 KiB

View File

@ -137,10 +137,6 @@
<artifactId>velocity</artifactId>
<version>1.7</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-cache</artifactId>
</dependency>
<!--<dependency>-->
<!--<groupId>org.springframework.boot</groupId>-->
<!--<artifactId>spring-boot-starter-data-redis</artifactId>-->

View File

@ -12,7 +12,7 @@ spring:
driverClassName: com.mysql.jdbc.Driver
url: jdbc:mysql://127.0.0.1:3306/novel_plus?useUnicode=true&characterEncoding=utf8&serverTimezone=Asia/Shanghai
username: root
password:
password: test123456
#password:
initialSize: 1
minIdle: 3

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>novel</artifactId>
<groupId>com.java2nb</groupId>
<version>2.1.2</version>
<version>2.6.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@ -57,6 +57,22 @@
<version>${mybatis.version}</version>
</dependency>
<!-- Database/table sharding -->
<!-- Sharding-JDBC dependencies -->
<dependency>
<groupId>io.shardingsphere</groupId>
<artifactId>sharding-jdbc-spring-boot-starter</artifactId>
<version>${sharding.jdbc.version}</version>
</dependency>
<dependency>
<groupId>io.shardingsphere</groupId>
<artifactId>sharding-jdbc-spring-namespace</artifactId>
<version>${sharding.jdbc.version}</version>
</dependency>
<!-- PageHelper starter -->
<dependency>
<groupId>com.github.pagehelper</groupId>

View File

@ -55,4 +55,14 @@ public interface CacheKey {
* Lock used while syncing books to the search engine
* */
String ES_TRANS_LOCK = "esTransLock";
/**
* Whether the search engine already received this book's visit count during the last update window
* */
String ES_IS_UPDATE_VISIT = "esIsUpdateVisit";
/**
* Accumulated book visit count
* */
String BOOK_ADD_VISIT_COUNT = "bookAddVisitCount";
}

View File

@ -55,9 +55,18 @@ public enum ResponseStatus {
* */
INVITE_CODE_INVALID(4001, "邀请码无效!"),
AUTHOR_STATUS_FORBIDDEN(4002, "作者状态异常,暂不能管理小说!")
, BOOKNAME_EXISTS(4003,"已发布过同名小说!")
, BOOKNAME_EXISTS(4003,"已发布过同名小说!"),
/**
* Book-related errors
*/
BOOK_EXISTS(5001,"该小说已存在")
,
/**
* Search engine related errors
* */
ES_SEARCH_FAIL(9001,"搜索引擎查询错误!"),
/**

View File

@ -10,9 +10,21 @@ public class CrawlSingleTask {
@Generated("org.mybatis.generator.api.MyBatisGenerator")
private Integer sourceId;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
private String sourceName;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
private String sourceBookId;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
private Integer catId;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
private String bookName;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
private String authorName;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
private Byte taskStatus;
@ -42,6 +54,16 @@ public class CrawlSingleTask {
this.sourceId = sourceId;
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public String getSourceName() {
return sourceName;
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public void setSourceName(String sourceName) {
this.sourceName = sourceName == null ? null : sourceName.trim();
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public String getSourceBookId() {
return sourceBookId;
@ -52,6 +74,36 @@ public class CrawlSingleTask {
this.sourceBookId = sourceBookId == null ? null : sourceBookId.trim();
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public Integer getCatId() {
return catId;
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public void setCatId(Integer catId) {
this.catId = catId;
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public String getBookName() {
return bookName;
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public void setBookName(String bookName) {
this.bookName = bookName == null ? null : bookName.trim();
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public String getAuthorName() {
return authorName;
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public void setAuthorName(String authorName) {
this.authorName = authorName == null ? null : authorName.trim();
}
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public Byte getTaskStatus() {
return taskStatus;

View File

@ -24,7 +24,7 @@ public final class BookContentDynamicSqlSupport {
public final SqlColumn<Long> indexId = column("index_id", JDBCType.BIGINT);
public final SqlColumn<String> content = column("content", JDBCType.LONGVARCHAR);
public final SqlColumn<String> content = column("content", JDBCType.VARCHAR);
public BookContent() {
super("book_content");

View File

@ -63,7 +63,7 @@ public interface BookContentMapper {
@Results(id="BookContentResult", value = {
@Result(column="id", property="id", jdbcType=JdbcType.BIGINT, id=true),
@Result(column="index_id", property="indexId", jdbcType=JdbcType.BIGINT),
@Result(column="content", property="content", jdbcType=JdbcType.LONGVARCHAR)
@Result(column="content", property="content", jdbcType=JdbcType.VARCHAR)
})
List<BookContent> selectMany(SelectStatementProvider selectStatement);

View File

@ -16,9 +16,21 @@ public final class CrawlSingleTaskDynamicSqlSupport {
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public static final SqlColumn<Integer> sourceId = crawlSingleTask.sourceId;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public static final SqlColumn<String> sourceName = crawlSingleTask.sourceName;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public static final SqlColumn<String> sourceBookId = crawlSingleTask.sourceBookId;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public static final SqlColumn<Integer> catId = crawlSingleTask.catId;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public static final SqlColumn<String> bookName = crawlSingleTask.bookName;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public static final SqlColumn<String> authorName = crawlSingleTask.authorName;
@Generated("org.mybatis.generator.api.MyBatisGenerator")
public static final SqlColumn<Byte> taskStatus = crawlSingleTask.taskStatus;
@ -34,8 +46,16 @@ public final class CrawlSingleTaskDynamicSqlSupport {
public final SqlColumn<Integer> sourceId = column("source_id", JDBCType.INTEGER);
public final SqlColumn<String> sourceName = column("source_name", JDBCType.VARCHAR);
public final SqlColumn<String> sourceBookId = column("source_book_id", JDBCType.VARCHAR);
public final SqlColumn<Integer> catId = column("cat_id", JDBCType.INTEGER);
public final SqlColumn<String> bookName = column("book_name", JDBCType.VARCHAR);
public final SqlColumn<String> authorName = column("author_name", JDBCType.VARCHAR);
public final SqlColumn<Byte> taskStatus = column("task_status", JDBCType.TINYINT);
public final SqlColumn<Byte> excCount = column("exc_count", JDBCType.TINYINT);

View File

@ -35,7 +35,7 @@ import org.mybatis.dynamic.sql.util.mybatis3.MyBatis3Utils;
@Mapper
public interface CrawlSingleTaskMapper {
@Generated("org.mybatis.generator.api.MyBatisGenerator")
BasicColumn[] selectList = BasicColumn.columnList(id, sourceId, sourceBookId, taskStatus, excCount, createTime);
BasicColumn[] selectList = BasicColumn.columnList(id, sourceId, sourceName, sourceBookId, catId, bookName, authorName, taskStatus, excCount, createTime);
@Generated("org.mybatis.generator.api.MyBatisGenerator")
@SelectProvider(type=SqlProviderAdapter.class, method="select")
@ -63,7 +63,11 @@ public interface CrawlSingleTaskMapper {
@Results(id="CrawlSingleTaskResult", value = {
@Result(column="id", property="id", jdbcType=JdbcType.BIGINT, id=true),
@Result(column="source_id", property="sourceId", jdbcType=JdbcType.INTEGER),
@Result(column="source_name", property="sourceName", jdbcType=JdbcType.VARCHAR),
@Result(column="source_book_id", property="sourceBookId", jdbcType=JdbcType.VARCHAR),
@Result(column="cat_id", property="catId", jdbcType=JdbcType.INTEGER),
@Result(column="book_name", property="bookName", jdbcType=JdbcType.VARCHAR),
@Result(column="author_name", property="authorName", jdbcType=JdbcType.VARCHAR),
@Result(column="task_status", property="taskStatus", jdbcType=JdbcType.TINYINT),
@Result(column="exc_count", property="excCount", jdbcType=JdbcType.TINYINT),
@Result(column="create_time", property="createTime", jdbcType=JdbcType.TIMESTAMP)
@ -96,7 +100,11 @@ public interface CrawlSingleTaskMapper {
return MyBatis3Utils.insert(this::insert, record, crawlSingleTask, c ->
c.map(id).toProperty("id")
.map(sourceId).toProperty("sourceId")
.map(sourceName).toProperty("sourceName")
.map(sourceBookId).toProperty("sourceBookId")
.map(catId).toProperty("catId")
.map(bookName).toProperty("bookName")
.map(authorName).toProperty("authorName")
.map(taskStatus).toProperty("taskStatus")
.map(excCount).toProperty("excCount")
.map(createTime).toProperty("createTime")
@ -108,7 +116,11 @@ public interface CrawlSingleTaskMapper {
return MyBatis3Utils.insertMultiple(this::insertMultiple, records, crawlSingleTask, c ->
c.map(id).toProperty("id")
.map(sourceId).toProperty("sourceId")
.map(sourceName).toProperty("sourceName")
.map(sourceBookId).toProperty("sourceBookId")
.map(catId).toProperty("catId")
.map(bookName).toProperty("bookName")
.map(authorName).toProperty("authorName")
.map(taskStatus).toProperty("taskStatus")
.map(excCount).toProperty("excCount")
.map(createTime).toProperty("createTime")
@ -120,7 +132,11 @@ public interface CrawlSingleTaskMapper {
return MyBatis3Utils.insert(this::insert, record, crawlSingleTask, c ->
c.map(id).toPropertyWhenPresent("id", record::getId)
.map(sourceId).toPropertyWhenPresent("sourceId", record::getSourceId)
.map(sourceName).toPropertyWhenPresent("sourceName", record::getSourceName)
.map(sourceBookId).toPropertyWhenPresent("sourceBookId", record::getSourceBookId)
.map(catId).toPropertyWhenPresent("catId", record::getCatId)
.map(bookName).toPropertyWhenPresent("bookName", record::getBookName)
.map(authorName).toPropertyWhenPresent("authorName", record::getAuthorName)
.map(taskStatus).toPropertyWhenPresent("taskStatus", record::getTaskStatus)
.map(excCount).toPropertyWhenPresent("excCount", record::getExcCount)
.map(createTime).toPropertyWhenPresent("createTime", record::getCreateTime)
@ -158,7 +174,11 @@ public interface CrawlSingleTaskMapper {
static UpdateDSL<UpdateModel> updateAllColumns(CrawlSingleTask record, UpdateDSL<UpdateModel> dsl) {
return dsl.set(id).equalTo(record::getId)
.set(sourceId).equalTo(record::getSourceId)
.set(sourceName).equalTo(record::getSourceName)
.set(sourceBookId).equalTo(record::getSourceBookId)
.set(catId).equalTo(record::getCatId)
.set(bookName).equalTo(record::getBookName)
.set(authorName).equalTo(record::getAuthorName)
.set(taskStatus).equalTo(record::getTaskStatus)
.set(excCount).equalTo(record::getExcCount)
.set(createTime).equalTo(record::getCreateTime);
@ -168,7 +188,11 @@ public interface CrawlSingleTaskMapper {
static UpdateDSL<UpdateModel> updateSelectiveColumns(CrawlSingleTask record, UpdateDSL<UpdateModel> dsl) {
return dsl.set(id).equalToWhenPresent(record::getId)
.set(sourceId).equalToWhenPresent(record::getSourceId)
.set(sourceName).equalToWhenPresent(record::getSourceName)
.set(sourceBookId).equalToWhenPresent(record::getSourceBookId)
.set(catId).equalToWhenPresent(record::getCatId)
.set(bookName).equalToWhenPresent(record::getBookName)
.set(authorName).equalToWhenPresent(record::getAuthorName)
.set(taskStatus).equalToWhenPresent(record::getTaskStatus)
.set(excCount).equalToWhenPresent(record::getExcCount)
.set(createTime).equalToWhenPresent(record::getCreateTime);
@ -178,7 +202,11 @@ public interface CrawlSingleTaskMapper {
default int updateByPrimaryKey(CrawlSingleTask record) {
return update(c ->
c.set(sourceId).equalTo(record::getSourceId)
.set(sourceName).equalTo(record::getSourceName)
.set(sourceBookId).equalTo(record::getSourceBookId)
.set(catId).equalTo(record::getCatId)
.set(bookName).equalTo(record::getBookName)
.set(authorName).equalTo(record::getAuthorName)
.set(taskStatus).equalTo(record::getTaskStatus)
.set(excCount).equalTo(record::getExcCount)
.set(createTime).equalTo(record::getCreateTime)
@ -190,7 +218,11 @@ public interface CrawlSingleTaskMapper {
default int updateByPrimaryKeySelective(CrawlSingleTask record) {
return update(c ->
c.set(sourceId).equalToWhenPresent(record::getSourceId)
.set(sourceName).equalToWhenPresent(record::getSourceName)
.set(sourceBookId).equalToWhenPresent(record::getSourceBookId)
.set(catId).equalToWhenPresent(record::getCatId)
.set(bookName).equalToWhenPresent(record::getBookName)
.set(authorName).equalToWhenPresent(record::getAuthorName)
.set(taskStatus).equalToWhenPresent(record::getTaskStatus)
.set(excCount).equalToWhenPresent(record::getExcCount)
.set(createTime).equalToWhenPresent(record::getCreateTime)

View File

@ -1,11 +1,8 @@
spring:
profiles:
include: [common]
datasource:
url: jdbc:mysql://127.0.0.1:3306/novel_plus?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=Asia/Shanghai
username: root
password: test123456
driver-class-name: com.mysql.cj.jdbc.Driver
main:
allow-bean-definition-overriding: true
#Redis server IP
redis:
host: 127.0.0.1
@ -25,6 +22,48 @@ spring:
min-idle: 0
#Connection timeout in milliseconds
timeout: 30000
datasource:
url: jdbc:mysql://127.0.0.1:3306/novel_plus?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=Asia/Shanghai
username: root
password: test123456
driver-class-name: com.mysql.cj.jdbc.Driver
####When using Sharding-JDBC,
####no jdbcType may be LONGVARCHAR, otherwise it causes java.io.NotSerializableException: java.io.StringReader
##### Replace every LONGVARCHAR type with VARCHAR
sharding:
jdbc:
datasource:
names: ds0 #,ds1
ds0:
type: com.zaxxer.hikari.HikariDataSource
driver-class-name: com.mysql.cj.jdbc.Driver
jdbc-url: jdbc:mysql://localhost:3306/novel_plus?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=Asia/Shanghai
username: root
password: test123456
# ds1:
# type: com.alibaba.druid.pool.DruidDataSource
# driver-class-name: com.mysql.jdbc.Driver
# url: jdbc:mysql://localhost:3306/novel_plus2
# username: root
# password: test123456
config:
sharding:
props:
sql.show: true
tables:
book_content: #book_content table
key-generator-column-name: id #primary key column
actual-data-nodes: ds${0}.book_content${0..9} #data nodes
# database-strategy: #database sharding strategy
# inline:
# sharding-column: book_id
# algorithm-expression: ds${book_id % 10}
table-strategy: #table sharding strategy
inline:
shardingColumn: index_id
algorithm-expression: book_content${index_id % 10}
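The inline table strategy above shards book_content purely by `index_id % 10`. A minimal illustrative sketch of that routing rule, not part of the project code (class and method names are hypothetical):

```java
// Hypothetical helper mirroring algorithm-expression: book_content${index_id % 10}
public class ShardingRouteSketch {

    /** Returns the physical table a chapter-content row lands in. */
    static String physicalTable(long indexId) {
        return "book_content" + (indexId % 10);
    }

    public static void main(String[] args) {
        // A row with index_id 1234567 is stored in book_content7
        System.out.println(physicalTable(1234567L));
    }
}
```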
@ -33,9 +72,6 @@ spring:
pic:
save:
type: 2 #image saving mode: 1 do not save, use the remote image; 2 save locally
type: 2 #image saving mode: 1 do not save, use the crawled remote image; 2 save on your own storage medium
storage: local #storage medium: local = local disk, OSS = Alibaba Cloud object storage, fastDfs = distributed file system
path: /var/pic #image save path

View File

@ -9,7 +9,7 @@
</commentGenerator>
<jdbcConnection
connectionURL="jdbc:mysql://127.0.0.1:3306/novel_plus?tinyInt1isBit=false&amp;useUnicode=true&amp;characterEncoding=utf-8&amp;serverTimezone=Asia/Shanghai&amp;nullCatalogMeansCurrent=true"
driverClass="com.mysql.jdbc.Driver" password=""
driverClass="com.mysql.jdbc.Driver" password="test123456"
userId="root" />
<!-- When false (the default), JDBC DECIMAL and NUMERIC types are resolved to Integer; when true, JDBC DECIMAL
@ -44,7 +44,7 @@
</javaClientGenerator>
<!-- To generate all tables, set tableName to % -->
<table tableName="user_buy_record" domainObjectName="UserBuyRecord"/>
<table tableName="crawl_single_task"/>
<!-- Specify database tables -->
<!--<table schema="jly" tableName="job_position" domainObjectName="JobPositionTest"/>-->

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>novel</artifactId>
<groupId>com.java2nb</groupId>
<version>2.1.2</version>
<version>2.6.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@ -25,6 +25,8 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-security</artifactId>
</dependency>
</dependencies>
<build>

View File

@ -3,11 +3,12 @@ package com.java2nb.novel.controller;
import com.github.pagehelper.PageInfo;
import com.java2nb.novel.core.bean.ResultBean;
import com.java2nb.novel.core.utils.BeanUtil;
import com.java2nb.novel.entity.CrawlSingleTask;
import com.java2nb.novel.entity.CrawlSource;
import com.java2nb.novel.service.CrawlService;
import com.java2nb.novel.vo.CrawlSingleTaskVO;
import com.java2nb.novel.vo.CrawlSourceVO;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
@ -56,6 +57,38 @@ public class CrawlController {
return ResultBean.ok();
}
/**
* Add a single-book crawl task
* */
@PostMapping("addCrawlSingleTask")
public ResultBean addCrawlSingleTask(CrawlSingleTask singleTask){
crawlService.addCrawlSingleTask(singleTask);
return ResultBean.ok();
}
/**
* Paged query of single-book crawl tasks
* */
@PostMapping("listCrawlSingleTaskByPage")
public ResultBean listCrawlSingleTaskByPage(@RequestParam(value = "curr", defaultValue = "1") int page, @RequestParam(value = "limit", defaultValue = "10") int pageSize){
return ResultBean.ok(new PageInfo<>(BeanUtil.copyList(crawlService.listCrawlSingleTaskByPage(page,pageSize), CrawlSingleTaskVO.class)
));
}
/**
* Delete a crawl task
* */
@PostMapping("delCrawlSingleTask")
public ResultBean delCrawlSingleTask(Long id){
crawlService.delCrawlSingleTask(id);
return ResultBean.ok();
}

View File

@ -96,8 +96,12 @@ public class CrawlParser {
String desc = bookDetailHtml.substring(bookDetailHtml.indexOf(ruleBean.getDescStart()) + ruleBean.getDescStart().length());
desc = desc.substring(0, desc.indexOf(ruleBean.getDescEnd()));
//Strip <a> tags from the description
desc = desc.replaceAll("<a[^<]+</a>","");
//Strip special markup from the description
desc = desc.replaceAll("<a[^<]+</a>","")
.replaceAll("<font[^<]+</font>","")
.replaceAll("<p>\\s*</p>","")
.replaceAll("<p>","")
.replaceAll("</p>","<br/>");
//Set the book description
book.setBookDesc(desc);
if (StringUtils.isNotBlank(ruleBean.getStatusPatten())) {
@ -237,8 +241,6 @@ public class CrawlParser {
}
} else {
break;
}

View File

@ -3,10 +3,7 @@ package com.java2nb.novel.core.listener;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.java2nb.novel.core.crawl.CrawlParser;
import com.java2nb.novel.core.crawl.RuleBean;
import com.java2nb.novel.entity.Book;
import com.java2nb.novel.entity.BookContent;
import com.java2nb.novel.entity.BookIndex;
import com.java2nb.novel.entity.CrawlSource;
import com.java2nb.novel.entity.*;
import com.java2nb.novel.service.BookService;
import com.java2nb.novel.service.CrawlService;
import com.java2nb.novel.utils.Constants;
@ -40,9 +37,9 @@ public class StarterListener implements ServletContextListener {
@Override
public void contextInitialized(ServletContextEvent sce) {
log.info("程序启动,开始执行自动更新线程。。。");
for (int i = 0; i < updateThreadCount; i++) {
new Thread(() -> {
log.info("程序启动,开始执行自动更新线程。。。");
while (true) {
try {
//1. Query the first 100 books needing an update whose latest chapter update time is within the past month
@ -61,7 +58,7 @@ public class StarterListener implements ServletContextListener {
Book book = CrawlParser.parseBook(ruleBean, needUpdateBook.getCrawlBookId());
//Only existing books are updated here
book.setId(needUpdateBook.getId());
if(needUpdateBook.getPicUrl()!=null && needUpdateBook.getPicUrl().startsWith(Constants.LOCAL_PIC_PREFIX)) {
if (needUpdateBook.getPicUrl() != null && needUpdateBook.getPicUrl().contains(Constants.LOCAL_PIC_PREFIX)) {
//Local images are not updated
book.setPicUrl(null);
}
@ -83,6 +80,42 @@ public class StarterListener implements ServletContextListener {
}
}).start();
}
}
new Thread(() -> {
log.info("程序启动,开始执行单本采集任务线程。。。");
while (true) {
CrawlSingleTask task = null;
byte crawlStatus = 0;
try {
//Fetch a pending crawl task
task = crawlService.getCrawlSingleTask();
if (task != null) {
//Look up the crawler rule
CrawlSource source = crawlService.queryCrawlSource(task.getSourceId());
RuleBean ruleBean = new ObjectMapper().readValue(source.getCrawlRule(), RuleBean.class);
if (crawlService.parseBookAndSave(task.getCatId(), ruleBean, task.getSourceId(), task.getSourceBookId())) {
//Crawl succeeded
crawlStatus = 1;
}
}
Thread.sleep(1000 * 60);
} catch (Exception e) {
log.error(e.getMessage(), e);
}
if (task != null) {
crawlService.updateCrawlSingleTask(task, crawlStatus);
}
}
}).start();
}
}

View File

@ -1,6 +1,7 @@
package com.java2nb.novel.service;
import com.java2nb.novel.core.crawl.RuleBean;
import com.java2nb.novel.entity.CrawlSingleTask;
import com.java2nb.novel.entity.CrawlSource;
import java.util.List;
@ -39,6 +40,16 @@ public interface CrawlService {
* */
void updateCrawlSourceStatus(Integer sourceId, Byte sourceStatus);
/**
* Crawl and save a book
* @param catId category ID
* @param bookId book ID
* @param sourceId source ID
* @param ruleBean crawl rule
* @return true on success, false on failure
* */
boolean parseBookAndSave(int catId, RuleBean ruleBean, Integer sourceId, String bookId);
/**
* Query crawl sources by crawler status
* @param sourceStatus status, 0 = off, 1 = on
@ -61,4 +72,37 @@ public interface CrawlService {
* @return source info
* */
CrawlSource queryCrawlSource(Integer sourceId);
/**
* Add a single-book crawl task
* @param singleTask task info object
* */
void addCrawlSingleTask(CrawlSingleTask singleTask);
/**
* Paged query of single-book crawl tasks
* @param page current page number
* @param pageSize page size
* @return list of single-book crawl tasks
* */
List<CrawlSingleTask> listCrawlSingleTaskByPage(int page, int pageSize);
/**
* Delete a crawl task
* @param id task ID
* */
void delCrawlSingleTask(Long id);
/**
* Get a pending crawl task
* @return crawl task
* */
CrawlSingleTask getCrawlSingleTask();
/**
* Update a single-book crawl task
* @param task crawl task
* @param status crawl status
* */
void updateCrawlSingleTask(CrawlSingleTask task, Byte status);
}

View File

@ -6,12 +6,12 @@ import com.java2nb.novel.core.cache.CacheKey;
import com.java2nb.novel.core.cache.CacheService;
import com.java2nb.novel.core.crawl.CrawlParser;
import com.java2nb.novel.core.crawl.RuleBean;
import com.java2nb.novel.core.enums.ResponseStatus;
import com.java2nb.novel.core.exception.BusinessException;
import com.java2nb.novel.core.utils.IdWorker;
import com.java2nb.novel.core.utils.SpringUtil;
import com.java2nb.novel.core.utils.ThreadUtil;
import com.java2nb.novel.entity.Book;
import com.java2nb.novel.entity.BookContent;
import com.java2nb.novel.entity.BookIndex;
import com.java2nb.novel.entity.*;
import com.java2nb.novel.entity.CrawlSource;
import com.java2nb.novel.mapper.*;
import com.java2nb.novel.service.BookService;
@ -33,8 +33,7 @@ import static com.java2nb.novel.core.utils.HttpUtil.getByHttpClient;
import static com.java2nb.novel.mapper.BookDynamicSqlSupport.crawlBookId;
import static com.java2nb.novel.mapper.BookDynamicSqlSupport.crawlSourceId;
import static com.java2nb.novel.mapper.CrawlSourceDynamicSqlSupport.*;
import static org.mybatis.dynamic.sql.SqlBuilder.isEqualTo;
import static org.mybatis.dynamic.sql.SqlBuilder.update;
import static org.mybatis.dynamic.sql.SqlBuilder.*;
import static org.mybatis.dynamic.sql.select.SelectDSL.select;
/**
@ -48,6 +47,8 @@ public class CrawlServiceImpl implements CrawlService {
private final CrawlSourceMapper crawlSourceMapper;
private final CrawlSingleTaskMapper crawlSingleTaskMapper;
private final BookService bookService;
@ -140,6 +141,62 @@ public class CrawlServiceImpl implements CrawlService {
return crawlSourceMapper.selectMany(render).get(0);
}
@Override
public void addCrawlSingleTask(CrawlSingleTask singleTask) {
if(bookService.queryIsExistByBookNameAndAuthorName(singleTask.getBookName(),singleTask.getAuthorName())){
throw new BusinessException(ResponseStatus.BOOK_EXISTS);
}
singleTask.setCreateTime(new Date());
crawlSingleTaskMapper.insertSelective(singleTask);
}
@Override
public List<CrawlSingleTask> listCrawlSingleTaskByPage(int page, int pageSize) {
PageHelper.startPage(page, pageSize);
SelectStatementProvider render = select(CrawlSingleTaskDynamicSqlSupport.crawlSingleTask.allColumns())
.from(CrawlSingleTaskDynamicSqlSupport.crawlSingleTask)
.orderBy(CrawlSingleTaskDynamicSqlSupport.createTime.descending())
.build()
.render(RenderingStrategies.MYBATIS3);
return crawlSingleTaskMapper.selectMany(render);
}
@Override
public void delCrawlSingleTask(Long id) {
crawlSingleTaskMapper.deleteByPrimaryKey(id);
}
@Override
public CrawlSingleTask getCrawlSingleTask() {
List<CrawlSingleTask> list = crawlSingleTaskMapper.selectMany(select(CrawlSingleTaskDynamicSqlSupport.crawlSingleTask.allColumns())
.from(CrawlSingleTaskDynamicSqlSupport.crawlSingleTask)
.where(CrawlSingleTaskDynamicSqlSupport.taskStatus,isEqualTo((byte)2))
.orderBy(CrawlSingleTaskDynamicSqlSupport.createTime)
.limit(1)
.build()
.render(RenderingStrategies.MYBATIS3));
return list.size() > 0 ? list.get(0) : null;
}
@Override
public void updateCrawlSingleTask(CrawlSingleTask task, Byte status) {
byte excCount = task.getExcCount();
excCount+=1;
task.setExcCount(excCount);
if(status == 1 || excCount == 5){
//When the crawl succeeds or the attempt count reaches 5, record the final status and stop crawling
task.setTaskStatus(status);
}
crawlSingleTaskMapper.updateByPrimaryKeySelective(task);
}
/**
* Parse the category list
*/
@ -173,35 +230,7 @@ public class CrawlServiceImpl implements CrawlService {
String bookId = bookIdMatcher.group(1);
Book book = CrawlParser.parseBook(ruleBean, bookId);
//这里只做新书入库,查询是否存在这本书
Book existBook = bookService.queryBookByBookNameAndAuthorName(book.getBookName(), book.getAuthorName());
//如果该小说不存在则可以解析入库但是标记该小说正在入库30分钟之后才允许再次入库
if (existBook == null) {
//没有该书,可以入库
book.setCatId(catId);
//根据分类ID查询分类
book.setCatName(bookService.queryCatNameByCatId(catId));
if (catId == 7) {
//女频
book.setWorkDirection((byte) 1);
} else {
//男频
book.setWorkDirection((byte) 0);
}
book.setCrawlBookId(bookId);
book.setCrawlSourceId(sourceId);
book.setCrawlLastTime(new Date());
book.setId(new IdWorker().nextId());
//解析章节目录
Map<Integer, List> indexAndContentList = CrawlParser.parseBookIndexAndContent(bookId, book, ruleBean, new HashMap<>(0));
bookService.saveBookAndIndexAndContent(book, (List<BookIndex>) indexAndContentList.get(CrawlParser.BOOK_INDEX_LIST_KEY), (List<BookContent>) indexAndContentList.get(CrawlParser.BOOK_CONTENT_LIST_KEY));
} else {
//只更新书籍的爬虫相关字段
bookService.updateCrawlProperties(existBook.getId(), sourceId, bookId);
}
parseBookAndSave(catId, ruleBean, sourceId, bookId);
} catch (Exception e) {
log.error(e.getMessage(), e);
}
@ -232,6 +261,43 @@ public class CrawlServiceImpl implements CrawlService {
}
@Override
public boolean parseBookAndSave(int catId, RuleBean ruleBean, Integer sourceId, String bookId) {
Book book = CrawlParser.parseBook(ruleBean, bookId);
if(book.getBookName() == null || book.getAuthorName() == null){
return false;
}
//Only new books are stored here; check whether this book already exists
Book existBook = bookService.queryBookByBookNameAndAuthorName(book.getBookName(), book.getAuthorName());
//If the book does not exist it can be parsed and stored, but it is flagged as being stored and may not be stored again for 30 minutes
if (existBook == null) {
//The book is not present and can be stored
book.setCatId(catId);
//Look up the category name by category ID
book.setCatName(bookService.queryCatNameByCatId(catId));
if (catId == 7) {
//Female-oriented channel
book.setWorkDirection((byte) 1);
} else {
//Male-oriented channel
book.setWorkDirection((byte) 0);
}
book.setCrawlBookId(bookId);
book.setCrawlSourceId(sourceId);
book.setCrawlLastTime(new Date());
book.setId(new IdWorker().nextId());
//Parse the chapter index
Map<Integer, List> indexAndContentList = CrawlParser.parseBookIndexAndContent(bookId, book, ruleBean, new HashMap<>(0));
bookService.saveBookAndIndexAndContent(book, (List<BookIndex>) indexAndContentList.get(CrawlParser.BOOK_INDEX_LIST_KEY), (List<BookContent>) indexAndContentList.get(CrawlParser.BOOK_CONTENT_LIST_KEY));
} else {
//Only update the book's crawl-related fields
bookService.updateCrawlProperties(existBook.getId(), sourceId, bookId);
}
return true;
}
@Override
public void updateCrawlSourceStatus(Integer sourceId, Byte sourceStatus) {
CrawlSource source = new CrawlSource();

View File

@ -0,0 +1,26 @@
package com.java2nb.novel.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.java2nb.novel.entity.CrawlSingleTask;
import com.java2nb.novel.entity.CrawlSource;
import lombok.Data;
import java.util.Date;
/**
* @author Administrator
*/
@Data
public class CrawlSingleTaskVO extends CrawlSingleTask {
@JsonFormat(timezone = "GMT+8", pattern = "yyyy-MM-dd HH:mm")
private Date createTime;
@Override
public String toString() {
return super.toString();
}
}

View File

@ -10,7 +10,7 @@
.user_l .log_list { width:350px }
.user_l .s_input { margin-bottom:25px; font-size:14px }
.s_input { width:348px; height:30px; line-height:38px\9; vertical-align:middle; border:1px solid #ddd; border-radius:2px }
.icon_name, .icon_key, .icon_code { width:312px; padding-left:36px; background:url(../images/icon_user.png) no-repeat 13px 13px }
.icon_name, .icon_key, .icon_code { width:312px; padding-left:36px}
.icon_key { background-position: 13px -51px }
.icon_code { background-position: 13px -117px; width:200px; float:left }
.code_pic { height:38px; float:right }
@ -37,7 +37,7 @@
.fast_tit .title { background:#fff; font-size:16px; padding:3px 14px; position:relative; display:inline-block; z-index:999 }
/*userinfo*/
.my_l { width:198px; float:left; font-size: 13px; padding-top: 20px; }
.my_l li a { display:block; height:42px; line-height:42px; padding-left:62px; border-left:4px solid #fff; background:url(../images/icon_user.png) no-repeat; margin-bottom:5px; color: #666 }
.my_l li a { display:block; height:42px; line-height:42px; padding-left:62px; border-left:4px solid #fff; margin-bottom:5px; color: #666 }
.my_l li .on { background-color:#fafafa; border-left:2px solid #f80; color:#000; border-radius: 0 2px 2px 0 }
.my_l .link_1 { background-position:32px -188px }
.my_l .link_2 { background-position:32px -230px }

View File

@ -0,0 +1,188 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/>
<title>爬虫管理系统-小说精品屋</title>
<link rel="stylesheet" href="/css/base.css?v=1"/>
<link rel="stylesheet" href="/css/user.css"/>
</head>
</head>
<body class="">
<div class="header">
<div class="mainNav" id="mainNav">
<div class="box_center cf"
style="text-align: center;height: 44px;line-height: 48px;color: #fff;font-size: 16px;">
小说精品屋爬虫管理
</div>
</div>
</div>
<div class="main box_center cf">
<div class="userBox cf">
<div class="my_l">
<ul class="log_list">
<li><a class="link_1" href="/">爬虫源管理</a></li>
<li><a class="link_1 on" href="/crawl/crawlSingleTask_list.html">单本采集管理</a></li>
<!--<li><a class="link_1 " href="/user/userinfo.html">批量小说爬取</a></li>
<li><a class="link_4 " href="/user/favorites.html">单本小说爬取</a></li>-->
</ul>
</div>
<div class="my_r">
<div class="my_bookshelf">
<div class="userBox cf">
<form method="post" action="./register.html" id="form2">
<div class="user_l">
<div></div>
<h3>采集信息填写示例均为笔趣阁http://www.mcmssc.com</h3>
<ul class="log_list">
<li><span id="LabErr"></span></li>
<input type="hidden" id="sourceName" class="s_input icon_name" >
<b>采集源:</b>
<li><select id="sourceId" class="s_input icon_key">
</select></li>
<b>采集分类:</b>
<li><select id="catId" class="s_input icon_key">
<option value="1">玄幻奇幻</option>
<option value="2">武侠仙侠</option>
<option value="3">都市言情</option>
<option value="4">历史军事</option>
<option value="5">科幻灵异</option>
<option value="6">网游竞技</option>
<option value="7">女生频道</option>
</select>
</li>
示例:<b>73_73911</b>
<li><input type="text" id="sourceBookId" class="s_input icon_key"
placeholder="采集的源站小说ID"/></li>
示例:<b>苏厨</b>
<li><input type="text" id="bookName" class="s_input icon_key"
placeholder="采集的小说名"/></li>
示例:<b>二子从周</b>
<li><input type="text" id="authorName" class="s_input icon_key"
placeholder="采集的小说作者名"/></li>
<!--示例:<b>https://m.xdingdiann.com/sort/0/1.html</b>
<li><input type="text" id="updateBookListUrl" class="s_input icon_key"
placeholder="小说更新列表url"></li>-->
<li><input type="button" onclick="addCrawlSingleTask()" name="btnRegister" value="提交"
id="btnRegister" class="btn_red"></li>
</ul>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</body>
<script src="/javascript/jquery-1.8.0.min.js" type="text/javascript"></script>
<script src="/layui/layui.all.js" type="text/javascript"></script>
<script src="/javascript/header.js" type="text/javascript"></script>
<script src="/javascript/user.js" type="text/javascript"></script>
<script language="javascript" type="text/javascript">
$(function () {
$.ajax({
type: "POST",
url: "/crawl/listCrawlByPage",
data: {'curr':1,'limit':100},
dataType: "json",
success: function (data) {
if (data.code == 200) {
var crawlSourceList = data.data.list;
if (crawlSourceList.length > 0) {
var crawlSourceListHtml = "";
for(var i=0;i<crawlSourceList.length;i++){
var crawlSource = crawlSourceList[i];
crawlSourceListHtml+=("<option value=\""+crawlSource.id+"\">"+crawlSource.sourceName+"</option>");
}
$("#sourceId").html(crawlSourceListHtml);
}
} else if (data.code == 1001) {
//Not logged in
location.href = '/user/login.html?originUrl=' + decodeURIComponent(location.href);
}else {
layer.alert(data.msg);
}
},
error: function () {
layer.alert('网络异常');
}
})
});
function addCrawlSingleTask() {
var sourceId = $("#sourceId").find("option:selected").val();
var catId = $("#catId").find("option:selected").val();
var sourceName = $("#sourceId").find("option:selected").html();
var sourceBookId = $("#sourceBookId").val();
if (!sourceBookId){
layer.alert("采集的源站小说ID不能为空");
return;
}
var bookName = $("#bookName").val();
if (!bookName){
layer.alert("采集的小说名不能为空");
return;
}
var authorName = $("#authorName").val();
if (!authorName){
layer.alert("采集的小说作者名不能为空");
return;
}
$.ajax({
type: "POST",
url: "/crawl/addCrawlSingleTask",
data: {'sourceId': sourceId, 'sourceName': sourceName,'catId':catId,'sourceBookId' : sourceBookId,'bookName':bookName,'authorName':authorName},
dataType: "json",
success: function (data) {
if (data.code == 200) {
window.location.href = '/crawl/crawlSingleTask_list.html';
} else {
layer.alert(data.msg);
}
},
error: function () {
layer.alert('网络异常');
}
})
}
</script>
</html>

View File

@ -0,0 +1,230 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"/>
<title>爬虫管理系统-小说精品屋</title>
<link rel="stylesheet" href="/css/base.css?v=1"/>
<link rel="stylesheet" href="/css/user.css"/>
</head>
</head>
<body class="">
<div class="header">
<div class="mainNav" id="mainNav">
<div class="box_center cf"
style="text-align: center;height: 44px;line-height: 48px;color: #fff;font-size: 16px;">
小说精品屋爬虫管理
</div>
</div>
</div>
<div class="main box_center cf">
<div class="userBox cf">
<div class="my_l">
<ul class="log_list">
<li><a class="link_1" href="/">爬虫源管理</a></li>
<li><a class="link_1 on" href="/crawl/crawlSingleTask_list.html">单本采集管理</a></li>
<!-- <li><a class="link_1 " href="/user/userinfo.html">批量小说爬取</a></li>
<li><a class="link_4 " href="/user/favorites.html">单本小说爬取</a></li>-->
</ul>
</div>
<div class="my_r">
<div class="my_bookshelf">
<div class="title cf">
<h2 class="fl">单本采集任务列表</h2>
<div class="fr"><a href="/crawl/crawlSingleTask_add.html" class="btn_red">增加单本采集任务</a></div>
</div>
<div id="divData" class="updateTable">
<table cellpadding="0" cellspacing="0">
<thead>
<tr>
<th class="style">
序号
</th>
<th class="name">
采集小说名
</th>
<th class="name">
采集小说作者名
</th>
<th class="goread">
采集次数
</th>
<th class="goread">
状态
</th>
<th class="name">
创建时间
</th>
<th class="goread">
操作
</th>
</tr>
</thead>
<tbody id="crawlSourceList">
</tbody>
</table>
<div class="pageBox cf" id="shellPage">
</div>
</div>
<!--<div id="divData" class="updateTable">
<table cellpadding="0" cellspacing="0">
<thead>
<tr>
<th class="name">
爬虫源(已开启的爬虫源)
</th>
<th class="chapter">
成功爬取数量websocket实现
</th>
<th class="time">
目标爬取数量
</th>
<th class="goread">
状态(正在运行,已停止)(一次只能运行一个爬虫源)
</th>
<th class="goread">
操作(启动,停止)
</th>
</tr>
</thead>
<tbody id="bookShelfList">
</tbody>
</table>
<div class="pageBox cf" id="shellPage">
</div>
</div>-->
</div>
</div>
</div>
</div>
</body>
<script src="/javascript/jquery-1.8.0.min.js" type="text/javascript"></script>
<script src="/layui/layui.all.js" type="text/javascript"></script>
<script src="/javascript/header.js" type="text/javascript"></script>
<script src="/javascript/user.js" type="text/javascript"></script>
<script language="javascript" type="text/javascript">
search(1, 10);
function search(curr, limit) {
$.ajax({
type: "POST",
url: "/crawl/listCrawlSingleTaskByPage",
data: {'curr': curr, 'limit': limit},
dataType: "json",
success: function (data) {
if (data.code == 200) {
var crawlSourceList = data.data.list;
if (crawlSourceList.length > 0) {
var crawlSourceListHtml = "";
for (var i = 0; i < crawlSourceList.length; i++) {
var crawlSource = crawlSourceList[i];
crawlSourceListHtml += (" <tr class=\"book_list\" vals=\"291\">\n" +
" <td class=\"style bookclass\">\n" +
" [" + (i + 1) + "]\n" +
" </td>\n" +
" <td class=\"name\">\n" +
" " + crawlSource.bookName + "\n" +
" </td>\n" +
" <td class=\"name\">\n" +
" " + crawlSource.authorName + "\n" +
" </td>\n" +
" <td class=\"goread\">\n" +
" " + crawlSource.excCount + "\n" +
" </td>\n" +
" <td class=\"goread\">\n" +
" " + (crawlSource.taskStatus == 0 ? '采集失败' : (crawlSource.taskStatus == 1 ? '采集成功' : (crawlSource.excCount>0?'采集中':'排队中'))) + "\n" +
" </td>\n" +
" <td class=\"name\" valsc=\"291|2037554|1\">"
+ crawlSource.createTime + "</td>\n" +
" <td class=\"goread\" id='opt" + crawlSource.id + "'><a href='javascript:del(" + crawlSource.id + ")'>删除 </a></td> </tr>");
}
$("#crawlSourceList").html(crawlSourceListHtml);
layui.use('laypage', function () {
var laypage = layui.laypage;
//Render a laypage instance
laypage.render({
elem: 'shellPage' //note: this is an element ID, written without the # prefix
, count: data.data.total //total record count, returned by the server
, curr: data.data.pageNum
, limit: data.data.pageSize
, jump: function (obj, first) {
//obj contains all parameters of the current pagination, for example:
console.log(obj.curr); //current page, used to request that page's data from the server
console.log(obj.limit); //number of records per page
//not executed on the first render
if (!first) {
search(obj.curr, obj.limit);
} else {
}
}
});
});
}
} else {
layer.alert(data.msg);
}
},
error: function () {
layer.alert('网络异常');
}
})
}
function del(id) {
$.ajax({
type: "POST",
url: "/crawl/delCrawlSingleTask",
data: {'id': id},
dataType: "json",
success: function (data) {
if (data.code == 200) {
location.reload();
} else {
layer.alert(data.msg);
}
},
error: function () {
layer.alert('网络异常');
}
})
}
</script>
</html>

View File

@ -29,6 +29,7 @@
<ul class="log_list">
<li><a class="link_1 on" href="/">爬虫源管理</a></li>
<li><a class="link_1" href="/crawl/crawlSingleTask_list.html">单本采集管理</a></li>
<!--<li><a class="link_1 " href="/user/userinfo.html">批量小说爬取</a></li>
<li><a class="link_4 " href="/user/favorites.html">单本小说爬取</a></li>-->
</ul>

View File

@ -28,6 +28,7 @@
<ul class="log_list">
<li><a class="link_1 on" href="/">爬虫源管理</a></li>
<li><a class="link_1" href="/crawl/crawlSingleTask_list.html">单本采集管理</a></li>
<!-- <li><a class="link_1 " href="/user/userinfo.html">批量小说爬取</a></li>
<li><a class="link_4 " href="/user/favorites.html">单本小说爬取</a></li>-->
</ul>

View File

@ -5,7 +5,7 @@
<parent>
<artifactId>novel</artifactId>
<groupId>com.java2nb</groupId>
<version>2.1.2</version>
<version>2.6.2</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@ -27,6 +27,11 @@
<version>${jjwt.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-amqp</artifactId>
</dependency>
<dependency>
<groupId>io.searchbox</groupId>
@ -34,16 +39,45 @@
<version>${jest.version}</version>
</dependency>
<!--aliyunOSS-->
<dependency>
<groupId>com.aliyun.oss</groupId>
<artifactId>aliyun-sdk-oss</artifactId>
<version>${aliyun-sdk-oss.version}</version>
</dependency>
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>${commons-fileupload.version}</version>
</dependency>
<!--FastDfs-->
<dependency>
<groupId>com.github.tobato</groupId>
<artifactId>fastdfs-client</artifactId>
<version>${fastdfs-client.version}</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
<!-- Redisson distributed lock -->
<!-- <dependency>
<groupId>org.redisson</groupId>
<artifactId>redisson-spring-boot-starter</artifactId>
<version>${redisson.version}</version>
</dependency>-->
<dependency>
<groupId>com.alipay.sdk</groupId>
<artifactId>alipay-sdk-java</artifactId>
<version>4.9.153.ALL</version>
<version>${alipay-sdk-java.version}</version>
</dependency>
</dependencies>

View File

@ -1,10 +1,13 @@
package com.java2nb.novel;
import com.github.tobato.fastdfs.FdfsClientConfig;
import io.shardingsphere.shardingjdbc.spring.boot.SpringBootConfiguration;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
@ -18,6 +21,7 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
@EnableScheduling
@EnableCaching
@MapperScan(basePackages = {"com.java2nb.novel.mapper"})
@Import(FdfsClientConfig.class)
public class FrontNovelApplication {
public static void main(String[] args) {

View File

@ -4,14 +4,14 @@ import com.github.pagehelper.PageInfo;
import com.java2nb.novel.core.bean.ResultBean;
import com.java2nb.novel.core.bean.UserDetails;
import com.java2nb.novel.core.enums.ResponseStatus;
import com.java2nb.novel.entity.Book;
import com.java2nb.novel.entity.BookComment;
import com.java2nb.novel.entity.BookIndex;
import com.java2nb.novel.search.BookSP;
import com.java2nb.novel.service.BookService;
import com.java2nb.novel.vo.BookVO;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
@ -19,7 +19,6 @@ import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
@ -33,6 +32,11 @@ public class BookController extends BaseController{
private final BookService bookService;
private final RabbitTemplate rabbitTemplate;
@Value("${spring.rabbitmq.enable}")
private Integer enableMq;
/**
* Query the home page book settings list data
@ -105,7 +109,11 @@ public class BookController extends BaseController{
* */
@PostMapping("addVisitCount")
public ResultBean addVisitCount(Long bookId){
bookService.addVisitCount(bookId);
if(enableMq == 1) {
rabbitTemplate.convertAndSend("ADD-BOOK-VISIT-EXCHANGE", null, bookId);
}else {
bookService.addVisitCount(bookId, 1);
}
return ResultBean.ok();
}

View File

@ -0,0 +1,13 @@
package com.java2nb.novel.core.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableMBeanExport;
import org.springframework.jmx.support.RegistrationPolicy;
/**
* @author 11797
*/
@Configuration
@EnableMBeanExport(registration= RegistrationPolicy.IGNORE_EXISTING)
public class FdfsConfiguration {
}

View File

@ -0,0 +1,30 @@
package com.java2nb.novel.core.config;
import lombok.Data;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
/**
* @author 11797
*/
@Data
@Component
@ConfigurationProperties(prefix="novel.file")
public class OssProperties{
private String endpoint;
private String keyId;
private String keySecret;
private String fileHost;
private String bucketName;
private String webUrl;
}

View File

@ -0,0 +1,59 @@
package com.java2nb.novel.core.config;
import org.springframework.amqp.core.*;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* @author 11797
*/
@Configuration
@ConditionalOnProperty(prefix = "spring.rabbitmq", name = "enable", havingValue = "1")
public class RabbitConfig {
/**
* Queue for database updates
*/
@Bean
public Queue updateDbQueue() {
return new Queue("UPDATE-DB-QUEUE", true);
}
/**
* Queue for search engine updates
*/
@Bean
public Queue updateEsQueue() {
return new Queue("UPDATE-ES-QUEUE", true);
}
/**
* Fanout exchange for visit-count increments
*/
@Bean
public FanoutExchange addVisitExchange() {
return new FanoutExchange("ADD-BOOK-VISIT-EXCHANGE");
}
/**
* Bind the search engine update queue to the visit-count exchange
*/
@Bean
public Binding updateEsBinding() {
return BindingBuilder.bind(updateEsQueue()).to(addVisitExchange());
}
/**
* Bind the database update queue to the visit-count exchange
*/
@Bean
public Binding updateDbBinding() {
return BindingBuilder.bind(updateDbQueue()).to(addVisitExchange());
}
}
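Both the `@Value("${spring.rabbitmq.enable}")` field in BookController and the `@ConditionalOnProperty` guard above key on the same switch. A minimal sketch of that property in YAML form; the exact file and the surrounding RabbitMQ connection settings are assumptions, only the `spring.rabbitmq.enable = 1` switch comes from this diff:

```yaml
spring:
  rabbitmq:
    # 1 enables the MQ-based visit-count path guarded by @ConditionalOnProperty above
    enable: 1
    # host, port and credentials are not shown in this diff and are omitted here
```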

View File

@ -0,0 +1,107 @@
package com.java2nb.novel.core.listener;
import com.java2nb.novel.core.cache.CacheKey;
import com.java2nb.novel.core.cache.CacheService;
import com.java2nb.novel.core.utils.Constants;
import com.java2nb.novel.entity.Book;
import com.java2nb.novel.service.BookService;
import com.java2nb.novel.service.SearchService;
import com.java2nb.novel.vo.EsBookVO;
import com.rabbitmq.client.Channel;
import io.searchbox.client.JestClient;
import io.searchbox.core.Index;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.amqp.core.Message;
import org.springframework.beans.BeanUtils;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Component;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import java.text.SimpleDateFormat;
/**
* @author 11797
*/
@Component
@Slf4j
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "spring.rabbitmq", name = "enable", havingValue = "1")
public class BookVisitAddListener {
private final BookService bookService;
private final CacheService cacheService;
private final SearchService searchService;
// private final RedissonClient redissonClient;
/**
* Update the database.
* Traffic smoothing: the database is updated once for every 10 clicks accumulated per book
*/
@SneakyThrows
@RabbitListener(queues = {"UPDATE-DB-QUEUE"})
public void updateDb(Long bookId, Channel channel, Message message) {
log.debug("收到更新数据库消息:" + bookId);
Thread.sleep(1000 * 2);
//TODO visitCount is a shared resource and has thread-safety issues in a clustered deployment; introduce the Redisson framework for a distributed lock
//RLock lock = redissonClient.getLock("visitCount");
//lock.lock();
//For now visitCount is not critical and may be lost, so the distributed lock is not implemented yet
Integer visitCount = (Integer) cacheService.getObject(CacheKey.BOOK_ADD_VISIT_COUNT+bookId);
if(visitCount == null){
visitCount = 0 ;
}
cacheService.setObject(CacheKey.BOOK_ADD_VISIT_COUNT+bookId,++visitCount);
if(visitCount >= Constants.ADD_MAX_VISIT_COUNT) {
bookService.addVisitCount(bookId,visitCount);
cacheService.del(CacheKey.BOOK_ADD_VISIT_COUNT+bookId);
}
//TODO visitCount is a shared resource and has thread-safety issues in a clustered deployment; introduce the Redisson framework for a distributed lock
//lock.unlock();
}
/**
* Update the search engine.
* Traffic smoothing: each book is updated at most once per hour
*/
@RabbitListener(queues = {"UPDATE-ES-QUEUE"})
public void updateEs(Long bookId, Channel channel, Message message) {
log.debug("收到更新搜索引擎消息:" + bookId);
if (cacheService.get(CacheKey.ES_IS_UPDATE_VISIT + bookId) == null) {
cacheService.set(CacheKey.ES_IS_UPDATE_VISIT + bookId, "1", 60 * 60);
try {
Thread.sleep(1000 * 5);
Book book = bookService.queryBookDetail(bookId);
searchService.importToEs(book);
}catch (Exception e){
cacheService.del(CacheKey.ES_IS_UPDATE_VISIT + bookId);
log.error("更新搜索引擎失败"+bookId);
}
}
}
}

View File

@ -5,6 +5,7 @@ import com.java2nb.novel.core.cache.CacheService;
import com.java2nb.novel.core.utils.BeanUtil;
import com.java2nb.novel.entity.Book;
import com.java2nb.novel.service.BookService;
import com.java2nb.novel.service.SearchService;
import com.java2nb.novel.vo.EsBookVO;
import io.searchbox.client.JestClient;
import io.searchbox.core.DocumentResult;
@ -37,16 +38,20 @@ public class BookToEsSchedule {
private final CacheService cacheService;
private final JestClient jestClient;
private final SearchService searchService;
/**
* Import every 10 seconds
* Import once per minute
*/
@Scheduled(fixedRate = 1000 * 10)
@Scheduled(fixedRate = 1000 * 60)
public void saveToEs() {
//TODO introduce the Redisson framework for a distributed lock
//Duplicate updates are harmless and only slightly less efficient, so the distributed lock is not implemented yet
if (cacheService.get(CacheKey.ES_TRANS_LOCK) == null) {
cacheService.set(CacheKey.ES_TRANS_LOCK, "1", 60 * 5);
cacheService.set(CacheKey.ES_TRANS_LOCK, "1", 60 * 20);
try {
//Query the books that need updating
Date lastDate = (Date) cacheService.getObject(CacheKey.ES_LAST_UPDATE_TIME);
@ -57,17 +62,9 @@ public class BookToEsSchedule {
List<Book> books = bookService.queryBookByUpdateTimeByPage(lastDate, 100);
for (Book book : books) {
//Import into ES
EsBookVO esBookVO = new EsBookVO();
BeanUtils.copyProperties(book, esBookVO, "lastIndexUpdateTime");
esBookVO.setLastIndexUpdateTime(new SimpleDateFormat("yyyy/MM/dd HH:mm").format(book.getLastIndexUpdateTime()));
Index action = new Index.Builder(esBookVO).index("novel").type("book").id(book.getId().toString()).build();
jestClient.execute(action);
searchService.importToEs(book);
lastDate = book.getUpdateTime();
Thread.sleep(1000);
Thread.sleep(5000);
}

View File

@ -1,5 +1,6 @@
package com.java2nb.novel.core.schedule;
import com.java2nb.novel.core.utils.Constants;
import com.java2nb.novel.entity.Book;
import com.java2nb.novel.service.BookService;
import lombok.RequiredArgsConstructor;
@ -13,7 +14,7 @@ import org.springframework.stereotype.Service;
import java.util.List;
/**
* Task that copies remote images to local storage
* Task that copies crawled remote images to the configured storage medium (local, OSS, or FastDFS)
*
* @author Administrator
*/
@ -21,7 +22,7 @@ import java.util.List;
@Service
@RequiredArgsConstructor
@Slf4j
public class Network2LocalPicSchedule {
public class CrawlPicTransSchedule {
private final BookService bookService;
@ -41,7 +42,7 @@ public class Network2LocalPicSchedule {
log.info("Network2LocalPicSchedule。。。。。。。。。。。。");
List<Book> networkPicBooks = bookService.queryNetworkPicBooks(100);
List<Book> networkPicBooks = bookService.queryNetworkPicBooks(Constants.LOCAL_PIC_PREFIX,100);
for (Book book : networkPicBooks) {
bookService.updateBookPicToLocal(book.getPicUrl(), book.getId());
//Convert one image every 3 seconds, i.e. about 200 images per 10 minutes

View File

@ -18,19 +18,20 @@ public class BrowserUtil {
"lg-d", "lg-g", "lge-", "maui", "maxo", "midp", "mits", "mmef", "mobi", "mot-", "moto", "mwbp", "nec-",
"newt", "noki", "oper", "palm", "pana", "pant", "phil", "play", "port", "prox", "qwap", "sage", "sams",
"sany", "sch-", "sec-", "send", "seri", "sgh-", "shar", "sie-", "siem", "smal", "smar", "sony", "sph-",
"symb", "t-mo", "teli", "tim-", "tosh", "tsm-", "upg1", "upsi", "vk-v", "voda", "wap-", "wapa", "wapi",
"symb", "t-mo", "teli", "tim-", "tsm-", "upg1", "upsi", "vk-v", "voda", "wap-", "wapa", "wapi",
"wapp", "wapr", "webc", "winw", "winw", "xda", "xda-", "Googlebot-Mobile" };
/**
*
* @Title: JudgelsMobile @Description: TODO(判断是否是手机浏览器) @param @param
* request @param @return 设定文件 @return boolean 返回类型 @throws
* Determine whether the request comes from a mobile browser
*/
public static boolean isMobile(HttpServletRequest request) {
boolean isMobile = false;
if (request.getHeader("User-Agent") != null) {
String userAgent = request.getHeader("User-Agent");
if (userAgent != null) {
userAgent = userAgent.toLowerCase();
for (String mobileAgent : mobileAgents) {
if (request.getHeader("User-Agent").toLowerCase().indexOf(mobileAgent) > 0) {
if (userAgent.indexOf(mobileAgent) > 0) {
isMobile = true;
break;
}

View File

@ -12,7 +12,7 @@ public class Constants {
public static final String TEMPLATE_PATH_PREFIX_KEY = "templatePathPrefixKey";
/**
* Local image path prefix
* Path prefix for images saved to the project's own storage medium
* */
public static final String LOCAL_PIC_PREFIX = "/localPic/";
@ -30,4 +30,9 @@ public class Constants {
* Number of books configured on the home page
* */
public static final int INDEX_BOOK_SETTING_NUM = 32;
/**
* Maximum accumulated visit count
* */
public static final Integer ADD_MAX_VISIT_COUNT = 10;
}

View File

@ -16,13 +16,13 @@ public interface FrontBookMapper extends BookMapper {
List<BookVO> searchByPage(BookSP params);
void addVisitCount(@Param("bookId") Long bookId, @Param("date") Date date);
void addVisitCount(@Param("bookId") Long bookId, @Param("visitCount") Integer visitCount);
List<Book> listRecBookByCatId(@Param("catId") Integer catId);
void addCommentCount(@Param("bookId") Long bookId);
List<Book> queryNetworkPicBooks(@Param("limit") Integer limit);
List<Book> queryNetworkPicBooks(@Param("localPicPrefix") String localPicPrefix, @Param("limit") Integer limit);
/**
* 按评分随机查询小说集合

View File

@ -112,8 +112,8 @@ public interface BookService {
/**
* Increase the visit count
* @param bookId book ID
* */
void addVisitCount(Long bookId);
* @param visitCount*/
void addVisitCount(Long bookId, Integer visitCount);
/**
* Query the chapter count
@ -180,15 +180,17 @@ public interface BookService {
/**
* Query books whose cover is still a remote (crawled) image
*
* @param localPicPrefix
* @param limit number of rows to query
* @return list of books
* */
List<Book> queryNetworkPicBooks(Integer limit);
List<Book> queryNetworkPicBooks(String localPicPrefix, Integer limit);
/**
* Copy the book's remote image to local storage
* @param picUrl remote image URL
* Copy the crawled remote book image to the project's own storage medium (local, OSS, or FastDFS)
* @param picUrl crawled remote image URL
* @param bookId book ID
*/
void updateBookPicToLocal(String picUrl, Long bookId);
@ -198,6 +200,7 @@ public interface BookService {
* @param userId user ID
* @param page page number
* @param pageSize page size
* @return list of novels
* */
List<Book> listBookPageByUserId(Long userId, int page, int pageSize);

View File

@ -0,0 +1,17 @@
package com.java2nb.novel.service;
/**
* @author 11797
*/
public interface FileService {
/**
* Transfer a crawled network picture to the configured storage backend (local, OSS, fastDfs)
* @param picSrc crawled network picture URL
* @param picSavePath local save path
* @return new picture URL
* */
String transFile(String picSrc, String picSavePath);
}
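The `FileService` implementations added further down (local, OSS, FastDFS) are all selected through the `pic.save.storage` property with `@ConditionalOnProperty`, so only one backend bean is created at a time. To illustrate the extension point, a hypothetical extra backend would follow the same pattern (the `minio` value and class name are invented for this sketch):

```
// Hypothetical additional backend, enabled with pic.save.storage=minio.
// It only demonstrates the selection pattern; the body just reuses the shared helper.
@Service
@ConditionalOnProperty(prefix = "pic.save", name = "storage", havingValue = "minio")
public class MinioFileServiceImpl implements FileService {

    @Override
    public String transFile(String picSrc, String picSavePath) {
        // download the crawled picture; a real implementation would then upload it to MinIO
        return FileUtil.network2Local(picSrc, picSavePath, Constants.LOCAL_PIC_PREFIX);
    }
}
```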

View File

@ -0,0 +1,27 @@
package com.java2nb.novel.service;
import com.github.pagehelper.PageInfo;
import com.java2nb.novel.entity.Book;
import com.java2nb.novel.search.BookSP;
/**
* @author 11797
*/
public interface SearchService {
/**
* Import a novel into Elasticsearch
* @param book novel data
*/
void importToEs(Book book);
/**
* Search novels
* @param params search parameters
* @param page current page number
* @param pageSize page size
* @return pagination result
*/
PageInfo searchBook(BookSP params, int page, int pageSize);
}
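A short usage sketch of the new interface, assuming a caller that indexes a saved book and then runs a keyword search (parameter values are illustrative, and the usual getter/setter pairs on `BookSP` are assumed):

```
// Make a freshly saved or updated book searchable.
searchService.importToEs(book);

// Keyword search with an optional category filter; returns PageHelper's PageInfo.
BookSP params = new BookSP();
params.setKeyword("修真");
params.setCatId(2);
PageInfo result = searchService.searchBook(params, 1, 20);
```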

View File

@ -1,6 +1,5 @@
package com.java2nb.novel.service.impl;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
@ -15,21 +14,15 @@ import com.java2nb.novel.mapper.*;
import com.java2nb.novel.search.BookSP;
import com.java2nb.novel.service.AuthorService;
import com.java2nb.novel.service.BookService;
import com.java2nb.novel.service.FileService;
import com.java2nb.novel.service.SearchService;
import com.java2nb.novel.vo.BookCommentVO;
import com.java2nb.novel.vo.BookSettingVO;
import com.java2nb.novel.vo.BookVO;
import com.java2nb.novel.vo.EsBookVO;
import io.searchbox.client.JestClient;
import io.searchbox.core.*;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.mybatis.dynamic.sql.SortSpecification;
import org.mybatis.dynamic.sql.render.RenderingStrategies;
import org.mybatis.dynamic.sql.select.render.SelectStatementProvider;
@ -39,7 +32,6 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import tk.mybatis.orderbyhelper.OrderByHelper;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@ -89,7 +81,9 @@ public class BookServiceImpl implements BookService {
private final AuthorService authorService;
private final JestClient jestClient;
private final SearchService searchService;
private final FileService fileService;
@SneakyThrows
@ -200,139 +194,8 @@ public class BookServiceImpl implements BookService {
try {
List<EsBookVO> bookList = new ArrayList<>(0);
return searchService.searchBook(params,page,pageSize);
//search via the search engine
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
// build the query over the keyword fields
if (StringUtils.isNoneBlank(params.getKeyword())) {
boolQueryBuilder = boolQueryBuilder.must(QueryBuilders.queryStringQuery(params.getKeyword()));
}
// work direction
if (params.getWorkDirection() != null) {
boolQueryBuilder.filter(QueryBuilders.termQuery("workDirection", params.getWorkDirection()));
}
// category
if (params.getCatId() != null) {
boolQueryBuilder.filter(QueryBuilders.termQuery("catId", params.getCatId()));
}
if (params.getBookStatus() != null) {
boolQueryBuilder.filter(QueryBuilders.termQuery("bookStatus", params.getBookStatus()));
}
if (params.getWordCountMin() == null) {
params.setWordCountMin(0);
}
if (params.getWordCountMax() == null) {
params.setWordCountMax(Integer.MAX_VALUE);
}
boolQueryBuilder.filter(QueryBuilders.rangeQuery("wordCount").gte(params.getWordCountMin()).lte(params.getWordCountMax()));
if (params.getUpdateTimeMin() != null) {
boolQueryBuilder.filter(QueryBuilders.rangeQuery("lastIndexUpdateTime").gte(params.getUpdateTimeMin()));
}
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.query(boolQueryBuilder);
Count count = new Count.Builder().addIndex("novel").addType("book")
.query(searchSourceBuilder.toString()).build();
CountResult results = jestClient.execute(count);
Double total = results.getCount();
// highlighted fields
HighlightBuilder highlightBuilder = new HighlightBuilder();
highlightBuilder.field("authorName");
highlightBuilder.field("bookName");
highlightBuilder.field("bookDesc");
highlightBuilder.field("lastIndexName");
highlightBuilder.field("catName");
highlightBuilder.preTags("<span style='color:red'>").postTags("</span>");
highlightBuilder.fragmentSize(20000);
searchSourceBuilder.highlighter(highlightBuilder);
//set sorting
if (params.getSort() != null) {
searchSourceBuilder.sort(StringUtil.camelName(params.getSort()), SortOrder.DESC);
}
// set pagination
searchSourceBuilder.from((page - 1) * pageSize);
searchSourceBuilder.size(pageSize);
// build the Search object
Search search = new Search.Builder(searchSourceBuilder.toString()).addIndex("novel").addType("book").build();
log.debug(search.toString());
SearchResult result;
result = jestClient.execute(search);
if (result.isSucceeded()) {
log.debug(result.getJsonString());
Map resultMap = new ObjectMapper().readValue(result.getJsonString(), Map.class);
if (resultMap.get("hits") != null) {
Map hitsMap = (Map) resultMap.get("hits");
if (hitsMap.size() > 0 && hitsMap.get("hits") != null) {
List hitsList = (List) hitsMap.get("hits");
if (hitsList.size() > 0 && result.getSourceAsString() != null) {
JavaType jt = new ObjectMapper().getTypeFactory().constructParametricType(ArrayList.class, EsBookVO.class);
bookList = new ObjectMapper().readValue("[" + result.getSourceAsString() + "]", jt);
if (bookList != null) {
for (int i = 0; i < bookList.size(); i++) {
hitsMap = (Map) hitsList.get(i);
Map highlightMap = (Map) hitsMap.get("highlight");
if (highlightMap != null && highlightMap.size() > 0) {
List<String> authorNameList = (List<String>) highlightMap.get("authorName");
if (authorNameList != null && authorNameList.size() > 0) {
bookList.get(i).setAuthorName(authorNameList.get(0));
}
List<String> bookNameList = (List<String>) highlightMap.get("bookName");
if (bookNameList != null && bookNameList.size() > 0) {
bookList.get(i).setBookName(bookNameList.get(0));
}
List<String> bookDescList = (List<String>) highlightMap.get("bookDesc");
if (bookDescList != null && bookDescList.size() > 0) {
bookList.get(i).setBookDesc(bookDescList.get(0));
}
List<String> lastIndexNameList = (List<String>) highlightMap.get("lastIndexName");
if (lastIndexNameList != null && lastIndexNameList.size() > 0) {
bookList.get(i).setLastIndexName(lastIndexNameList.get(0));
}
List<String> catNameList = (List<String>) highlightMap.get("catName");
if (catNameList != null && catNameList.size() > 0) {
bookList.get(i).setCatName(catNameList.get(0));
}
}
}
}
}
}
}
PageInfo<EsBookVO> pageInfo = new PageInfo<>(bookList);
pageInfo.setTotal(total.longValue());
pageInfo.setPageNum(page);
pageInfo.setPageSize(pageSize);
return pageInfo;
}
}catch (Exception e){
log.error(e.getMessage(),e);
}
@ -479,8 +342,8 @@ public class BookServiceImpl implements BookService {
}
@Override
public void addVisitCount(Long bookId) {
bookMapper.addVisitCount(bookId, new Date());
public void addVisitCount(Long bookId, Integer visitCount) {
bookMapper.addVisitCount(bookId,visitCount);
}
@Override
@ -600,14 +463,14 @@ public class BookServiceImpl implements BookService {
}
@Override
public List<Book> queryNetworkPicBooks(Integer limit) {
return bookMapper.queryNetworkPicBooks(limit);
public List<Book> queryNetworkPicBooks(String localPicPrefix, Integer limit) {
return bookMapper.queryNetworkPicBooks(localPicPrefix,limit);
}
@Override
public void updateBookPicToLocal(String picUrl, Long bookId) {
picUrl = FileUtil.network2Local(picUrl, picSavePath, Constants.LOCAL_PIC_PREFIX);
picUrl = fileService.transFile(picUrl, picSavePath);
bookMapper.update(update(book)
.set(BookDynamicSqlSupport.picUrl)

View File

@ -0,0 +1,60 @@
package com.java2nb.novel.service.impl;
import com.github.tobato.fastdfs.domain.StorePath;
import com.github.tobato.fastdfs.service.FastFileStorageClient;
import com.java2nb.novel.core.utils.Constants;
import com.java2nb.novel.core.utils.FileUtil;
import com.java2nb.novel.service.FileService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FilenameUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import java.io.File;
import java.io.FileInputStream;
/**
* @author 11797
*/
@Service
@RequiredArgsConstructor
@Slf4j
@ConditionalOnProperty(prefix = "pic.save", name = "storage", havingValue = "fastDfs")
public class FastDfsFileServiceImpl implements FileService {
private final FastFileStorageClient storageClient;
@Value("${fdfs.webUrl}")
private String webUrl;
@Override
public String transFile(String picSrc, String picSavePath) {
File file;
String filePath = FileUtil.network2Local(picSrc, picSavePath, Constants.LOCAL_PIC_PREFIX);
if (filePath.contains(Constants.LOCAL_PIC_PREFIX)) {
file = new File(picSavePath + filePath);
} else {
//the default picture is not stored
return filePath;
}
try {
FileInputStream inputStream = new FileInputStream(file);
StorePath storePath = storageClient.uploadFile(inputStream, file.length(),
FilenameUtils.getExtension(file.getName()), null);
//LOCAL_PIC_PREFIX is prepended here to mark the picture as an own resource rather than a crawled network resource, so it will not be transferred again
// at access time an nginx rewrite rule strips LOCAL_PIC_PREFIX from the path
return webUrl+Constants.LOCAL_PIC_PREFIX+storePath.getFullPath();
} catch (Exception e) {
log.error(e.getMessage(), e);
} finally {
//delete the temporary local file
file.delete();
}
return "/images/default.gif";
}
}

View File

@ -40,7 +40,7 @@ public class FriendLinkServiceImpl implements FriendLinkService {
.build()
.render(RenderingStrategies.MYBATIS3);
result = friendLinkMapper.selectMany(selectStatement);
cacheService.setObject(CacheKey.INDEX_LINK_KEY,result);
cacheService.setObject(CacheKey.INDEX_LINK_KEY,result,60 * 60 * 24);
}
return result;
}

View File

@ -0,0 +1,23 @@
package com.java2nb.novel.service.impl;
import com.java2nb.novel.core.utils.Constants;
import com.java2nb.novel.core.utils.FileUtil;
import com.java2nb.novel.service.FileService;
import lombok.RequiredArgsConstructor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
/**
* @author 11797
*/
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "pic.save", name = "storage", havingValue = "local")
public class LocalFileServiceImpl implements FileService {
@Override
public String transFile(String picSrc, String picSavePath){
return FileUtil.network2Local(picSrc, picSavePath, Constants.LOCAL_PIC_PREFIX);
}
}

View File

@ -42,7 +42,7 @@ public class NewsServiceImpl implements NewsService {
.build()
.render(RenderingStrategies.MYBATIS3);
result = newsMapper.selectMany(selectStatement);
cacheService.setObject(CacheKey.INDEX_NEWS_KEY,result);
cacheService.setObject(CacheKey.INDEX_NEWS_KEY,result,60 * 60 * 12);
}
return result;
}

View File

@ -0,0 +1,76 @@
package com.java2nb.novel.service.impl;
import com.aliyun.oss.OSSClient;
import com.aliyun.oss.model.CannedAccessControlList;
import com.aliyun.oss.model.CreateBucketRequest;
import com.aliyun.oss.model.PutObjectRequest;
import com.aliyun.oss.model.PutObjectResult;
import com.java2nb.novel.core.config.OssProperties;
import com.java2nb.novel.core.utils.Constants;
import com.java2nb.novel.core.utils.FileUtil;
import com.java2nb.novel.service.FileService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import java.io.File;
/**
* @author 11797
*/
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "pic.save", name = "storage", havingValue = "OSS")
@Slf4j
public class OssFileServiceImpl implements FileService {
private final OssProperties ossProperties;
@Override
public String transFile(String picSrc, String picSavePath) {
File file;
String filePath = FileUtil.network2Local(picSrc, picSavePath, Constants.LOCAL_PIC_PREFIX);
if (filePath.contains(Constants.LOCAL_PIC_PREFIX)) {
file = new File(picSavePath+filePath);
} else {
//the default picture is not stored
return filePath;
}
filePath = filePath.replaceFirst(picSavePath,"");
filePath = filePath.startsWith("/") ? filePath.replaceFirst("/","") : filePath;
OSSClient ossClient = new OSSClient(ossProperties.getEndpoint(), ossProperties.getKeyId(), ossProperties.getKeySecret());
try {
//create the bucket if it does not exist
if (!ossClient.doesBucketExist(ossProperties.getBucketName())) {
ossClient.createBucket(ossProperties.getBucketName());
CreateBucketRequest createBucketRequest = new CreateBucketRequest(ossProperties.getBucketName());
createBucketRequest.setCannedACL(CannedAccessControlList.PublicRead);
ossClient.createBucket(createBucketRequest);
}
//upload the file
PutObjectResult result = ossClient.putObject(new PutObjectRequest(ossProperties.getBucketName(), filePath, file));
//set the ACL to public read
ossClient.setBucketAcl(ossProperties.getBucketName(), CannedAccessControlList.PublicRead);
if(result != null) {
return ossProperties.getWebUrl() + "/" + filePath;
}
} catch (Exception e) {
log.error(e.getMessage(), e);
} finally {
//shut down the client
ossClient.shutdown();
file.delete();
}
return "/images/default.gif";
}
}

View File

@ -0,0 +1,202 @@
package com.java2nb.novel.service.impl;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.pagehelper.PageInfo;
import com.java2nb.novel.core.enums.ResponseStatus;
import com.java2nb.novel.core.exception.BusinessException;
import com.java2nb.novel.core.utils.StringUtil;
import com.java2nb.novel.entity.Book;
import com.java2nb.novel.search.BookSP;
import com.java2nb.novel.service.SearchService;
import com.java2nb.novel.vo.EsBookVO;
import io.searchbox.client.JestClient;
import io.searchbox.core.*;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* @author 11797
*/
@Service
@RequiredArgsConstructor
@Slf4j
public class SearchServiceImpl implements SearchService {
private final String INDEX = "novel";
private final String TYPE = "book";
private final JestClient jestClient;
@Override
@SneakyThrows
public void importToEs(Book book) {
//import into ES
EsBookVO esBookVO = new EsBookVO();
BeanUtils.copyProperties(book, esBookVO, "lastIndexUpdateTime");
esBookVO.setLastIndexUpdateTime(new SimpleDateFormat("yyyy/MM/dd HH:mm").format(book.getLastIndexUpdateTime()));
Index action = new Index.Builder(esBookVO).index(INDEX).type(TYPE).id(book.getId().toString()).build();
jestClient.execute(action);
}
@SneakyThrows
@Override
public PageInfo searchBook(BookSP params, int page, int pageSize) {
List<EsBookVO> bookList = new ArrayList<>(0);
//search via the search engine
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
// build the query over the keyword fields
if (StringUtils.isNoneBlank(params.getKeyword())) {
boolQueryBuilder = boolQueryBuilder.must(QueryBuilders.queryStringQuery(params.getKeyword()));
}
// work direction
if (params.getWorkDirection() != null) {
boolQueryBuilder.filter(QueryBuilders.termQuery("workDirection", params.getWorkDirection()));
}
// category
if (params.getCatId() != null) {
boolQueryBuilder.filter(QueryBuilders.termQuery("catId", params.getCatId()));
}
if (params.getBookStatus() != null) {
boolQueryBuilder.filter(QueryBuilders.termQuery("bookStatus", params.getBookStatus()));
}
if (params.getWordCountMin() == null) {
params.setWordCountMin(0);
}
if (params.getWordCountMax() == null) {
params.setWordCountMax(Integer.MAX_VALUE);
}
boolQueryBuilder.filter(QueryBuilders.rangeQuery("wordCount").gte(params.getWordCountMin()).lte(params.getWordCountMax()));
if (params.getUpdateTimeMin() != null) {
boolQueryBuilder.filter(QueryBuilders.rangeQuery("lastIndexUpdateTime").gte(new SimpleDateFormat("yyyy/MM/dd HH:mm").format(params.getUpdateTimeMin())));
}
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.query(boolQueryBuilder);
Count count = new Count.Builder().addIndex(INDEX).addType(TYPE)
.query(searchSourceBuilder.toString()).build();
CountResult results = jestClient.execute(count);
Double total = results.getCount();
// highlighted fields
HighlightBuilder highlightBuilder = new HighlightBuilder();
highlightBuilder.field("authorName");
highlightBuilder.field("bookName");
highlightBuilder.field("bookDesc");
highlightBuilder.field("lastIndexName");
highlightBuilder.field("catName");
highlightBuilder.preTags("<span style='color:red'>").postTags("</span>");
highlightBuilder.fragmentSize(20000);
searchSourceBuilder.highlighter(highlightBuilder);
//set sorting
if (params.getSort() != null) {
searchSourceBuilder.sort(StringUtil.camelName(params.getSort()), SortOrder.DESC);
}
// set pagination
searchSourceBuilder.from((page - 1) * pageSize);
searchSourceBuilder.size(pageSize);
// build the Search object
Search search = new Search.Builder(searchSourceBuilder.toString()).addIndex(INDEX).addType(TYPE).build();
log.debug(search.toString());
SearchResult result;
result = jestClient.execute(search);
log.debug(result.getJsonString());
if (result.isSucceeded()) {
Map resultMap = new ObjectMapper().readValue(result.getJsonString(), Map.class);
if (resultMap.get("hits") != null) {
Map hitsMap = (Map) resultMap.get("hits");
if (hitsMap.size() > 0 && hitsMap.get("hits") != null) {
List hitsList = (List) hitsMap.get("hits");
if (hitsList.size() > 0 && result.getSourceAsString() != null) {
JavaType jt = new ObjectMapper().getTypeFactory().constructParametricType(ArrayList.class, EsBookVO.class);
bookList = new ObjectMapper().readValue("[" + result.getSourceAsString() + "]", jt);
if (bookList != null) {
for (int i = 0; i < bookList.size(); i++) {
hitsMap = (Map) hitsList.get(i);
Map highlightMap = (Map) hitsMap.get("highlight");
if (highlightMap != null && highlightMap.size() > 0) {
List<String> authorNameList = (List<String>) highlightMap.get("authorName");
if (authorNameList != null && authorNameList.size() > 0) {
bookList.get(i).setAuthorName(authorNameList.get(0));
}
List<String> bookNameList = (List<String>) highlightMap.get("bookName");
if (bookNameList != null && bookNameList.size() > 0) {
bookList.get(i).setBookName(bookNameList.get(0));
}
List<String> bookDescList = (List<String>) highlightMap.get("bookDesc");
if (bookDescList != null && bookDescList.size() > 0) {
bookList.get(i).setBookDesc(bookDescList.get(0));
}
List<String> lastIndexNameList = (List<String>) highlightMap.get("lastIndexName");
if (lastIndexNameList != null && lastIndexNameList.size() > 0) {
bookList.get(i).setLastIndexName(lastIndexNameList.get(0));
}
List<String> catNameList = (List<String>) highlightMap.get("catName");
if (catNameList != null && catNameList.size() > 0) {
bookList.get(i).setCatName(catNameList.get(0));
}
}
}
}
}
}
}
PageInfo<EsBookVO> pageInfo = new PageInfo<>(bookList);
pageInfo.setTotal(total.longValue());
pageInfo.setPageNum(page);
pageInfo.setPageSize(pageSize);
return pageInfo;
}
throw new BusinessException(ResponseStatus.ES_SEARCH_FAIL);
}
}

View File

@ -0,0 +1,16 @@
#FastDfs configuration ====================================
#blocking timeout when reading the input stream
fdfs:
connect-timeout: 600
so-timeout: 1500
#tracker address
trackerList: 127.0.0.1:22122
#thumbnail configuration
thumbImage:
height: 150
width: 150
#web access URL (served via nginx)
webUrl: http://127.0.0.1/
#maximum size of the connection pool
pool.max-total: 200

View File

@ -0,0 +1,9 @@
#OSS
novel:
file:
endpoint: oss-cn-shenzhen.aliyuncs.com #varies by region/server
key-id: dhjjrue6767778878 #obtain from the OSS console
key-secret: dssdkkrkelrkellk44554 #obtain from the OSS console
bucket-name: novel #the name chosen when creating the bucket (via the console or via code)
file-host: pic #file path
web-url: https://xxyimg.oss-cn-hangzhou.aliyuncs.com #web access URL of the OSS files
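`OssFileServiceImpl` above reads these values through an `OssProperties` bean that is not part of this diff; a plausible shape for it, assuming Spring Boot `@ConfigurationProperties` binding on the `novel.file` prefix used in this file:

```
// Plausible binding class (assumed, not shown in this changeset); the field names
// mirror the YAML keys above and the getters used in OssFileServiceImpl.
@Data
@Component
@ConfigurationProperties(prefix = "novel.file")
public class OssProperties {
    private String endpoint;
    private String keyId;
    private String keySecret;
    private String bucketName;
    private String fileHost;
    private String webUrl;
}
```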

View File

@ -4,7 +4,19 @@ server:
spring:
profiles:
active: dev
include: alipay
include: alipay,oss,fastdfs
rabbitmq:
enable: 0
host: 127.0.0.1
username: guest
password: guest
virtual-host: /novel-plus
template:
# default exchange name; messages sent without an explicit exchange use this one
exchange: novel.exchange
publisher-confirms: false
elasticsearch:
@ -13,7 +25,9 @@ spring:
jest:
uris: http://127.0.0.1:9200
redisson:
singleServerConfig:
address: 127.0.0.1:6379
jwt:
secret: novel!#20191230
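The `rabbitmq` block above declares a default exchange (`novel.exchange`), and the new `addVisitCount(bookId, visitCount)` API accepts an already accumulated count. One plausible wiring is a background consumer that applies those counts; the queue name and message class below are assumptions for illustration, only the Spring AMQP annotation is standard:

```
// Hypothetical consumer that applies accumulated visit counts delivered over RabbitMQ.
// "novel.visit.count.queue" and VisitCountMessage are invented names for this sketch.
@Component
@RequiredArgsConstructor
public class VisitCountListener {

    private final BookService bookService;

    @RabbitListener(queues = "novel.visit.count.queue")
    public void onVisitCount(VisitCountMessage msg) {
        bookService.addVisitCount(msg.getBookId(), msg.getVisitCount());
    }
}
```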

View File

@ -35,7 +35,7 @@
</select>
<update id="addVisitCount" >
update book set visit_count = visit_count + 1 , update_time = #{date}
update book set visit_count = visit_count + ${visitCount}
where id = #{bookId}
</update>
@ -56,6 +56,7 @@
select
id,pic_url from book
where pic_url like 'http%'
and pic_url not like concat('%',#{localPicPrefix},'%')
limit #{limit}
</select>

View File

@ -159,7 +159,7 @@
if(sortBy != false){
searchData.sort = sortBy;
}else{
}else if(!keyword){
sortBy = "last_index_update_time";
}

View File

@ -58,13 +58,13 @@
<ul class="layui-nav app" lay-filter="" style="display:none;padding:0 10px;text-align: center">
<li class="layui-nav-item"><a>分类</a>
<dl class="layui-nav-child">
<dd><a href="/book/book_ranking.html?catId=1">玄幻小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=2">修真小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=3">都市小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=4">历史小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=6">网游小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=5">科幻小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=7">女频小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=1&sort=last_index_update_time">玄幻小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=2&sort=last_index_update_time">修真小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=3&sort=last_index_update_time">都市小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=4&sort=last_index_update_time">历史小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=6&sort=last_index_update_time">网游小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=5&sort=last_index_update_time">科幻小说</a></dd>
<dd><a href="/book/book_ranking.html?catId=7&sort=last_index_update_time">女频小说</a></dd>
</dl>
</li>
<li class="layui-nav-item"><a>全本</a>
@ -95,13 +95,13 @@
</ul>
<ul class="layui-nav pc" lay-filter="" style="padding:0 36px;text-align: center">
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=1">玄幻小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=2">修真小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=3">都市小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=4">历史小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=6">网游小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=5">科幻小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=7">女频小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=1&sort=last_index_update_time">玄幻小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=2&sort=last_index_update_time">修真小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=3&sort=last_index_update_time">都市小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=4&sort=last_index_update_time">历史小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=6&sort=last_index_update_time">网游小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=5&sort=last_index_update_time">科幻小说</a></li>
<li class="layui-nav-item"><a href="/book/book_ranking.html?catId=7&sort=last_index_update_time">女频小说</a></li>
<li class="layui-nav-item"><a>完本小说</a>
<dl class="layui-nav-child"> <!-- 二级菜单 -->
<dd><a href="/book/book_ranking.html?bookStatus=1">全部小说</a></dd>

View File

@ -5,7 +5,7 @@
<groupId>com.java2nb</groupId>
<artifactId>novel</artifactId>
<version>2.1.2</version>
<version>2.6.2</version>
<modules>
<module>novel-common</module>
<module>novel-front</module>
@ -39,6 +39,12 @@
<elasticsearch.version>6.2.2</elasticsearch.version>
<jest.version>6.3.1</jest.version>
<redis.version>1.4.1.RELEASE</redis.version>
<redisson.version>3.12.5</redisson.version>
<sharding.jdbc.version>3.0.0</sharding.jdbc.version>
<aliyun-sdk-oss.version>2.4.0</aliyun-sdk-oss.version>
<commons-fileupload.version>1.3.1</commons-fileupload.version>
<fastdfs-client.version>1.26.1-RELEASE</fastdfs-client.version>
<alipay-sdk-java.version>4.9.153.ALL</alipay-sdk-java.version>
</properties>
<dependencyManagement>

2
sql/20200608.sql Normal file
View File

@ -0,0 +1,2 @@
INSERT INTO `crawl_source` (`id`, `source_name`, `crawl_rule`, `source_status`, `create_time`, `update_time`) VALUES
(6, '新笔趣阁', '{\n \"bookListUrl\": \"http://www.xbiquge.la/fenlei/{catId}_{page}.html\",\n \"catIdRule\": {\n \"catId1\": \"1\",\n \"catId2\": \"2\",\n \"catId3\": \"3\",\n \"catId4\": \"4\",\n \"catId5\": \"6\",\n \"catId6\": \"5\"\n },\n \"bookIdPatten\": \"<a\\\\s+href=\\\"http://www.xbiquge.la/(\\\\d+/\\\\d+)/\\\"\\\\s+target=\\\"_blank\\\">\",\n \"pagePatten\": \"<em\\\\s+id=\\\"pagestats\\\">(\\\\d+)/\\\\d+</em>\",\n \"totalPagePatten\": \"<em\\\\s+id=\\\"pagestats\\\">\\\\d+/(\\\\d+)</em>\",\n \"bookDetailUrl\": \"http://www.xbiquge.la/{bookId}/\",\n \"bookNamePatten\": \"<h1>([^/]+)</h1>\",\n \"authorNamePatten\": \"者:([^/]+)</p>\",\n \"picUrlPatten\": \"src=\\\"(http://www.xbiquge.la/files/article/image/\\\\d+/\\\\d+/\\\\d+s\\\\.jpg)\\\"\",\n \"bookStatusRule\": {},\n \"descStart\": \"<div id=\\\"intro\\\">\",\n \"descEnd\": \"</div>\",\n \"upadateTimePatten\": \"<p>最后更新:(\\\\d+-\\\\d+-\\\\d+\\\\s\\\\d+:\\\\d+:\\\\d+)</p>\",\n \"upadateTimeFormatPatten\": \"yyyy-MM-dd HH:mm:ss\",\n \"bookIndexUrl\": \"http://www.xbiquge.la/{bookId}/\",\n \"indexIdPatten\": \"<a\\\\s+href=\'/\\\\d+/\\\\d+/(\\\\d+)\\\\.html\'\\\\s+>[^/]+</a>\",\n \"indexNamePatten\": \"<a\\\\s+href=\'/\\\\d+/\\\\d+/\\\\d+\\\\.html\'\\\\s+>([^/]+)</a>\",\n \"bookContentUrl\": \"http://www.xbiquge.la/{bookId}/{indexId}.html\",\n \"contentStart\": \"<div id=\\\"content\\\">\",\n \"contentEnd\": \"<p>\"\n}', 0, '2020-05-23 22:46:58', '2020-05-23 22:46:58');
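The `crawl_rule` column stores plain Java regular expressions. For instance, `bookIdPatten` pulls the source book id out of a listing page; a tiny sketch of how such a pattern is applied with `java.util.regex` (the sample HTML line and id are made up):

```
// Apply the bookIdPatten stored in the rule above to a made-up listing snippet.
String html = "<a href=\"http://www.xbiquge.la/15/15409/\" target=\"_blank\">sample</a>";
Pattern bookIdPattern = Pattern.compile(
        "<a\\s+href=\"http://www.xbiquge.la/(\\d+/\\d+)/\"\\s+target=\"_blank\">");
Matcher matcher = bookIdPattern.matcher(html);
if (matcher.find()) {
    // "15/15409" here; it is later substituted into bookDetailUrl as {bookId}
    String sourceBookId = matcher.group(1);
}
```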

40
sql/20200615.sql Normal file
View File

@ -0,0 +1,40 @@
/*
Navicat MySQL Data Transfer
Source Server : localhost
Source Server Version : 50725
Source Host : localhost:3306
Source Database : novel_plus
Target Server Type : MYSQL
Target Server Version : 50725
File Encoding : 65001
Date: 2020-06-15 15:06:55
*/
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for crawl_single_task
-- ----------------------------
DROP TABLE IF EXISTS `crawl_single_task`;
CREATE TABLE `crawl_single_task` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`source_id` int(11) DEFAULT NULL COMMENT '爬虫源ID',
`source_name` varchar(50) DEFAULT NULL COMMENT '爬虫源名',
`source_book_id` varchar(255) DEFAULT NULL COMMENT '源站小说ID',
`cat_id` int(11) DEFAULT NULL COMMENT '分类ID',
`book_name` varchar(50) DEFAULT NULL COMMENT '爬取的小说名',
`author_name` varchar(50) DEFAULT NULL COMMENT '爬取的小说作者名',
`task_status` tinyint(1) DEFAULT '2' COMMENT '任务状态0失败1成功2未执行',
`exc_count` tinyint(2) DEFAULT '0' COMMENT '已经执行次数最多执行5次',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=8 DEFAULT CHARSET=utf8mb4 COMMENT='抓取单本小说任务表';
-- ----------------------------
-- Records of crawl_single_task
-- ----------------------------
INSERT INTO `crawl_single_task` VALUES ('6', '2', '百书斋', '1', '1', '1', '1', '0', '5', '2020-06-15 14:36:07');
INSERT INTO `crawl_single_task` VALUES ('7', '5', '笔趣阁', '108_108291', '1', '衍天志之不朽仙', '白衣少年丶', '1', '1', '2020-06-15 14:46:08');

View File

@ -1691,4 +1691,148 @@ INSERT INTO `user_read_history` VALUES ('113', '1255060328322027520', '125467697
INSERT INTO `user_read_history` VALUES ('117', '1255060328322027520', '1254946661743603712', '1254946914001629184', '2020-04-30 19:37:09', '2020-04-30 19:37:09');
INSERT INTO `user_read_history` VALUES ('118', '1255060328322027520', '1254957312633352192', '3335449', '2020-04-30 19:37:36', '2020-04-30 19:37:36');
DROP TABLE IF EXISTS `book_content0`;
CREATE TABLE `book_content0` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1155 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content1
-- ----------------------------
DROP TABLE IF EXISTS `book_content1`;
CREATE TABLE `book_content1` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=406 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content2
-- ----------------------------
DROP TABLE IF EXISTS `book_content2`;
CREATE TABLE `book_content2` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1222 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content3
-- ----------------------------
DROP TABLE IF EXISTS `book_content3`;
CREATE TABLE `book_content3` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=410 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content4
-- ----------------------------
DROP TABLE IF EXISTS `book_content4`;
CREATE TABLE `book_content4` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1188 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content5
-- ----------------------------
DROP TABLE IF EXISTS `book_content5`;
CREATE TABLE `book_content5` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=416 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content6
-- ----------------------------
DROP TABLE IF EXISTS `book_content6`;
CREATE TABLE `book_content6` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1180 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content7
-- ----------------------------
DROP TABLE IF EXISTS `book_content7`;
CREATE TABLE `book_content7` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=404 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content8
-- ----------------------------
DROP TABLE IF EXISTS `book_content8`;
CREATE TABLE `book_content8` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1134 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
-- ----------------------------
-- Table structure for book_content9
-- ----------------------------
DROP TABLE IF EXISTS `book_content9`;
CREATE TABLE `book_content9` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`index_id` bigint(20) DEFAULT NULL COMMENT '目录ID',
`content` mediumtext COMMENT '小说章节内容',
PRIMARY KEY (`id`),
UNIQUE KEY `key_uq_indexId` (`index_id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=415 DEFAULT CHARSET=utf8mb4 COMMENT='小说内容表';
DROP TABLE IF EXISTS `crawl_single_task`;
CREATE TABLE `crawl_single_task` (
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
`source_id` int(11) DEFAULT NULL COMMENT '爬虫源ID',
`source_name` varchar(50) DEFAULT NULL COMMENT '爬虫源名',
`source_book_id` varchar(255) DEFAULT NULL COMMENT '源站小说ID',
`cat_id` int(11) DEFAULT NULL COMMENT '分类ID',
`book_name` varchar(50) DEFAULT NULL COMMENT '爬取的小说名',
`author_name` varchar(50) DEFAULT NULL COMMENT '爬取的小说作者名',
`task_status` tinyint(1) DEFAULT '2' COMMENT '任务状态0失败1成功2未执行',
`exc_count` tinyint(2) DEFAULT '0' COMMENT '已经执行次数最多执行5次',
`create_time` datetime DEFAULT NULL COMMENT '创建时间',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=8 DEFAULT CHARSET=utf8mb4 COMMENT='抓取单本小说任务表';
-- ----------------------------
-- Records of crawl_single_task
-- ----------------------------
INSERT INTO `crawl_single_task` VALUES ('6', '2', '百书斋', '1', '1', '1', '1', '0', '5', '2020-06-15 14:36:07');
INSERT INTO `crawl_single_task` VALUES ('7', '5', '笔趣阁', '108_108291', '1', '衍天志之不朽仙', '白衣少年丶', '1', '1', '2020-06-15 14:46:08');
UPDATE `crawl_source` SET `source_name` = '书趣阁', `crawl_rule` = '{\n \"bookListUrl\": \"http://m.shuquge.com/sort/{catId}/0_{page}.html\",\n \"catIdRule\": {\n \"catId1\": \"1\",\n \"catId2\": \"2\",\n \"catId3\": \"3\",\n \"catId4\": \"4\",\n \"catId5\": \"7\",\n \"catId6\": \"6\",\n \"catId7\": \"8\"\n },\n \"bookIdPatten\": \"href=\\\"/s/(\\\\d+)\\\\.html\\\"\",\n \"pagePatten\": \"第(\\\\d+)/\\\\d+页\",\n \"totalPagePatten\": \"\\\\d+/(\\\\d+)页\",\n \"bookDetailUrl\": \"http://m.shuquge.com/s/{bookId}.html\",\n \"bookNamePatten\": \"<a\\\\s+href=\\\"/s/\\\\d+\\\\.html\\\"><h2>([^/]+)</h2></a>\",\n \"authorNamePatten\": \"<p>作者:([^/]+)</p>\",\n \"picUrlPatten\": \"src=\\\"(http://www.shuquge.com/files/article/image/\\\\d+/\\\\d+/\\\\d+s\\\\.jpg)\\\"\",\n \"statusPatten\": \"<p>状态:([^/]+)</p>\",\n \"bookStatusRule\": {\n \"连载中\": 0,\n \"完本\": 1\n },\n \"descStart\": \"<div class=\\\"intro_info\\\">\",\n \"descEnd\": \"最新章节推荐地址\",\n \"bookIndexUrl\": \"http://www.shuquge.com/txt/{bookId}/index.html\",\n \"bookIndexStart\": \"<dt>《\",\n \"indexIdPatten\": \"<dd><a\\\\s+href=\\\"(\\\\d+)\\\\.html\\\">[^/]+</a></dd>\",\n \"indexNamePatten\": \"<dd><a\\\\s+href=\\\"\\\\d+\\\\.html\\\">([^/]+)</a></dd>\",\n \"bookContentUrl\": \"http://www.shuquge.com/txt/{bookId}/{indexId}.html\",\n \"contentStart\": \"<div id=\\\"content\\\" class=\\\"showtxt\\\">\",\n \"contentEnd\": \"http://www.shuquge.com\"\n}', `source_status` = 1, `create_time` = '2020-05-18 12:02:34', `update_time` = '2020-05-18 12:02:34' WHERE `id` = 4;
INSERT INTO `friend_link` ( `link_name`, `link_url`, `sort`, `is_open`, `create_user_id`, `create_time`, `update_user_id`, `update_time`) VALUES
('小羊影视', 'http://video.java2nb.com/', 11, 1, NULL, NULL, NULL, NULL),
('官方论坛', 'http://bbs.java2nb.com', 21, 1, NULL, NULL, NULL, NULL);