Mirror of https://github.com/201206030/novel.git (synced 2025-04-27 07:30:50 +00:00)
Add a task that automatically converts network images to local images when local image save mode is enabled
parent 01d8005507
commit aa55cde2d3
@@ -0,0 +1,58 @@
package xyz.zinglizingli.books.core.schedule;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import xyz.zinglizingli.books.core.enums.PicSaveType;
import xyz.zinglizingli.books.po.Book;
import xyz.zinglizingli.books.service.BookService;
import xyz.zinglizingli.common.utils.FileUtil;

import java.util.List;

/**
 * Scheduled task that converts network cover images into local images
 *
 * @author 11797
 */
@ConditionalOnProperty(prefix = "pic.save", name = "type", havingValue = "2")
@Service
@RequiredArgsConstructor
@Slf4j
public class Network2LocalPicSchedule {

    private final BookService bookService;

    @Value("${pic.save.type}")
    private Integer picSaveType;

    @Value("${pic.save.path}")
    private String picSavePath;

    /**
     * Runs every 10 minutes
     */
    @Scheduled(fixedRate = 1000 * 60 * 10)
    public void trans() {

        log.info("Network2LocalPicSchedule............");

        Integer offset = 0, limit = 100;
        List<Book> networkPicBooks;
        do {
            networkPicBooks = bookService.queryNetworkPicBooks(limit, offset);
            for (Book book : networkPicBooks) {
                bookService.updateBook(book, book.getId());
            }
            offset += limit;
        } while (networkPicBooks.size() > 0);

    }
}
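For reference, a minimal sketch of the pic.save configuration this schedule assumes. The key names come from the @ConditionalOnProperty and @Value annotations above; the concrete values and target config file are illustrative assumptions, not part of this commit:

pic:
  save:
    type: 2                # 2 selects local image mode (assumed to correspond to PicSaveType.LOCAL) and activates Network2LocalPicSchedule
    path: /data/novel/pic  # assumed example directory where downloaded cover images are written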
@@ -46,4 +46,11 @@ public interface BookMapper {
      * Clean up invalid books (books without chapters last updated more than a month ago)
      * */
     void clearInvilidBook();
+
+    /**
+     * Query books whose cover is still a remote (network) image
+     *
+     * @param limit
+     * @param offset*/
+    List<Book> queryNetworkPicBooks(@Param("limit") Integer limit, @Param("offset") Integer offset);
 }
@@ -20,15 +20,13 @@ import xyz.zinglizingli.books.core.enums.PicSaveType;
 import xyz.zinglizingli.books.mapper.*;
 import xyz.zinglizingli.books.po.*;
 import xyz.zinglizingli.books.core.utils.Constants;
+import xyz.zinglizingli.common.utils.FileUtil;
 import xyz.zinglizingli.common.utils.SpringUtil;
 import xyz.zinglizingli.common.utils.UUIDUtils;
 import xyz.zinglizingli.common.cache.CommonCacheUtil;
 import xyz.zinglizingli.common.utils.RestTemplateUtil;

-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.InputStream;
-import java.io.OutputStream;
+import java.io.*;
 import java.util.*;
 import java.util.function.Function;
 import java.util.stream.Collectors;
@@ -140,32 +138,12 @@ public class BookService {
     /**
      * Update a book
      * */
-    private void updateBook(Book book, Long bookId) {
+    public void updateBook(Book book, Long bookId) {
         book.setId(bookId);
         String picSrc = book.getPicUrl();
         if (picSaveType == PicSaveType.LOCAL.getValue() && StringUtils.isNotBlank(picSrc)) {
             try {
-                // save the image locally
-                HttpHeaders headers = new HttpHeaders();
-                HttpEntity<String> requestEntity = new HttpEntity<>(null, headers);
-                ResponseEntity<Resource> resEntity = RestTemplateUtil.getInstance(Charsets.ISO_8859_1).exchange(picSrc, HttpMethod.GET, requestEntity, Resource.class);
-                InputStream input = Objects.requireNonNull(resEntity.getBody()).getInputStream();
-                Date currentDate = new Date();
-                picSrc = "/localPic/" + DateUtils.formatDate(currentDate, "yyyy") + "/" + DateUtils.formatDate(currentDate, "MM") + "/" + DateUtils.formatDate(currentDate, "dd")
-                        + UUIDUtils.getUUID32()
-                        + picSrc.substring(picSrc.lastIndexOf("."));
-                File picFile = new File(picSavePath + picSrc);
-                File parentFile = picFile.getParentFile();
-                if (!parentFile.exists()) {
-                    parentFile.mkdirs();
-                }
-                OutputStream out = new FileOutputStream(picFile);
-                byte[] b = new byte[4096];
-                for (int n; (n = input.read(b)) != -1; ) {
-                    out.write(b, 0, n);
-                }
-                out.close();
-                input.close();
+                picSrc = FileUtil.network2Local(picSrc, picSavePath);
                 book.setPicUrl(picSrc);
             } catch (Exception e) {
                 log.error(e.getMessage(), e);
@@ -175,6 +153,8 @@ public class BookService {
         bookMapper.updateByPrimaryKeySelective(book);
     }
+
+

     /**
      * Batch-insert into the chapter index and chapter content tables (automatically repairs broken chapters)
      * */
@@ -441,4 +421,13 @@ public class BookService {
         // clear invalid books
         bookMapper.clearInvilidBook();
     }
+
+    /**
+     * Query books whose cover is still a remote (network) image
+     *
+     * @param limit
+     * @param offset*/
+    public List<Book> queryNetworkPicBooks(Integer limit, Integer offset) {
+        return bookMapper.queryNetworkPicBooks(limit, offset);
+    }
 }
@@ -0,0 +1,50 @@
package xyz.zinglizingli.common.utils;

import org.apache.commons.codec.Charsets;
import org.apache.http.client.utils.DateUtils;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;

import java.io.*;
import java.util.Date;
import java.util.Objects;

/**
 * File utility class
 * @author 11797
 */
public class FileUtil {

    /**
     * Convert a network image to a local file
     * */
    public static String network2Local(String picSrc, String picSavePath) throws IOException {
        // save the image locally
        HttpHeaders headers = new HttpHeaders();
        HttpEntity<String> requestEntity = new HttpEntity<>(null, headers);
        ResponseEntity<Resource> resEntity = RestTemplateUtil.getInstance(Charsets.ISO_8859_1).exchange(picSrc, HttpMethod.GET, requestEntity, Resource.class);
        InputStream input = Objects.requireNonNull(resEntity.getBody()).getInputStream();
        Date currentDate = new Date();
        picSrc = "/localPic/" + DateUtils.formatDate(currentDate, "yyyy") + "/" + DateUtils.formatDate(currentDate, "MM") + "/" + DateUtils.formatDate(currentDate, "dd")
                + UUIDUtils.getUUID32()
                + picSrc.substring(picSrc.lastIndexOf("."));
        File picFile = new File(picSavePath + picSrc);
        File parentFile = picFile.getParentFile();
        if (!parentFile.exists()) {
            parentFile.mkdirs();
        }
        OutputStream out = new FileOutputStream(picFile);
        byte[] b = new byte[4096];
        for (int n; (n = input.read(b)) != -1; ) {
            out.write(b, 0, n);
        }
        out.close();
        input.close();
        return picSrc;
    }

}
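A minimal usage sketch of the new helper, with a made-up URL and save directory for illustration (in this commit the only caller is BookService.updateBook, which passes the book's remote pic_url and the configured pic.save.path, and catches the IOException the method declares):

// hypothetical example values, not from the commit
String relativePath = FileUtil.network2Local("https://example.com/cover.jpg", "/data/novel/pic");
// relativePath has the form /localPic/<yyyy>/<MM>/<dd><32-char-uuid>.jpg and is stored back into book.pic_url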
@@ -6,7 +6,7 @@ crawl:
   # crawl frequency
   period: 2000

-  # source site type: 1: 笔趣岛, 2: 笔趣塔, 3: 顶点; more site parsers in progress, stay tuned
+  # source site type: 1: 笔趣岛, 2: 笔趣塔, 3: 顶点 (default: 顶点); more site parsers in progress, stay tuned
   biquta:
     crawlsource:
       enabled: true # whether this crawl source is enabled
@@ -395,4 +395,11 @@
         where id not in (select book_id from book_index)
         and update_time <![CDATA[ <= ]]> DATE_SUB(curdate(),INTERVAL 10 DAY)
     </delete>
+
+    <select id="queryNetworkPicBooks" resultMap="BaseResultMap">
+        select
+        id,pic_url from book
+        where pic_url like 'http://%' or pic_url like 'https://%'
+        limit #{offset},#{limit}
+    </select>
 </mapper>
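For illustration, the first two passes of Network2LocalPicSchedule.trans() issue queries equivalent to the following (limit is fixed at 100 and offset advances by 100 per loop iteration, per the scheduler code above):

select id, pic_url from book where pic_url like 'http://%' or pic_url like 'https://%' limit 0,100;
select id, pic_url from book where pic_url like 'http://%' or pic_url like 'https://%' limit 100,100;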