任务生成封版

master
土豆兄弟 4 years ago
parent ca9cdafe4b
commit 2d7aca22d2

@ -0,0 +1,42 @@
package me.zhengjie.utils;
/**
*
*
*/
public class ConvertUtil {
/**
*
* , ""
*
*
* @param second
* @return
*/
public static String secondToTime(long second){
if (second <= 0){
return "";
}else {
second = second /1000; // 毫秒数转成秒数
}
long days = second / 86400; //转换天数
second = second % 86400; //剩余秒数
long hours = second / 3600; //转换小时
second = second % 3600; //剩余秒数
long minutes = second /60; //转换分钟
second = second % 60; //剩余秒数
if(days>0){
return days + "天" + hours + "小时" + minutes + "分" + second + "秒";
} else if (hours > 0 ){
return hours + "小时" + minutes + "分" + second + "秒";
} else if (minutes > 0){
return minutes + "分" + second + "秒";
}else if (second > 0){
return second + "秒";
}else {
return "";
}
}
}

@ -58,6 +58,31 @@ public class ThreadPoolConfig {
@Value(value = "${merge.task.thread_pool.ThreadNamePrefix}")
private String mergeThreadNamePrefix = "ProduceLocalFileTaskExecutor-"; // fixme 这个属性暂时没用起来
/**
* produceBigData
*/
@Value(value = "${producebigdata.task.thread_pool.corePoolSize}")
private int produceBigDataCorePoolSize = 2;
@Value(value = "${producebigdata.task.thread_pool.maxPoolSize}")
private int produceBigDataMaxPoolSize = 16;
@Value(value = "${producebigdata.task.thread_pool.queueCapacity}")
private int produceBigDataQueueCapacity = 3;
@Value(value = "${producebigdata.task.thread_pool.ThreadNamePrefix}")
private String produceBigDataThreadNamePrefix = "ProduceBigDataTaskExecutor-"; // fixme 这个属性暂时没用起来
/**
* sendBigData
*/
@Value(value = "${SendBigData.task.thread_pool.corePoolSize}")
private int sendBigDataCorePoolSize = 2;
@Value(value = "${SendBigData.task.thread_pool.maxPoolSize}")
private int sendBigDataMaxPoolSize = 16;
@Value(value = "${SendBigData.task.thread_pool.queueCapacity}")
private int sendBigDataQueueCapacity = 3;
@Value(value = "${SendBigData.task.thread_pool.ThreadNamePrefix}")
private String sendBigDataThreadNamePrefix = "SendBigDataTaskExecutor-"; // fixme 这个属性暂时没用起来
/**
* - 线
*
@ -114,4 +139,32 @@ public class ThreadPoolConfig {
return threadPoolExecutor;
}
@Bean(value = "ProduceBigDataTaskExecutor")
public Executor produceBigDataTaskExecutor(){
    // Dedicated pool for the asynchronous "produce big data" task.
    // When the bounded queue and the pool are both saturated, the oldest
    // queued task is silently discarded (DiscardOldestPolicy) — acceptable
    // here only because tasks are re-triggerable from the controller.
    return new ThreadPoolExecutor(
            produceBigDataCorePoolSize,
            produceBigDataMaxPoolSize,
            3,                     // keep-alive (seconds) for idle non-core threads
            TimeUnit.SECONDS,
            new LinkedBlockingDeque<>(produceBigDataQueueCapacity),
            Executors.defaultThreadFactory(),
            new ThreadPoolExecutor.DiscardOldestPolicy()
    );
}
@Bean(value = "SendBigDataTaskExecutor")
public Executor SendBigDataTaskExecutor(){
    // Dedicated pool for the asynchronous "send big data" task; mirrors the
    // produce-side executor. Oldest queued task is dropped on saturation.
    return new ThreadPoolExecutor(
            sendBigDataCorePoolSize,
            sendBigDataMaxPoolSize,
            3,                     // keep-alive (seconds) for idle non-core threads
            TimeUnit.SECONDS,
            new LinkedBlockingDeque<>(sendBigDataQueueCapacity),
            Executors.defaultThreadFactory(),
            new ThreadPoolExecutor.DiscardOldestPolicy()
    );
}
}

@ -19,6 +19,11 @@ import lombok.Data;
import cn.hutool.core.bean.BeanUtil;
import io.swagger.annotations.ApiModelProperty;
import cn.hutool.core.bean.copier.CopyOptions;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import org.springframework.data.annotation.CreatedDate;
import org.springframework.data.annotation.LastModifiedDate;
import javax.persistence.*;
import javax.validation.constraints.*;
import java.sql.Timestamp;
@ -43,10 +48,12 @@ public class BuildRecord implements Serializable {
private Integer id;
@Column(name = "gmt_create")
@ApiModelProperty(value = "创建时间")
@CreationTimestamp
private Timestamp gmtCreate;
@Column(name = "gmt_modified")
@ApiModelProperty(value = "修改时间")
@UpdateTimestamp
private Timestamp gmtModified;
@Column(name = "task_name")

@ -29,6 +29,7 @@ import me.zhengjie.modules.buildrecord.service.dto.BuildRecordQueryCriteria;
import me.zhengjie.modules.buildrecord.task.ProduceBigDataTask;
import me.zhengjie.modules.buildrecord.task.SendBigDataTask;
import me.zhengjie.modules.buildrecord.task.dto.SendBigDataDTO;
import me.zhengjie.modules.taskrecord.service.dto.TaskRecordDto;
import me.zhengjie.utils.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
@ -118,17 +119,17 @@ public class BuildRecordController {
// @PreAuthorize("@el.check('taskRecord:add')")
@AnonymousAccess // fixme 需要测试完成后进行去除和使用上面的权限注解
public ResponseEntity<Object> buildTask(@RequestBody BuildRecordBuildVO taskRecordBuildVO){
BuildRecord resources = taskRecordBuildVO.getResource();
BuildRecord resources = taskRecordBuildVO.getResources();
// 任务名称不能重复
String taskName = resources.getTaskName();
if (!checkTaskNameForBuild(taskName)){
return new ResponseEntity<>(CommonResponse.createByError(ResponseCode.TASK_NAME_IS_EXIST), HttpStatus.OK);
}
// 任务ID必须进行填写
Long taskBuildId = resources.getTaskBuildId();
/* Long taskBuildId = resources.getTaskBuildId();
if (taskBuildId == null || taskBuildId <= 0){
return new ResponseEntity<>(CommonResponse.createByError(ResponseCode.ILLEGAL_ARGUMENT), HttpStatus.OK);
}
}*/
// 启动建立数据传输任务
produceBigDataTask.doRunTask(resources);
// 返回结果
@ -158,7 +159,7 @@ public class BuildRecordController {
@AnonymousAccess // fixme 需要测试完成后进行去除和使用上面的权限注解
public ResponseEntity<Object> sendTask(@RequestBody BuildRecordSendVO buildRecordSendVO){
// 参数校验
if (buildRecordSendVO == null || buildRecordSendVO.getLimit() == null || buildRecordSendVO.getTaskId() == null || StringUtils.isBlank(buildRecordSendVO.getSendName())){
if (buildRecordSendVO == null || buildRecordSendVO.getLimit() == null || buildRecordSendVO.getResource().getTaskBuildId() == null || StringUtils.isBlank(buildRecordSendVO.getSendName())){
return new ResponseEntity<>(CommonResponse.createByError(ResponseCode.EMPTY_ARGUMENT), HttpStatus.OK);
}
SendBigDataDTO sendBigDataDTO = new SendBigDataDTO();
@ -168,5 +169,50 @@ public class BuildRecordController {
return new ResponseEntity<>(CommonResponse.createBySuccess(), HttpStatus.OK);
}
/**
 * Validates that the requested number of course packages to send, added to
 * what has already been sent for this build record, does not exceed the
 * record's total count (delegates the comparison to {@code isMatchCheckNum}).
 *
 * @param sendTotal requested send count; defaults to 0 when the parameter is omitted
 * @param id        build-record id to validate against
 * @return a success response when the check passes, otherwise an error body
 *         (EMPTY_ARGUMENT / NO_RELEVANT_CONTENT_WAS_FOUND / NO_MATCH_ARGUMENT_SET);
 *         HTTP status is always 200, the outcome is carried in the payload
 */
@Log("发送课包数目校验")
@ApiOperation("发送课包数目校验")
// @PreAuthorize("@el.check('taskRecord:list')")
@PostMapping(value = "/checkSendTotal")
@AnonymousAccess // fixme remove after testing and re-enable the @PreAuthorize check above
public ResponseEntity<Object> checkSendTotal(@RequestParam(value = "sendTotal", defaultValue = "0") Long sendTotal,
@RequestParam(value = "id")Integer id){
// NOTE(review): sendTotal can never be null here because of defaultValue = "0";
// this null check is defensive only.
if (sendTotal == null || id == null){
return new ResponseEntity<>(CommonResponse.createByError(ResponseCode.EMPTY_ARGUMENT), HttpStatus.OK);
}
BuildRecordDto dto = buildRecordService.findById(id);
if (dto == null){
return new ResponseEntity<>(CommonResponse.createByError(ResponseCode.NO_RELEVANT_CONTENT_WAS_FOUND), HttpStatus.OK);
}
// Required condition: (this send count + already-sent count) <= task's total package count
if (!isMatchCheckNum(dto, sendTotal)){
return new ResponseEntity<>(CommonResponse.createByError(ResponseCode.NO_MATCH_ARGUMENT_SET), HttpStatus.OK);
}
return new ResponseEntity<>(CommonResponse.createBySuccess(), HttpStatus.OK);
}
private boolean isMatchCheckNum(BuildRecordDto dto, Long sendTotal){
    // Treat a missing total or missing sent-count as zero, then check that
    // (requested send count + already sent) does not exceed the task total.
    long total = dto.getTotal() == null ? 0L : dto.getTotal();
    long alreadySent = dto.getSendTotal() == null ? 0L : dto.getSendTotal();
    return sendTotal + alreadySent <= total;
}
// ========================= 自定义的大数据平台相关的接口 start =========================
}

@ -12,7 +12,7 @@ public class BuildRecordBuildVO implements Serializable {
/**
*
*/
private BuildRecord resource;
private BuildRecord resources;

@ -19,11 +19,6 @@ public class BuildRecordSendVO implements Serializable {
*/
private Long limit;
/**
* Id,Id
*/
private Long taskId;
/**
* , 0 ,1-5
*/
@ -38,4 +33,9 @@ public class BuildRecordSendVO implements Serializable {
*
*/
private String sendName;
/**
*
*/
private String onlyName;
}

@ -1,22 +1,26 @@
package me.zhengjie.modules.buildrecord.task;
import cn.hutool.core.collection.CollectionUtil;
import com.alibaba.fastjson.JSON;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.distance.DistanceUtils;
import com.spatial4j.core.shape.Rectangle;
import lombok.extern.slf4j.Slf4j;
import me.zhengjie.modules.buildrecord.domain.BuildRecord;
import me.zhengjie.modules.buildrecord.service.BuildRecordService;
import me.zhengjie.modules.buildrecord.service.dto.BuildRecordDto;
import me.zhengjie.modules.buildrecord.task.convert.BuildTaskQueryParamJsonConvert;
import me.zhengjie.modules.edu.service.EduService;
import me.zhengjie.modules.tag.domain.Tag;
import me.zhengjie.modules.tag.service.TagService;
import me.zhengjie.modules.tag.service.dto.TagDto;
import me.zhengjie.utils.ConvertUtil;
import me.zhengjie.utils.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
@ -35,6 +39,11 @@ public class ProduceBigDataTask {
*/
private static final int SUCCESS_BUILD_TAG = 1;
/**
* 0 -
*/
private static final int READY_SEND_TAG = 0;
/**
* -
*/
@ -44,7 +53,8 @@ public class ProduceBigDataTask {
private EduService eduService;
@Autowired
private TagService tagService;
@Autowired
private BuildRecordService buildRecordService;
@Async(value = "ProduceBigDataTaskExecutor")
@ -53,17 +63,24 @@ public class ProduceBigDataTask {
log.info("====== [ task start running, task name is {} ] ======", "ProduceBigDataTask");
runTask(task);
Long endMilliSecond = LocalDateTime.now().toInstant(ZoneOffset.of("+8")).toEpochMilli();
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "ProduceBigDataTask", (endMilliSecond - satrtMilliSecond));
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "ProduceBigDataTask", ConvertUtil.secondToTime(endMilliSecond - satrtMilliSecond));
}
private void runTask(BuildRecord task) {
// 先对任务表进行记录
BuildRecordDto buildRecordDto = buildRecordService.create(task);
if (buildRecordDto == null){
log.error("============ [create build rec is fail, please check it. ] ============");
return;
}
// 解析需要的查询参数
String params = task.getParams();
if (StringUtils.isBlank(params)){
return;
}
JSONObject jsonObject1 = JSONUtil.parseObj(params,true);
// 构建解析类对Json进行解析
BuildTaskQueryParamJsonConvert convert = JSON.parseObject(params, BuildTaskQueryParamJsonConvert.class);
BuildTaskQueryParamJsonConvert convert = JSONUtil.toBean(jsonObject1, BuildTaskQueryParamJsonConvert.class);
// 地图点进行计算,获取选点范围
String localCode = convert.getLocalCode();
Integer range = convert.getRange();
@ -75,26 +92,48 @@ public class ProduceBigDataTask {
log.error(" ====================== {please check localPoint calculate is ok ! } ==========================");
return;
}
BuildRecord buildRecord = new BuildRecord();
Integer count = tagService.queryAndBatchInsertData(buildRecordDto.getId(), convert.getCityCode(), convert.getStuGrade(), rectangle);
// 进行两表关联进行复杂查询
Pageable pageable = PageRequest.of(0,200);
// Pageable pageable = PageRequest.of(0,1000);
// 计算总数
/*
long count = eduService.countMatchData(convert.getCityCode(), convert.getStuGrade(), rectangle);
if (count >= 0){
buildRecord.setTotal(count);
}else{
updateBuildRec(buildRecordDto, buildRecord);
return;
}
*/
/* long count = 0L;
while(true){
// 分页进行查询结果
Page<String> page = eduService.queryMatchData(convert.getCityCode(), convert.getStuGrade(), rectangle, pageable);
Slice<String> page = eduService.queryMatchData(convert.getCityCode(), convert.getStuGrade(), rectangle, pageable);
// 记录总数
// buildRecord.setTotal(page.getNumberOfElements());
if (CollectionUtil.isEmpty(page)){
return;
// 没有查到记录
buildRecord.setTotal(0L);
break;
}
// 把待发送记录存入分表
List<TagDto> tagDtos = new ArrayList<>();
// 准备待插入分表的集合
List<Tag> tagDtos = new ArrayList<>();
page.forEach(
each->{
TagDto tagDto = new TagDto();
tagDto.setUid(each.trim());
tagDto.setPushStatus(0);
tagDto.setTaskId(task.getTaskBuildId());
tagDtos.add(tagDto);
Tag tag = new Tag();
tag.setUid(each.trim());
tag.setPushStatus(READY_SEND_TAG);
tag.setTaskId(Long.valueOf(buildRecordDto.getId()));
tagDtos.add(tag);
}
);
Long aLong = tagService.saveAll(tagDtos);
// 每次的记录数
count += tagDtos.size();
// 把待发送记录存入分表
Integer aLong = tagService.saveAll(tagDtos);
if (aLong <= 0){
log.error("==== [save send record fail, please check , ready insert record is {} , activity id is {}] ====", tagDtos.toString(), task.getTaskBuildId());
}
@ -102,12 +141,34 @@ public class ProduceBigDataTask {
break;
}
pageable = page.nextPageable();
task.setTotal(page.getTotalElements());
}*/
// 统计插入的总数
buildRecord.setTotal(Long.valueOf(count));
// 更新记录
updateBuildRec(buildRecordDto, buildRecord);
}
/**
*
*
* @param buildRecordDto
* @param buildRecord
*/
private void updateBuildRec(BuildRecordDto buildRecordDto, BuildRecord buildRecord){
// 生成完成后,对记录进行更新
Integer id = buildRecordDto.getId();
buildRecord.setIsBuild(SUCCESS_BUILD_TAG);
if (id != null && id >= 0){
buildRecord.setId(id);
try{
buildRecordService.update(buildRecord);
}catch (Exception e){
log.error("==== [ update buildRecord fail, please check ] ====",e);
}
}
// 生成完成后,对各种状态进行记录
task.setIsBuild(SUCCESS_BUILD_TAG);
}
private Rectangle calculateLocalPoint(String localCode, Integer range) {
// 处理切割横纵坐标字符串
String[] split = StringUtils.split(localCode.trim(), SPLIT_TAG);

@ -1,5 +1,7 @@
package me.zhengjie.modules.buildrecord.task;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.util.RandomUtil;
import cn.hutool.http.HttpResponse;
import com.alibaba.fastjson.JSON;
@ -10,9 +12,15 @@ import me.zhengjie.modules.buildrecord.domain.BuildRecord;
import me.zhengjie.modules.buildrecord.service.BuildRecordService;
import me.zhengjie.modules.buildrecord.service.dto.BuildRecordDto;
import me.zhengjie.modules.buildrecord.task.dto.SendBigDataDTO;
import me.zhengjie.modules.tag.domain.Tag;
import me.zhengjie.modules.tag.service.TagService;
import me.zhengjie.modules.tag.service.dto.TagDto;
import me.zhengjie.modules.tag.service.dto.TagQueryCriteria;
import me.zhengjie.modules.taskrecord.domain.TaskRecord;
import me.zhengjie.modules.taskrecord.service.TaskRecordService;
import me.zhengjie.modules.taskrecord.service.dto.TaskRecordDto;
import me.zhengjie.modules.taskrecord.service.dto.TaskRecordQueryCriteria;
import me.zhengjie.utils.ConvertUtil;
import me.zhengjie.utils.HttpUtil;
import me.zhengjie.utils.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
@ -21,10 +29,15 @@ import org.springframework.context.annotation.Scope;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@Component
@ -69,6 +82,8 @@ public class SendBigDataTask {
private BuildRecordService buildRecordService;
@Autowired
private TagService tagService;
@Autowired
private TaskRecordService taskRecordService;
@Async(value = "SendBigDataTaskExecutor")
public void doRunTask(BuildRecord resource, SendBigDataDTO sendBigDataDTO){
@ -76,7 +91,7 @@ public class SendBigDataTask {
log.info("====== [ task start running, task name is {} ] ======", "SendBigDataTask");
runTask(resource, sendBigDataDTO);
Long endMilliSecond = LocalDateTime.now().toInstant(ZoneOffset.of("+8")).toEpochMilli();
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "SendBigDataTask", (endMilliSecond - satrtMilliSecond));
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "SendBigDataTask", ConvertUtil.secondToTime(endMilliSecond - satrtMilliSecond));
}
private void runTask(BuildRecord resource, SendBigDataDTO sendBigDataDTO) {
@ -88,7 +103,8 @@ public class SendBigDataTask {
BuildRecordDto buildRecordDto = buildRecordService.findById(id);
// 查询记录表,返回这个id去发送记录表中进行查找 - 构建发送体
TagQueryCriteria tagQueryCriteria = new TagQueryCriteria();
tagQueryCriteria.setTaskId((long)id);
tagQueryCriteria.setTaskId(Long.valueOf(id));
// fixme 这里可以分页进行查询 - 目前不进行优化 - 为全查
List<TagDto> tagDtos = tagService.queryAll(tagQueryCriteria);
// 遍历查询等待发送的列表
List<TagDto> collect = tagDtos.stream()
@ -96,10 +112,46 @@ public class SendBigDataTask {
.distinct()
.limit(sendBigDataDTO.getLimit())
.collect(Collectors.toList());
// 进行去重
String resultFilePath = null;
String onlyName = sendBigDataDTO.getOnlyName();
if (StringUtils.isNotBlank(onlyName)){
TaskRecordQueryCriteria queryCriteria = new TaskRecordQueryCriteria();
queryCriteria.setTaskName(onlyName);
List<TaskRecordDto> taskRecordDtos = taskRecordService.queryAll(queryCriteria);
if (CollectionUtil.isNotEmpty(taskRecordDtos) && taskRecordDtos.size() == 1){
resultFilePath = taskRecordDtos.get(0).getLocalFilePath();
}
}
if (StringUtils.isNotBlank(resultFilePath)){
List<String> fileLines = Lists.newArrayList();
try {
fileLines = Files.readAllLines(Paths.get(resultFilePath));
} catch (IOException e) {
log.error("================== {read file error , please check is , file path is : {} } ================================", resultFilePath, e);
}
// 过滤的集合
Set<String> filterCollect = collect.stream().map(TagDto::getUid).filter(fileLines::contains).collect(Collectors.toSet());
collect = collect.stream().filter(one -> !filterCollect.contains(one.getUid())).collect(Collectors.toList());
}
// 乱序
// Collections.shuffle(collect);
// 对需要发送的字段进行发送
batchSend(collect, sendBigDataDTO, buildRecordDto);
// 对发送后的状态进行更新
resource.setIsSend(FINISH_SEND_TAG);
try{
Long sendTotal = buildRecordDto.getSendTotal();
if (sendTotal == null){
sendTotal = 0L;
}
// 之前要进行校验
resource.setSendTotal( sendTotal + sendBigDataDTO.getLimit());
buildRecordService.update(resource);
}catch (Exception e){
log.error("==== [ update buildRecord fail, please check ] ====",e);
}
}
private void batchSend(List<TagDto> collect, SendBigDataDTO sendRecordDTO, BuildRecordDto buildRecordDto) {
@ -108,7 +160,7 @@ public class SendBigDataTask {
list->{
// 调用推送地址进行推送
PushDBJsonContent pushDBJsonContent = new PushDBJsonContent();
pushDBJsonContent.setActId(sendRecordDTO.getTaskId());
pushDBJsonContent.setActId(buildRecordDto.getTaskBuildId());
pushDBJsonContent.setActName(sendRecordDTO.getSendName());
// 加入每一个号码对应接口字段
List<PushDBJsonContent.Client> clientList = new ArrayList<>();
@ -151,8 +203,16 @@ public class SendBigDataTask {
return;
}
}
List<Tag> tags = new ArrayList<>();
collect.forEach(
one ->{
Tag tag = new Tag();
BeanUtil.copyProperties(one, tag);
tags.add(tag);
}
);
// 把修改完状态的进行更新
Long aLong = tagService.saveAll(collect);
Integer aLong = tagService.saveAll(tags);
if (aLong <= 0){
log.error("================= [update data fail , please check it .] =================");
}

@ -12,11 +12,6 @@ public class SendBigDataDTO {
*/
private Long limit;
/**
* Id,Id
*/
private Long taskId;
/**
* , 0 ,1-5
*/
@ -26,4 +21,11 @@ public class SendBigDataDTO {
*
*/
private String sendName;
/**
*
*/
private String onlyName;
}

@ -216,7 +216,7 @@ public class TaskRecordController {
if (send == null){
send = 0L;
}
if ((sendTotal + send) < total){
if ((sendTotal + send) <= total){
return Boolean.TRUE;
}
return Boolean.FALSE;

@ -6,6 +6,7 @@ import lombok.extern.slf4j.Slf4j;
import me.zhengjie.modules.taskrecord.domain.TaskRecord;
import me.zhengjie.modules.taskrecord.service.TaskRecordService;
import me.zhengjie.modules.taskrecord.service.dto.TaskRecordDto;
import me.zhengjie.utils.ConvertUtil;
import me.zhengjie.utils.FileUtil;
import me.zhengjie.utils.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
@ -86,7 +87,7 @@ public class MergeRecordFilesTask {
log.info("====== [ task start running, task name is {} ] ======", "MergeRecordFilesTask");
runTask(ids, tag, taskRecord);
Long endMilliSecond = LocalDateTime.now().toInstant(ZoneOffset.of("+8")).toEpochMilli();
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "MergeRecordFilesTask", (endMilliSecond - satrtMilliSecond));
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "MergeRecordFilesTask", ConvertUtil.secondToTime(endMilliSecond - satrtMilliSecond));
}
private void runTask(List<Integer> ids, String tag, TaskRecord taskRecord) {

@ -14,6 +14,7 @@ import me.zhengjie.modules.taskrecord.domain.TaskRecord;
import me.zhengjie.modules.taskrecord.service.TaskRecordService;
import me.zhengjie.modules.taskrecord.service.dto.TaskRecordDto;
import me.zhengjie.modules.taskrecord.service.dto.TaskRecordQueryCriteria;
import me.zhengjie.utils.ConvertUtil;
import me.zhengjie.utils.FileUtil;
import me.zhengjie.utils.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
@ -103,7 +104,7 @@ public class ProduceLocalFileTask {
log.info("====== [ task start running, task name is {} ] ======", "ProduceLocalFileTask");
runTask(task, tag);
Long endMilliSecond = LocalDateTime.now().toInstant(ZoneOffset.of("+8")).toEpochMilli();
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "ProduceLocalFileTask", (endMilliSecond - satrtMilliSecond));
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "ProduceLocalFileTask", ConvertUtil.secondToTime(endMilliSecond - satrtMilliSecond));
}
private void runTask(TaskRecord task, String tag) {

@ -10,6 +10,7 @@ import me.zhengjie.common.json.PushDBJsonContent;
import me.zhengjie.modules.taskrecord.service.TaskRecordService;
import me.zhengjie.modules.taskrecord.service.dto.TaskRecordDto;
import me.zhengjie.modules.taskrecord.task.dto.SendRecordDTO;
import me.zhengjie.utils.ConvertUtil;
import me.zhengjie.utils.HttpUtil;
import me.zhengjie.utils.StringUtils;
import org.springframework.beans.BeanUtils;
@ -82,7 +83,7 @@ public class SendRecordTask {
log.info("====== [ task start running, task name is {} ] ======", "SendRecordTask");
runTask(sendRecordDTO);
Long endMilliSecond = LocalDateTime.now().toInstant(ZoneOffset.of("+8")).toEpochMilli();
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "SendRecordTask", (endMilliSecond - satrtMilliSecond));
log.info("====== [ task start end, task name is {},cost milliSecond is {} ] ======", "SendRecordTask", ConvertUtil.secondToTime(endMilliSecond - satrtMilliSecond));
}

@ -0,0 +1,8 @@
{
"properties": [
{
"name": "spring.datasource.druid.db-type",
"type": "java.lang.String",
"description": "Description for spring.datasource.druid.db-type."
}
] }

@ -1,48 +1,61 @@
#配置数据源
spring:
datasource:
druid:
db-type: com.alibaba.druid.pool.DruidDataSource
driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://47.99.218.9:3306/eladmin?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false
username: root
password: Yuyou@2020
# 初始连接数
initial-size: 5
# 最小连接数
min-idle: 10
# 最大连接数
max-active: 20
# 获取连接超时时间
max-wait: 5000
# 连接有效性检测时间
time-between-eviction-runs-millis: 60000
# 连接在池中最小生存的时间
min-evictable-idle-time-millis: 300000
# 连接在池中最大生存的时间
max-evictable-idle-time-millis: 900000
test-while-idle: true
test-on-borrow: false
test-on-return: false
# 检测连接是否有效
validation-query: select 1
# 配置监控统计
webStatFilter:
enabled: true
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: false
filter:
stat:
shardingsphere:
datasource:
names: eladmin
eladmin:
type: com.alibaba.druid.pool.DruidDataSource
driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://localhost:3306/eladmin?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false&rewriteBatchedStatements=true
username: root
password: root
# 初始连接数
initial-size: 5
# 最小连接数
min-idle: 10
# 最大连接数
max-active: 20
# 获取连接超时时间
max-wait: 5000
# 连接有效性检测时间
time-between-eviction-runs-millis: 60000
# 连接在池中最小生存的时间
min-evictable-idle-time-millis: 300000
# 连接在池中最大生存的时间
max-evictable-idle-time-millis: 900000
test-while-idle: true
test-on-borrow: false
test-on-return: false
# 检测连接是否有效
validation-query: select 1
# 配置监控统计
webStatFilter:
enabled: true
# 记录慢SQL
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: true
wall:
config:
multi-statement-allow: true
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: false
filter:
stat:
enabled: true
# 记录慢SQL
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: true
wall:
config:
multi-statement-allow: true
sharding:
# 默认数据源
default-data-source-name: eladmin
# 配置分表策略
tables:
dc_tag:
actual-data-nodes: eladmin.dc_tag$->{0..9}
table-strategy:
inline:
sharding-column: task_id
algorithm-expression: dc_tag$->{task_id % 10}
# 登录相关配置
login:
@ -131,14 +144,28 @@ send:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'ProduceLocalFileTaskExecutor-'
ThreadNamePrefix: 'SendLocalFileTaskExecutor-'
merge:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'ProduceLocalFileTaskExecutor-'
ThreadNamePrefix: 'MergeFileTaskExecutor-'
producebigdata:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'ProduceBigDataTaskExecutor-'
SendBigData:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'SendBigDataTaskExecutor-'
# 增加日志相关的配置
logging:
level:
@ -150,3 +177,6 @@ logging:
org.hibernate.engine.QueryParameters: debug
org.hibernate.engine.query.HQLQueryPlan: debug
org.hibernate.type.descriptor.sql.BasicBinder: trace
tag:
split-table:
sum: 10

@ -1,50 +1,61 @@
#配置数据源
spring:
datasource:
druid:
db-type: com.alibaba.druid.pool.DruidDataSource
driverClassName: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://47.99.218.9:3306/eladmin?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false
username: root
password: Yuyou@2020
# 初始连接数
initial-size: 5
# 最小连接数
min-idle: 10
# 最大连接数
max-active: 20
# 获取连接超时时间
max-wait: 5000
# 连接有效性检测时间
time-between-eviction-runs-millis: 60000
# 连接在池中最小生存的时间
min-evictable-idle-time-millis: 300000
# 连接在池中最大生存的时间
max-evictable-idle-time-millis: 900000
test-while-idle: true
test-on-borrow: false
test-on-return: false
# 检测连接是否有效
validation-query: select 1
# 配置监控统计
webStatFilter:
enabled: true
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: false
login-username: admin
login-password: 123456
filter:
stat:
shardingsphere:
datasource:
names: eladmin
eladmin:
type: com.alibaba.druid.pool.DruidDataSource
driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://47.99.218.9:3306/eladmin?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false
username: root
password: Yuyou@2020
# 初始连接数
initial-size: 5
# 最小连接数
min-idle: 10
# 最大连接数
max-active: 20
# 获取连接超时时间
max-wait: 5000
# 连接有效性检测时间
time-between-eviction-runs-millis: 60000
# 连接在池中最小生存的时间
min-evictable-idle-time-millis: 300000
# 连接在池中最大生存的时间
max-evictable-idle-time-millis: 900000
test-while-idle: true
test-on-borrow: false
test-on-return: false
# 检测连接是否有效
validation-query: select 1
# 配置监控统计
webStatFilter:
enabled: true
# 记录慢SQL
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: true
wall:
config:
multi-statement-allow: true
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: false
filter:
stat:
enabled: true
# 记录慢SQL
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: true
wall:
config:
multi-statement-allow: true
sharding:
# 默认数据源
default-data-source-name: eladmin
# 配置分表策略
tables:
dc_tag:
actual-data-nodes: eladmin.dc_tag$->{0..9}
table-strategy:
inline:
sharding-column: task_id
algorithm-expression: dc_tag$->{task_id % 10}
# 登录相关配置
login:
@ -64,7 +75,7 @@ login:
heigth: 36
# 内容长度
length: 2
# 字体名称,为空则使用默认字体,如遇到线上乱码,设置其他字体即可
# 字体名称,为空则使用默认字体
font-name:
# 字体大小
font-size: 25
@ -76,35 +87,28 @@ jwt:
token-start-with: Bearer
# 必须使用最少88位的Base64对该令牌进行编码
base64-secret: ZmQ0ZGI5NjQ0MDQwY2I4MjMxY2Y3ZmI3MjdhN2ZmMjNhODViOTg1ZGE0NTBjMGM4NDA5NzYxMjdjOWMwYWRmZTBlZjlhNGY3ZTg4Y2U3YTE1ODVkZDU5Y2Y3OGYwZWE1NzUzNWQ2YjFjZDc0NGMxZWU2MmQ3MjY1NzJmNTE0MzI=
# 令牌过期时间 此处单位/毫秒 ,默认2小时,可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html
token-validity-in-seconds: 7200000
# 令牌过期时间 此处单位/毫秒 ,默认4小时,可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html
token-validity-in-seconds: 14400000
# 在线用户key
online-key: online-token-
# 验证码
code-key: code-key-
# token 续期检查时间范围默认30分钟单位默认毫秒在token即将过期的一段时间内用户操作了则给用户的token续期
# token 续期检查时间范围默认30分钟单位毫秒在token即将过期的一段时间内用户操作了则给用户的token续期
detect: 1800000
# 续期时间范围,默认 1小时这里单位毫秒
# 续期时间范围,默认1小时单位毫秒
renew: 3600000
# IP 本地解析
ip:
local-parsing: false
#是否允许生成代码生产环境设置为false
generator:
enabled: false
#如果生产环境要开启swagger需要配置请求地址
#springfox:
# documentation:
# swagger:
# v2:
# host: # 接口域名或外网ip
enabled: true
#是否开启 swagger-ui
swagger:
enabled: false
enabled: true
# IP 本地解析
ip:
local-parsing: true
# 文件存储路径
file:
@ -140,11 +144,39 @@ send:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'ProduceLocalFileTaskExecutor-'
ThreadNamePrefix: 'SendLocalFileTaskExecutor-'
merge:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'ProduceLocalFileTaskExecutor-'
ThreadNamePrefix: 'MergeFileTaskExecutor-'
producebigdata:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'ProduceBigDataTaskExecutor-'
SendBigData:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'SendBigDataTaskExecutor-'
# 增加日志相关的配置
logging:
level:
org.springframework.security:
- debug
- info
org.springframework.web: error
org.hibernate.SQL: debug
org.hibernate.engine.QueryParameters: debug
org.hibernate.engine.query.HQLQueryPlan: debug
org.hibernate.type.descriptor.sql.BasicBinder: trace
tag:
split-table:
sum: 10

@ -0,0 +1,182 @@
#配置数据源
spring:
shardingsphere:
datasource:
names: eladmin
eladmin:
type: com.alibaba.druid.pool.DruidDataSource
driver-class-name: net.sf.log4jdbc.sql.jdbcapi.DriverSpy
url: jdbc:log4jdbc:mysql://rm-bp1693kl5d490o5cn.mysql.rds.aliyuncs.com:3306/eladmin?serverTimezone=Asia/Shanghai&characterEncoding=utf8&useSSL=false
username: prod
password: yuyou@RDS9495
# 初始连接数
initial-size: 5
# 最小连接数
min-idle: 10
# 最大连接数
max-active: 20
# 获取连接超时时间
max-wait: 5000
# 连接有效性检测时间
time-between-eviction-runs-millis: 60000
# 连接在池中最小生存的时间
min-evictable-idle-time-millis: 300000
# 连接在池中最大生存的时间
max-evictable-idle-time-millis: 900000
test-while-idle: true
test-on-borrow: false
test-on-return: false
# 检测连接是否有效
validation-query: select 1
# 配置监控统计
webStatFilter:
enabled: true
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: false
filter:
stat:
enabled: true
# 记录慢SQL
log-slow-sql: true
slow-sql-millis: 1000
merge-sql: true
wall:
config:
multi-statement-allow: true
sharding:
# 默认数据源
default-data-source-name: eladmin
# 配置分表策略
tables:
dc_tag:
actual-data-nodes: eladmin.dc_tag$->{0..9}
table-strategy:
inline:
sharding-column: task_id
algorithm-expression: dc_tag$->{task_id % 10}
# 登录相关配置
login:
# 登录缓存
cache-enable: true
# 是否限制单用户登录
single: false
# 验证码
login-code:
# 验证码类型配置 查看 LoginProperties 类
code-type: arithmetic
# 登录图形验证码有效时间/分钟
expiration: 2
# 验证码宽度
width: 111
# 验证码高度
heigth: 36
# 内容长度
length: 2
# 字体名称,为空则使用默认字体
font-name:
# 字体大小
font-size: 25
#jwt
jwt:
header: Authorization
# 令牌前缀
token-start-with: Bearer
# 必须使用最少88位的Base64对该令牌进行编码
base64-secret: ZmQ0ZGI5NjQ0MDQwY2I4MjMxY2Y3ZmI3MjdhN2ZmMjNhODViOTg1ZGE0NTBjMGM4NDA5NzYxMjdjOWMwYWRmZTBlZjlhNGY3ZTg4Y2U3YTE1ODVkZDU5Y2Y3OGYwZWE1NzUzNWQ2YjFjZDc0NGMxZWU2MmQ3MjY1NzJmNTE0MzI=
# 令牌过期时间 此处单位/毫秒 默认4小时可在此网站生成 https://www.convertworld.com/zh-hans/time/milliseconds.html
token-validity-in-seconds: 14400000
# 在线用户key
online-key: online-token-
# 验证码
code-key: code-key-
# token 续期检查时间范围默认30分钟单位毫秒在token即将过期的一段时间内用户操作了则给用户的token续期
detect: 1800000
# 续期时间范围默认1小时单位毫秒
renew: 3600000
#是否允许生成代码生产环境设置为false
generator:
enabled: true
#是否开启 swagger-ui
swagger:
enabled: true
# IP 本地解析
ip:
local-parsing: true
# 文件存储路径
file:
mac:
path: ~/file/
avatar: ~/avatar/
linux:
path: /home/eladmin/file/
avatar: /home/eladmin/avatar/
windows:
path: C:\eladmin\file\
avatar: C:\eladmin\avatar\
# 文件大小 /M
maxSize: 100
avatarMaxSize: 5
# 配置请求发送路径
req:
db:
# 设置给大坝回传号码的地址
host: http://api.hzdaba.cn/aibot
url: /api/activity/addclient
# 线程池的相关配置
produce:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'ProduceLocalFileTaskExecutor-'
send:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'SendLocalFileTaskExecutor-'
merge:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'MergeFileTaskExecutor-'
producebigdata:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'ProduceBigDataTaskExecutor-'
SendBigData:
task:
thread_pool:
corePoolSize: 2
maxPoolSize: 16
queueCapacity: 3
ThreadNamePrefix: 'SendBigDataTaskExecutor-'
# 增加日志相关的配置
logging:
level:
# Spring Boot binds logging.level entries to a single level per logger;
# a YAML list here fails property binding, so use one scalar value.
org.springframework.security: debug
org.springframework.web: error
org.hibernate.SQL: debug
org.hibernate.engine.QueryParameters: debug
org.hibernate.engine.query.HQLQueryPlan: debug
org.hibernate.type.descriptor.sql.BasicBinder: trace
tag:
split-table:
sum: 10

@ -5,7 +5,7 @@ spring:
freemarker:
check-template-location: false
profiles:
active: dev
active: test
jackson:
time-zone: GMT+8
data:
@ -19,6 +19,11 @@ spring:
hibernate:
ddl-auto: none
dialect: org.hibernate.dialect.MySQL5InnoDBDialect
jdbc: # 增加批量添加优化
batch_size: 2000
batch_versioned_data: true
order_inserts: true
order_updates: true
open-in-view: true
redis:

@ -0,0 +1,158 @@
package me.zhengjie;
import cn.hutool.core.util.RandomUtil;
import com.google.common.collect.Lists;
import me.zhengjie.utils.FileUtil;
import me.zhengjie.utils.StringUtils;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.*;
import java.util.stream.Collectors;
import static java.util.stream.Collectors.toList;
/**
 * Ad-hoc file-munging scratch tests: de-duplicate, strip quotes, split by
 * line length, shuffle into buckets, and diff two lists.
 *
 * NOTE(review): all paths are hard-coded to local drives (K:\, J:\); these
 * tests are meant to be run manually, not in CI.
 */
public class FileTest {

    /**
     * Reads one input file, strips double quotes from every line, and splits
     * the lines by length (32 chars vs 24 chars) into two output files.
     *
     * Fixes: removed the unused {@code path1} local (K:\253.txt was declared
     * but never read), and added {@code CREATE} alongside {@code APPEND} so
     * the first run does not fail with NoSuchFileException when the output
     * files do not exist yet.
     *
     * @throws IOException on any read/write failure
     */
    @Test
    public void testMerge() throws IOException {
        String path = "K:\\254.txt";
        List<String> lines = Files.readAllLines(Paths.get(path));

        List<String> len32 = new ArrayList<>();
        List<String> len24 = new ArrayList<>();
        for (String line : lines) {
            String cleaned = StringUtils.replace(line, "\"", "");
            if (cleaned.length() == 32) {
                len32.add(cleaned);
            } else if (cleaned.length() == 24) {
                len24.add(cleaned);
            }
        }
        Files.write(Paths.get("K:\\merge1.txt"), len32,
                StandardOpenOption.CREATE, StandardOpenOption.APPEND);
        Files.write(Paths.get("K:\\merge2.txt"), len24,
                StandardOpenOption.CREATE, StandardOpenOption.APPEND);
    }

    /**
     * Randomly distributes the lines of one input file into 10 roughly equal
     * buckets and writes each bucket to {@code K:\result<i>}.
     *
     * Fix: replaced the ten hand-declared lists and manual map population
     * with a loop over {@code computeIfAbsent}.
     *
     * @throws IOException on any read/write failure
     */
    @Test
    public void testSplit() throws IOException {
        String path = "K:\\merge1.txt";
        List<String> lines = Files.readAllLines(Paths.get(path));

        Map<Integer, List<String>> buckets = new HashMap<>();
        for (String line : lines) {
            int bucket = RandomUtil.randomInt(10);
            buckets.computeIfAbsent(bucket, k -> new ArrayList<>()).add(line);
        }
        for (int i = 0; i < 10; i++) {
            // Write every bucket, including empty ones, matching the old behavior.
            List<String> bucket = buckets.getOrDefault(i, Collections.emptyList());
            Files.write(Paths.get("K:\\result" + i), bucket, StandardOpenOption.CREATE);
        }
    }

    /**
     * Reads a two-column CSV (key,value), groups the distinct values by key,
     * and writes one file per key under J:\ containing that key's values.
     *
     * @throws IOException if the source CSV cannot be read
     */
    @Test
    public void splitEachFile() throws IOException {
        List<String> lines = Files.readAllLines(Paths.get("J:\\111.6w.csv"));
        List<KVBean> pairs = Lists.newArrayList();
        for (String line : lines) {
            String[] cols = StringUtils.split(line, ",");
            // Ignore malformed rows that do not have exactly two columns.
            if (cols.length == 2) {
                KVBean bean = new KVBean();
                bean.setK(cols[0].trim().replaceAll("\"", ""));
                bean.setV(cols[1].trim().replaceAll("\"", ""));
                pairs.add(bean);
            }
        }
        Map<String, Set<String>> valuesByKey = pairs.stream()
                .collect(Collectors.groupingBy(KVBean::getK,
                        Collectors.mapping(KVBean::getV, Collectors.toSet())));
        valuesByKey.forEach((key, values) -> {
            try {
                Files.write(Paths.get("J:\\" + key), values, StandardOpenOption.CREATE);
            } catch (IOException e) {
                // Best-effort per-key write; keep going on individual failures.
                e.printStackTrace();
            }
        });
    }

    /**
     * Demonstrates computing the de-duplicated union of two lists and prints
     * the union plus both originals. Dead commented-out code removed.
     */
    @Test
    public void testFilterTwoList() {
        List<String> list1 = new ArrayList<>(Arrays.asList("1", "2", "3", "5", "6"));
        List<String> list2 = new ArrayList<>(Arrays.asList("2", "3", "7", "8"));

        // Union with duplicates — copy first so the source lists stay untouched.
        List<String> listAll = new ArrayList<>(list1);
        listAll.addAll(list2);

        // De-duplicated union.
        List<String> listAllDistinct = listAll.stream().distinct().collect(toList());
        System.out.println("---得到去重并集 listAllDistinct---");
        listAllDistinct.parallelStream().forEachOrdered(System.out :: println);

        System.out.println("---原来的List1---");
        list1.parallelStream().forEachOrdered(System.out :: println);
        System.out.println("---原来的List2---");
        list2.parallelStream().forEachOrdered(System.out :: println);
    }
}

@ -0,0 +1,37 @@
package me.zhengjie;
import cn.hutool.json.JSON;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import me.zhengjie.modules.buildrecord.task.convert.BuildTaskQueryParamJsonConvert;
import org.assertj.core.util.Lists;
import org.junit.Test;
/**
 * Manual round-trip checks for serialising
 * {@link BuildTaskQueryParamJsonConvert} to and from JSON with hutool's
 * {@code JSONUtil}. Output is printed for eyeballing, not asserted.
 *
 * Fix: deleted the commented-out dead code that experimented with
 * {@code JSONUtil.parseArray}.
 */
public class JsonTest {

    /**
     * Serialises a populated convert bean to JSON, then parses an equivalent
     * JSON string back into the bean, printing each step.
     */
    @Test
    public void testJsonPase(){
        BuildTaskQueryParamJsonConvert convert = new BuildTaskQueryParamJsonConvert();
        convert.setCityCode(Lists.newArrayList(1, 2));
        convert.setStuGrade(Lists.newArrayList(1, 3));
        convert.setRange(2);
        convert.setLocalCode("5.0,5.0");
        JSON parse = JSONUtil.parse(convert);
        System.out.println(parse.toString());

        String str = "{\"cityCode\":[1,2],\"range\":2,\"localCode\":\"5.0,5.0\",\"stuGrade\":[1,3]}";
        JSONObject jsonObject1 = JSONUtil.parseObj(str);
        System.out.println(jsonObject1);
        BuildTaskQueryParamJsonConvert convert1 = JSONUtil.toBean(jsonObject1, BuildTaskQueryParamJsonConvert.class);
        System.out.println(convert1);
    }
}

@ -0,0 +1,10 @@
package me.zhengjie;
import lombok.Data;
/**
 * Simple key/value pair used when splitting a two-column CSV into per-key
 * files (see FileTest#splitEachFile). Lombok {@code @Data} generates the
 * getters, setters, equals/hashCode and toString.
 */
@Data
public class KVBean {
// CSV column 1: the grouping key
private String k;
// CSV column 2: the value collected under the key
private String v;
}

@ -1,5 +1,6 @@
package me.zhengjie;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import me.zhengjie.modules.abmessage.domain.AbMessage;
import me.zhengjie.modules.abmessage.service.AbMessageService;
@ -10,18 +11,27 @@ import me.zhengjie.modules.mnt.service.dto.AppQueryCriteria;
import me.zhengjie.modules.student.domain.Student;
import me.zhengjie.modules.student.repository.StudentRepository;
import me.zhengjie.modules.student.service.StudentService;
import me.zhengjie.modules.tag.service.TagService;
import me.zhengjie.utils.FileUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.domain.*;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.test.context.junit4.SpringRunner;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
@ -46,6 +56,9 @@ public class SpringJPATest {
@Autowired
private EduRepository eduRepository;
@Autowired
private TagService tagService;
@Test
public void testQueryBySlice() throws Exception{
@ -181,14 +194,99 @@ public class SpringJPATest {
// allMatchDataBy3.forEach(System.out::println);
Pageable pageable = PageRequest.of(0, 2);
Page<String> page = eduRepository.findAllMatchDataByPage(city, stuGrade, minX, maxX, minY, maxY, pageable);
// Page<String> page = eduRepository.findAllMatchDataByPage(city, stuGrade, minX, maxX, minY, maxY, pageable);
System.out.println("查询的总页数" + page.getTotalPages());
/* System.out.println("查询的总页数" + page.getTotalPages());
System.out.println("查询的总记录数" + page.getTotalElements());
System.out.println("查询的当前第几页" + page.getNumber() + 1);
System.out.println("查询当前页面的集合" + page.getContent());
System.out.println("查询当前页面的记录数" + page.getNumberOfElements());
System.out.println("查询当前页面的记录数" + page.getNumberOfElements());*/
}
@PersistenceContext
private EntityManager entityManager;

/**
 * Batch-insert benchmark, variant 1: builds a single native INSERT with a
 * multi-row VALUES clause (10 000 rows) and executes it once through the
 * JPA EntityManager, printing wall-clock timings.
 */
@Test
public void testBatchInsert(){
    // Prepare the rows to insert (10 000 students).
    List<Student> students = new ArrayList<>();
    for (int i = 0; i < 1_0_000; i++) {
        Student s = new Student();
        s.setAge(i);
        s.setName("test" + i);
        students.add(s);
    }

    // Build "INSERT ... VALUES (?, ?),(?, ?),..." — one placeholder pair per row.
    StringBuilder sql = new StringBuilder("INSERT INTO t_student(name, age) VALUES ");
    for (int i = 0; i < students.size(); i++) {
        sql.append("(?, ?),");
    }
    sql.setLength(sql.length() - 1); // drop the trailing comma

    Query query = entityManager.createNativeQuery(sql.toString());
    // Bind positional parameters in the same order the pairs were appended.
    int idx = 1;
    for (Student s : students) {
        query.setParameter(idx++, s.getName());
        query.setParameter(idx++, s.getAge());
    }

    long start = System.currentTimeMillis();
    System.out.println("=============== start , time is , "+ start);
    query.executeUpdate();
    System.out.println("=============== end , cost time is , "+ (System.currentTimeMillis() - start));
}
@Autowired
private JdbcTemplate jdbcTemplate;

/**
 * Batch-insert benchmark, variant 2: a single parameterised INSERT executed
 * via JdbcTemplate.batchUpdate with a BatchPreparedStatementSetter
 * (1 000 000 rows), printing wall-clock timings and the batch size.
 */
@Test
public void testBatchInsert1(){
    // Prepare the rows to insert (1 000 000 students).
    List<Student> students = new ArrayList<>();
    for (int i = 0; i < 1_000_000; i++) {
        Student s = new Student();
        s.setAge(i);
        s.setName("test" + i);
        students.add(s);
    }

    long start = System.currentTimeMillis();
    System.out.println("=============== start , time is , "+ start);
    int[] ints = jdbcTemplate.batchUpdate(
            "INSERT INTO t_student(name, age) VALUES (?,?)",
            new BatchPreparedStatementSetter() {
                @Override
                public void setValues(PreparedStatement preparedStatement, int i) throws SQLException {
                    // i is the row index within the batch.
                    Student s = students.get(i);
                    preparedStatement.setString(1, s.getName());
                    preparedStatement.setInt(2, s.getAge());
                }

                @Override
                public int getBatchSize() {
                    return students.size();
                }
            });
    System.out.println("=============== end , cost time is , "+ (System.currentTimeMillis() - start) +","+ints.length);
}
/**
 * Runs the tag service's query-and-batch-insert flow for a fixed set of
 * city codes and student grades, printing the number of rows written.
 */
@Test
public void testQueryAndBatchInsert(){
    List<Integer> cityCodes = Arrays.asList(1101, 3101, 4401, 4403, 5101, 3301, 5001, 4201, 6101, 3205);
    List<Integer> grades = Arrays.asList(14, 13, 12, 11, 9, 10);
    // 26 is the task id; null means no extra query restriction — TODO confirm against TagService.
    Integer inserted = tagService.queryAndBatchInsertData(26, cityCodes, grades, null);
    System.out.println(inserted);
}
}

@ -1,7 +1,10 @@
package me.zhengjie;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateTime;
import com.alibaba.fastjson.JSON;
import me.zhengjie.modules.abmessage.domain.AbMessage;
import me.zhengjie.utils.DateUtil;
import me.zhengjie.utils.FileUtil;
import org.junit.Test;
@ -10,7 +13,10 @@ import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
*
@ -56,4 +62,68 @@ public class TempTest {
System.out.println(JSON.parseObject(json, AbMessage.class));
}
/**
 * Sanity check: element access on the fixed-size list returned by
 * Arrays.asList prints the first element.
 */
@Test
public void testList(){
    List<Integer> values = Arrays.asList(1, 2, 3);
    Integer first = values.get(0);
    System.out.println(first);
}
/**
 * Prints the elements of {@code list} that are NOT shared with {@code list1}:
 * first collects the intersection, then filters it out of {@code list}.
 */
@Test
public void testList1(){
    List<Integer> list = Arrays.asList(1, 2, 3, 4);
    List<Integer> list1 = Arrays.asList(3, 4, 5);

    // Elements present in both lists.
    Set<Integer> common = list.stream()
            .filter(list1::contains)
            .collect(Collectors.toSet());

    // Elements of `list` outside the intersection.
    Set<Integer> onlyInList = list.stream()
            .filter(item -> !common.contains(item))
            .collect(Collectors.toSet());
    onlyInList.forEach(System.out::println);
}
/**
 * Demonstrates Collections.shuffle: prints ten random permutations of the
 * digits 1..9, one permutation per line. (Arrays.asList supports set(),
 * so shuffling the fixed-size view works.)
 */
@Test
public void testList2(){
    List<Integer> digits = Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9);
    for (int round = 0; round < 10; round++) {
        Collections.shuffle(digits);
        digits.forEach(System.out::print);
        System.out.println();
    }
}
/**
 * Spot-checks secondToTime by converting a millisecond sample to seconds
 * and printing the formatted duration.
 */
@Test
public void testTimeUtil(){
    long seconds = 1148255 / 1000; // milliseconds -> seconds
    System.out.println(secondToTime(seconds));
}
/**
 * Formats a duration given in seconds as a Chinese "X天X小时X分X秒" string,
 * omitting leading zero-valued units; returns "" when the duration is zero
 * (or negative).
 *
 * NOTE(review): duplicates ConvertUtil.secondToTime except that this variant
 * takes seconds directly rather than milliseconds.
 *
 * @param second duration in seconds
 * @return human-readable duration, or "" when there is nothing to show
 */
private String secondToTime(long second){
    long days = second / 86400;
    long hours = (second % 86400) / 3600;
    long minutes = (second % 3600) / 60;
    long secs = second % 60;
    if (days > 0) {
        return days + "天" + hours + "小时" + minutes + "分" + secs + "秒";
    }
    if (hours > 0) {
        return hours + "小时" + minutes + "分" + secs + "秒";
    }
    if (minutes > 0) {
        return minutes + "分" + secs + "秒";
    }
    return secs > 0 ? secs + "秒" : "";
}
}

@ -125,7 +125,11 @@
<artifactId>swagger-models</artifactId>
<version>1.5.21</version>
</dependency>
<dependency>
<groupId>org.apache.shardingsphere</groupId>
<artifactId>sharding-jdbc-spring-boot-starter</artifactId>
<version>4.0.0-RC1</version>
</dependency>
<!--Mysql依赖包-->
<dependency>
<groupId>mysql</groupId>
@ -134,10 +138,15 @@
</dependency>
<!-- druid数据源驱动 -->
<dependency>
<!-- <dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>${druid.version}</version>
</dependency>-->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.1.22</version>
</dependency>
<dependency>
<groupId>org.lionsoul</groupId>

Loading…
Cancel
Save