Commit 2806c3a3 by huluobin

update

parent 1c15608d
@@ -2,7 +2,7 @@ package com.bailuntec.domain.enumerate;
 public enum AccountPlatformType {
-    Amazon(15,"AMAZON"),
+    Amazon(15, "AMAZON"),
     Wish(8, "WISH"),
     Cdiscount(10, "CDISCOUNT"),
     Ebay(12, "EBAY"),
@@ -30,4 +30,10 @@ public enum AccountPlatformType {
     public int code() {
         return code;
     }
+
+    public static class Shopify {
+        public static Integer code() {
+            return 1;
+        }
+    }
 }
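For reference, a minimal usage sketch of the enum after this change; PlatformCodeDemo is hypothetical and not part of the commit. The enum constants expose their code through the instance method code(), while the new Shopify member is a static nested class, so its code comes from a static method instead:

import com.bailuntec.domain.enumerate.AccountPlatformType;

public class PlatformCodeDemo {
    public static void main(String[] args) {
        int amazonCode = AccountPlatformType.Amazon.code();        // 15, from the enum constant
        Integer shopifyCode = AccountPlatformType.Shopify.code();  // 1, from the static nested class
        System.out.println(amazonCode + " / " + shopifyCode);
    }
}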
@@ -56,6 +56,15 @@ public interface DcAutoUprushConfigMapper {
      */
     DcAutoUprushConfig selectOneByExample(DcAutoUprushConfigExample example);

+    /**
+     * <p>
+     * Select the sales-surge (uprush) detection config for a warehouse
+     * </p>
+     *
+     * @param warehouseCode warehouse code
+     * @param hqType        HQ (global) warehouse type
+     * @return sales-surge detection config
+     */
     DcAutoUprushConfig selectOneByWarehouse(@Param("warehouseCode") String warehouseCode, @Param("hqType") String hqType);

     /**
@@ -123,4 +132,4 @@ public interface DcAutoUprushConfigMapper {
      * @project https://github.com/itfsw/mybatis-generator-plugin
      */
     int upsertSelective(DcAutoUprushConfig record);
-}
\ No newline at end of file
+}
@@ -2,8 +2,10 @@ package com.bailuntec.mapper;
 import com.bailuntec.domain.entity.DcBaseStock;
 import com.bailuntec.domain.example.DcBaseStockExample;
+import com.dangdang.ddframe.job.api.ShardingContext;
 import org.apache.ibatis.annotations.Param;

+import java.time.LocalDateTime;
 import java.util.List;

 public interface DcBaseStockMapper {
@@ -130,23 +132,78 @@ public interface DcBaseStockMapper {
     void insertDailyStock();

-    List<DcBaseStock> listStockById(@Param("v1") int v1, @Param("v2") int v2);
+    List<DcBaseStock> listStockById(ShardingContext shardingContext, @Param("v1") int v1, @Param("v2") int v2);

     List<DcBaseStock> listStockWarehouseById(@Param("v1") int v1, @Param("v2") int v2, @Param("warehouseCode") String warehouseCode);

-    List<DcBaseStock> listParameterStockById(@Param("jobParameter")String jobParameter,@Param("v1") int v1, @Param("v2") int v2);
+    List<DcBaseStock> listParameterStockById(@Param("shardingContext") ShardingContext shardingContext, @Param("v1") int v1, @Param("v2") int v2);

-    List<DcBaseStock> listAllStock(@Param("v1") int v1, @Param("v2") int v2);
+    List<DcBaseStock> listAllStock(@Param("shardingContext") ShardingContext shardingContext, @Param("v1") int v1, @Param("v2") int v2);

-    List<DcBaseStock> listWarehouseStockById(@Param("warehouseCode")String warehouseCode,@Param("v1") int v1, @Param("v2") int v2);
+    List<DcBaseStock> listWarehouseStockById(@Param("shardingContext") ShardingContext shardingContext, @Param("warehouseCode") String warehouseCode, @Param("v1") int v1, @Param("v2") int v2);

     List<DcBaseStock> listInStockNotInAutoTurnover();

-    long countByParam(@Param("jobParameter") String jobParameter);
-
-    long countAll();
-
-    long countByWarehouseCode(@Param("warehouseCode")String warehouseCode);
+    // long countByParam(@Param("jobParameter") String jobParameter);
+    //
+    // long countAll();
+    //
+    // long countByWarehouseCode(@Param("warehouseCode") String warehouseCode);

     void updateSkuUnitPrice();
-}
\ No newline at end of file
+
+    // /**
+    //  * <p>
+    //  * Normal (non-incremental) sync count for the auto-turnover list
+    //  * </p>
+    //  *
+    //  * @return count
+    //  */
+    // long countNormal();
+
+    /**
+     * <p>
+     * Stock list for the normal auto-turnover sync
+     * </p>
+     *
+     * @param shardingContext shardingContext
+     * @param pageStart       pageStart
+     * @param pageOffset      pageOffset
+     * @return stock list
+     */
+    List<DcBaseStock> listNormal(@Param("shardingContext") ShardingContext shardingContext, @Param("pageStart") int pageStart, @Param("pageOffset") int pageOffset);
+
+    long countByParam(@Param("shardingContext") ShardingContext shardingContext);
+
+    long countAll(@Param("shardingContext") ShardingContext shardingContext);
+
+    long countNormal(@Param("shardingContext") ShardingContext shardingContext);
+
+    long countByWarehouseCode(@Param("shardingContext") ShardingContext shardingContext);
+
+    /**
+     * <p>
+     * Count of records that backfill the nightly scheduled job
+     * </p>
+     *
+     * @param shardingContext shardingContext
+     * @param v1 3:00 AM of the current day
+     * @param v2 midnight one week before the current day
+     * @return count
+     */
+    long countIncrement(@Param("shardingContext") ShardingContext shardingContext, @Param("v1") LocalDateTime v1, @Param("v2") LocalDateTime v2);
+
+    /**
+     * <p>
+     * Records that backfill the nightly scheduled job
+     * </p>
+     *
+     * @param shardingContext shardingContext
+     * @param v1 3:00 AM of the current day
+     * @param v2 midnight one week before the current day
+     * @param pageStart  pageStart
+     * @param pageOffset pageOffset
+     * @return records that backfill the nightly scheduled job
+     */
+    List<DcBaseStock> increment(@Param("shardingContext") ShardingContext shardingContext, @Param("v1") LocalDateTime v1, @Param("v2") LocalDateTime v2, @Param("pageStart") int pageStart, @Param("pageOffset") int pageOffset);
+}
@@ -11,7 +11,8 @@ import lombok.extern.slf4j.Slf4j;
 import java.time.LocalDateTime;

 @Slf4j
-public class PointJob implements SimpleJob {
+public abstract class PointJob implements SimpleJob {
+
     @Override
     public void execute(ShardingContext shardingContext) {
         JobPointLog jobPointLog = jobInit(shardingContext);
@@ -38,6 +39,7 @@ public class PointJob implements SimpleJob {
         JobPointLog jobPointLog = null;
         try {
             JobPointLogMapper mapper = SessionUtil.getSession().getMapper(JobPointLogMapper.class);
+
             jobPointLog = mapper.selectOneByExample(JobPointLogExample.newAndCreateCriteria().andJobNameEqualTo(shardingContext.getJobName()).example());
             if (jobPointLog == null) {
                 jobPointLog = new JobPointLog(shardingContext.getJobName(), 1, 100, 0, 1, LocalDateTime.now().minusDays(1), LocalDateTime.now());
@@ -51,8 +53,7 @@ public class PointJob implements SimpleJob {
         return jobPointLog;
     }

-    public void executeJob(ShardingContext shardingContext, JobPointLog jobPointLog) {
-    }
+    public abstract void executeJob(ShardingContext shardingContext, JobPointLog jobPointLog);
 }
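With executeJob() now abstract, PointJob acts as a template: execute() loads or creates the JobPointLog checkpoint and delegates the actual work to subclasses. Below is a minimal sketch of a concrete subclass; ExamplePointJob, its paging values, and the commented package guesses are assumptions rather than repository code — only PointJob, JobPointLog, ShardingContext, DcBaseStock, DcBaseStockMapper, and SessionUtil come from this commit:

import com.bailuntec.domain.entity.DcBaseStock;
import com.bailuntec.domain.entity.JobPointLog;   // package assumed
import com.bailuntec.job.PointJob;                // package assumed
import com.bailuntec.mapper.DcBaseStockMapper;
import com.bailuntec.utils.SessionUtil;
import com.dangdang.ddframe.job.api.ShardingContext;
import lombok.extern.slf4j.Slf4j;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.List;

@Slf4j
public class ExamplePointJob extends PointJob {

    @Override
    public void executeJob(ShardingContext shardingContext, JobPointLog jobPointLog) {
        DcBaseStockMapper mapper = SessionUtil.getSession().getMapper(DcBaseStockMapper.class);
        // Page window; in the real job these would presumably be derived from jobPointLog.
        int pageStart = 0;
        int pageOffset = 100;

        List<DcBaseStock> batch;
        if ("INCREMENT".equals(shardingContext.getJobParameter())) {
            // Backfill window used by the increment queries: 03:00 today back to midnight a week ago.
            LocalDateTime v1 = LocalDateTime.of(LocalDate.now(), LocalTime.MIN).plusHours(3);
            LocalDateTime v2 = LocalDateTime.of(LocalDate.now(), LocalTime.MIN).minusDays(7);
            batch = mapper.increment(shardingContext, v1, v2, pageStart, pageOffset);
        } else {
            batch = mapper.listNormal(shardingContext, pageStart, pageOffset);
        }
        log.info("jobName={}, shardingItem={}, fetched={}",
                shardingContext.getJobName(), shardingContext.getShardingItem(), batch.size());
    }
}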
package com.bailuntec.mapper;
import com.bailuntec.utils.SessionUtil;
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.executor.ShardingContexts;
import org.junit.jupiter.api.Test;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.HashMap;
/**
* <p>
*
* </p>
*
* @author robbendev
 * @since 2020/8/11 5:00 PM
*/
class DcBaseStockMapperTest {
@Test
void countIncrement() {
}
@Test
void increment() {
DcBaseStockMapper dcBaseStockMapper = SessionUtil.getSession().getMapper(DcBaseStockMapper.class);
ShardingContexts shardingContexts = new ShardingContexts("1", "jobName", 8, "INCREMENT", new HashMap<>(), 1);
LocalDateTime v1 = LocalDateTime.of(LocalDate.now(), LocalTime.MIN).plusHours(3);
LocalDateTime v2 = LocalDateTime.of(LocalDate.now(), LocalTime.MIN).minusDays(7);
dcBaseStockMapper.increment(new ShardingContext(shardingContexts, 1), v1, v2, 0, 100);
}
}
 package com.bailuntec;

 import com.alibaba.druid.pool.DruidDataSource;
-import com.bailuntec.listener.AutoTurnoverJobListener;
 import com.bailuntec.job.AutoTurnoverJob;
+import com.bailuntec.listener.AutoTurnoverJobListener;
 import com.bailuntec.utils.PropertiesUtil;
 import com.dangdang.ddframe.job.config.JobCoreConfiguration;
 import com.dangdang.ddframe.job.config.simple.SimpleJobConfiguration;
@@ -24,33 +24,42 @@ public class Application {
     private static final String EVENT_RDB_STORAGE_URL = propertiesUtil.getPropertyAsString("EVENT_RDB_STORAGE_URL");
     private static final String EVENT_RDB_STORAGE_USERNAME = propertiesUtil.getPropertyAsString("EVENT_RDB_STORAGE_USERNAME");
     private static final String EVENT_RDB_STORAGE_PASSWORD = propertiesUtil.getPropertyAsString("EVENT_RDB_STORAGE_PASSWORD");

     public static void main(String[] args) {
-        new JobScheduler(createRegistryCenter(), createJobConfiguration(),createJobEventConfiguration(), new AutoTurnoverJobListener()).init();
-        new JobScheduler(createRegistryCenter(), createJobConfigurationParam(),createJobEventConfiguration(), new AutoTurnoverJobListener()).init();
+        new JobScheduler(createRegistryCenter(), createJobConfigurationNormal(), createJobEventConfiguration(), new AutoTurnoverJobListener()).init();
+        new JobScheduler(createRegistryCenter(), createJobConfigurationIncrement(), createJobEventConfiguration(), new AutoTurnoverJobListener()).init();
     }

     private static CoordinatorRegistryCenter createRegistryCenter() {
         CoordinatorRegistryCenter regCenter = new ZookeeperRegistryCenter(new ZookeeperConfiguration(propertiesUtil.getPropertyAsString("ZOOKEEPER_SERVER"), propertiesUtil.getPropertyAsString("NAME_SPACE")));
         regCenter.init();
         return regCenter;
     }

-    private static LiteJobConfiguration createJobConfiguration() {
-        JobCoreConfiguration simpleCoreConfig = JobCoreConfiguration.newBuilder(propertiesUtil.getPropertyAsString("JOB_NAME"), propertiesUtil.getPropertyAsString("JOB_CRON"), propertiesUtil.getPropertyAsInt("SHARDING_TOTAL_COUNT")).build();
+    private static LiteJobConfiguration createJobConfigurationNormal() {
+        JobCoreConfiguration simpleCoreConfig = JobCoreConfiguration.newBuilder(
+                propertiesUtil.getPropertyAsString("JOB_NAME"),
+                propertiesUtil.getPropertyAsString("JOB_CRON"),
+                propertiesUtil.getPropertyAsInt("SHARDING_TOTAL_COUNT"))
+                .jobParameter("NORMAL")
+                .build();
         SimpleJobConfiguration simpleJobConfig = new SimpleJobConfiguration(simpleCoreConfig, AutoTurnoverJob.class.getCanonicalName());
-        LiteJobConfiguration simpleJobRootConfig = LiteJobConfiguration.newBuilder(simpleJobConfig).build();
-        return simpleJobRootConfig;
+        return LiteJobConfiguration.newBuilder(simpleJobConfig).build();
     }

-    private static LiteJobConfiguration createJobConfigurationParam() {
-        JobCoreConfiguration simpleCoreConfig = JobCoreConfiguration.newBuilder(propertiesUtil.getPropertyAsString("JOB_NAME_PARAM"), propertiesUtil.getPropertyAsString("JOB_CRON_PARAM"), propertiesUtil.getPropertyAsInt("SHARDING_TOTAL_COUNT_PARAM")).build();
+    private static LiteJobConfiguration createJobConfigurationIncrement() {
+        JobCoreConfiguration simpleCoreConfig = JobCoreConfiguration.newBuilder(
+                propertiesUtil.getPropertyAsString("JOB_NAME_INCREMENT"),
+                propertiesUtil.getPropertyAsString("JOB_CRON_INCREMENT"),
+                propertiesUtil.getPropertyAsInt("SHARDING_TOTAL_COUNT_INCREMENT"))
+                .jobParameter("INCREMENT")
+                .build();
         SimpleJobConfiguration simpleJobConfig = new SimpleJobConfiguration(simpleCoreConfig, AutoTurnoverJob.class.getCanonicalName());
-        LiteJobConfiguration simpleJobRootConfig = LiteJobConfiguration.newBuilder(simpleJobConfig).build();
-        return simpleJobRootConfig;
+        return LiteJobConfiguration.newBuilder(simpleJobConfig).build();
     }

     private static JobEventConfiguration createJobEventConfiguration() {
-        JobEventConfiguration jobEventRdbConfig = new JobEventRdbConfiguration(setUpEventTraceDataSource());
-        return jobEventRdbConfig;
+        return new JobEventRdbConfiguration(setUpEventTraceDataSource());
     }

     private static DataSource setUpEventTraceDataSource() {
......
@@ -7,7 +7,6 @@ import com.bailuntec.domain.example.DcAutoExceptionExample;
 import com.bailuntec.domain.example.DcBaseStockExample;
 import com.bailuntec.job.AutoTurnoverJob;
 import com.bailuntec.mapper.DcAutoExceptionMapper;
-import com.bailuntec.mapper.DcAutoTurnoverMapper;
 import com.bailuntec.mapper.DcBaseStockMapper;
 import com.bailuntec.utils.SessionUtil;
 import com.dangdang.ddframe.job.executor.ShardingContexts;
@@ -23,22 +22,7 @@ import java.util.List;
 public class AutoTurnoverJobListener implements ElasticJobListener {

     @Override
     public void beforeJobExecuted(ShardingContexts shardingContexts) {
-        try {
-            DcAutoTurnoverMapper autoTurnoverMapper = SessionUtil.getSession().getMapper(DcAutoTurnoverMapper.class);
-            if ( shardingContexts.getJobName().equals("show-auto-turnover")) {
-                autoTurnoverMapper.truncateAutoInboundTable();
-                autoTurnoverMapper.truncateAutoInventoryTable();
-                autoTurnoverMapper.truncateAutoShortSupplyTable();
-                autoTurnoverMapper.truncateAutoTurnoverTable();
-                autoTurnoverMapper.truncateAutoSalesTable();
-                autoTurnoverMapper.truncateAutoExceptionTable();
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new RuntimeException("Mybatis操作DB失败", e);
-        } finally {
-            SessionUtil.closeSession();
-        }
     }

     @Override
......
-#EVENT_RDB_STORAGE_DRIVER=com.mysql.jdbc.Driver
-#EVENT_RDB_STORAGE_URL=jdbc:mysql://gz-cdb-kp7s5i79.sql.tencentcdb.com:61691/bailun_datacenter?serverTimezone=GMT%2B8&characterEncoding=utf-8
+EVENT_RDB_STORAGE_DRIVER=com.mysql.jdbc.Driver
+EVENT_RDB_STORAGE_URL=jdbc:mysql://gz-cdb-kp7s5i79.sql.tencentcdb.com:61691/bailun_datacenter?serverTimezone=GMT%2B8&characterEncoding=utf-8
 #EVENT_RDB_STORAGE_USERNAME=root
 #EVENT_RDB_STORAGE_PASSWORD=123456
 #ZOOKEEPER_SERVER=127.0.0.1:2181
-EVENT_RDB_STORAGE_DRIVER=com.mysql.jdbc.Driver
-EVENT_RDB_STORAGE_URL=jdbc:mysql://10.0.8.15:3306/bailun_datacenter?useUnicode=true&characterEncoding=utf-8&serverTimezone=GMT%2B8
+#EVENT_RDB_STORAGE_DRIVER=com.mysql.jdbc.Driver
+#EVENT_RDB_STORAGE_URL=jdbc:mysql://10.0.8.15:3306/bailun_datacenter?useUnicode=true&characterEncoding=utf-8&serverTimezone=GMT%2B8
 EVENT_RDB_STORAGE_USERNAME=root
 EVENT_RDB_STORAGE_PASSWORD=#7kfnymAM$Y9-Ntf
 ZOOKEEPER_SERVER=172.31.255.120:2181
 NAME_SPACE=data-center
 JOB_NAME=show-auto-turnover
-JOB_CRON=0 30 0 * * ? *
+JOB_CRON=0 0 3 * * ?
 SHARDING_TOTAL_COUNT=8
 JOB_NAME_PARAM=show-auto-turnover-param
-JOB_CRON_PARAM=0 30 0 * * ? 2099
-SHARDING_TOTAL_COUNT_PARAM=8
\ No newline at end of file
+JOB_CRON_PARAM=0 0 14 * * ?
+SHARDING_TOTAL_COUNT_PARAM=8
+JOB_NAME_INCREMENT=show-auto-turnover-param
+JOB_CRON_INCREMENT=0 0 14 * * ?
+SHARDING_TOTAL_COUNT_INCREMENT=8
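For context, these cron values are Quartz-style expressions (the format Elastic-Job uses): the old 0 30 0 * * ? * fired daily at 00:30 and 0 30 0 * * ? 2099 restricted the param job to the year 2099, effectively disabling it; the new values 0 0 3 * * ? and 0 0 14 * * ? fire daily at 03:00 and 14:00 respectively, and the INCREMENT job reuses the 14:00 schedule.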
#log4j.rootLogger=DEBUG,console,dailyFile,im
#log4j.additivity.org.apache=true
## Console (console)
#log4j.appender.console=org.apache.log4j.ConsoleAppender
#log4j.appender.console.Threshold=DEBUG
#log4j.appender.console.ImmediateFlush=true
#log4j.appender.console.Target=System.err
#log4j.appender.console.layout=org.apache.log4j.PatternLayout
#log4j.appender.console.layout.ConversionPattern=[%-5p] %d(%r) --> [%t] %l: %m %x %n
## Log file (logFile)
#log4j.appender.logFile=org.apache.log4j.FileAppender
#log4j.appender.logFile.Threshold=DEBUG
#log4j.appender.logFile.ImmediateFlush=true
#log4j.appender.logFile.Append=true
#log4j.appender.logFile.File=/logs/log.log4j
#log4j.appender.logFile.layout=org.apache.log4j.PatternLayout
#log4j.appender.logFile.layout.ConversionPattern=[%-5p] %d(%r) --> [%t] %l: %m %x %n
## Rolling file (rollingFile)
#log4j.appender.rollingFile=org.apache.log4j.RollingFileAppender
#log4j.appender.rollingFile.Threshold=DEBUG
#log4j.appender.rollingFile.ImmediateFlush=true
#log4j.appender.rollingFile.Append=true
#log4j.appender.rollingFile.File=/logs/log.log4j
#log4j.appender.rollingFile.MaxFileSize=200KB
#log4j.appender.rollingFile.MaxBackupIndex=50
#log4j.appender.rollingFile.layout=org.apache.log4j.PatternLayout
#log4j.appender.rollingFile.layout.ConversionPattern=[%-5p] %d(%r) --> [%t] %l: %m %x %n
## Daily rolling log file (dailyFile)
#log4j.appender.dailyFile=org.apache.log4j.DailyRollingFileAppender
#log4j.appender.dailyFile.Threshold=DEBUG
#log4j.appender.dailyFile.ImmediateFlush=true
#log4j.appender.dailyFile.Append=true
#log4j.appender.dailyFile.File=/logs/log.log4j
#log4j.appender.dailyFile.DatePattern='.'yyyy-MM-dd
#log4j.appender.dailyFile.layout=org.apache.log4j.PatternLayout
#log4j.appender.dailyFile.layout.ConversionPattern=[%-5p] %d(%r) --> [%t] %l: %m %x %n
## For socket output
#log4j.appender.socket=org.apache.log4j.RollingFileAppender
#log4j.appender.socket.RemoteHost=localhost
#log4j.appender.socket.Port=5001
#log4j.appender.socket.LocationInfo=true
## Set up for Log Factor 5
#log4j.appender.socket.layout=org.apache.log4j.PatternLayout
#log4j.appender.socket.layout.ConversionPattern=[%-5p] %d(%r) --> [%t] %l: %m %x %n
## Log Factor 5 Appender
#log4j.appender.LF5_APPENDER=org.apache.log4j.lf5.LF5Appender
#log4j.appender.LF5_APPENDER.MaxNumberOfRecords=2000
## Send logs to a specified email address
#log4j.appender.mail=org.apache.log4j.net.SMTPAppender
#log4j.appender.mail.Threshold=FATAL
#log4j.appender.mail.BufferSize=10
#log4j.appender.mail.From = xxx@mail.com
#log4j.appender.mail.SMTPHost=mail.com
#log4j.appender.mail.Subject=Log4J Message
#log4j.appender.mail.To= xxx@mail.com
#log4j.appender.mail.layout=org.apache.log4j.PatternLayout
#log4j.appender.mail.layout.ConversionPattern=[%-5p] %d(%r) --> [%t] %l: %m %x %n
## For database output
#log4j.appender.database=org.apache.log4j.jdbc.JDBCAppender
#log4j.appender.database.URL=jdbc:mysql://localhost:3306/test
#log4j.appender.database.driver=com.mysql.jdbc.Driver
#log4j.appender.database.user=root
#log4j.appender.database.password=
#log4j.appender.database.sql=INSERT INTO LOG4J (Message) VALUES('=[%-5p] %d(%r) --> [%t] %l: %m %x %n')
#log4j.appender.database.layout=org.apache.log4j.PatternLayout
#log4j.appender.database.layout.ConversionPattern=[%-5p] %d(%r) --> [%t] %l: %m %x %n
#
## Custom appender
#log4j.appender.im = net.cybercorlin.util.logger.appender.IMAppender
#log4j.appender.im.host = mail.cybercorlin.net
#log4j.appender.im.username = username
#log4j.appender.im.password = password
#log4j.appender.im.recipient = corlin@cybercorlin.net
#log4j.appender.im.layout=org.apache.log4j.PatternLayout
#log4j.appender.im.layout.ConversionPattern=[%-5p] %d(%r) --> [%t] %l: %m %x %n
@@ -98,8 +98,8 @@ public class AutoTurnoverTest {
         AutoTurnoverJob autoTurnoverJob = new AutoTurnoverJob();
         HashMap<Integer, String> map = new HashMap<>();
         map.put(1,"");
-        ShardingContext shardingContext = new ShardingContext(new ShardingContexts("","",4,"", map), 3);
-        JobPointLog jobPointLog = new JobPointLog("", 1, 2000, 1, 1, LocalDateTime.now().minusSeconds(1), LocalDateTime.now());
+        ShardingContext shardingContext = new ShardingContext(new ShardingContexts("","",8,"NORMAL", map), 5);
+        JobPointLog jobPointLog = new JobPointLog("", 1, 400, 1, 1, LocalDateTime.now().minusSeconds(1), LocalDateTime.now());
         autoTurnoverJob.executeJob(shardingContext, jobPointLog);
     }
......