Commit 9e238f3c by huluobin

Amazon advertising

parent 8301711b
......@@ -47,6 +47,7 @@ public class AmazonAdDownloadReportJob implements SimpleJob {
JobAmazonAdLogMapper jobAmazonAdLogMapper = sqlSession.getMapper(JobAmazonAdLogMapper.class);
DcBaseCompanyAccountMapper dcBaseCompanyAccountMapper = sqlSession.getMapper(DcBaseCompanyAccountMapper.class);
DcBaseFinanceAmazonAdProductMapper dcBaseFinanceAmazonAdProductMapper = sqlSession.getMapper(DcBaseFinanceAmazonAdProductMapper.class);
//Fetch reports requested at least 15 minutes ago that have not been downloaded yet. Note: the report-status check is skipped; selection is based purely on time.
List<JobAmazonAdLog> jobAmazonAdLogList = jobAmazonAdLogMapper.selectUnDownload(shardingContext, LocalDateTime.now().minusMinutes(15));
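The selectUnDownload call above is backed by the mapper XML amended at the end of this commit. For orientation, here is a minimal sketch of how that mapper method is presumably declared, with @Param names matching the #{...} placeholders in the XML; the exact signature is an assumption, not part of this commit.

```java
import com.dangdang.ddframe.job.api.ShardingContext;
import org.apache.ibatis.annotations.Param;

import java.time.LocalDateTime;
import java.util.List;

// Hypothetical declaration; the real JobAmazonAdLogMapper is not shown in this diff.
public interface JobAmazonAdLogMapper {

    /**
     * Reports whose status is still 0 (not downloaded) and whose bj_create is older than the
     * cutoff, split across job instances by id % shardingTotalCount = shardingItem.
     */
    List<JobAmazonAdLog> selectUnDownload(@Param("shardingContext") ShardingContext shardingContext,
                                          @Param("localDateTime") LocalDateTime localDateTime);
}
```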
......@@ -70,11 +71,12 @@ public class AmazonAdDownloadReportJob implements SimpleJob {
try (Response response = client.newCall(request).execute()) {
if (response.isSuccessful() && response.body() != null) {
log.info("开始解析账号Id为 {} 的Report----{}, 文件大小为:{} 报告时间:{}",
log.info("开始解析账号Id为 {} 报告时间:{} 的Report----{}, 文件大小为:{} ",
jobAmazonAdLog.getAccountId(),
jobAmazonAdLog.getReportDate(),
jobAmazonAdLog.getReportId(),
response.body().contentLength(),
jobAmazonAdLog.getReportDate());
response.body().contentLength()
);
try (GZIPInputStream gzipInputStream = new GZIPInputStream(response.body().byteStream())) {
Type type = new TypeToken<List<AmazonAdProduct>>() {
}.getType();
......@@ -110,6 +112,9 @@ public class AmazonAdDownloadReportJob implements SimpleJob {
}
}
}
jobAmazonAdLog.setStatus(true);
jobAmazonAdLogMapper.updateByPrimaryKeySelective(jobAmazonAdLog);
}
}
......
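For reference, a self-contained sketch of the download-and-parse flow above, assuming OkHttp and Gson as in the job; the URL and token are placeholders, and AmazonAdProduct stands in for the project's report row entity.

```java
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;

import java.io.InputStreamReader;
import java.io.Reader;
import java.lang.reflect.Type;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.zip.GZIPInputStream;

public class ReportDownloadSketch {
    public static void main(String[] args) throws Exception {
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder()
                .url("https://example.com/report.json.gz")    // placeholder download location
                .addHeader("Authorization", "Bearer <token>") // placeholder access token
                .build();
        try (Response response = client.newCall(request).execute()) {
            if (response.isSuccessful() && response.body() != null) {
                // The report body is gzip-compressed JSON: decompress it, then let Gson
                // deserialize the array into the target element type.
                try (GZIPInputStream gzip = new GZIPInputStream(response.body().byteStream());
                     Reader reader = new InputStreamReader(gzip, StandardCharsets.UTF_8)) {
                    Type type = new TypeToken<List<AmazonAdProduct>>() {}.getType();
                    List<AmazonAdProduct> products = new Gson().fromJson(reader, type);
                    System.out.println("parsed " + products.size() + " rows");
                }
            }
        }
    }
}
```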
......@@ -43,70 +43,78 @@ public class AmazonAdGenerateReportIdJob implements SimpleJob {
.andAmazonAdAuthJsonNotEqualTo("")
.example());
dcBaseCompanyAccountList.forEach(dcBaseCompanyAccount -> {
try {
//
// LocalDateTime reportDate = LocalDateTime.now().minusDays(1).minusHours(12);
LocalDateTime reportDate = LocalDateTime.of(2020, 8, 25, 0, 0);
JSONObject jsonObject = new JSONObject();
jsonObject.put("reportDate", reportDate.format(DateTimeFormatter.ofPattern("yyyyMMdd")));
jsonObject.put("metrics", propertiesUtil.getPropertyAsString("METRICS"));
MediaType mediaType = MediaType.parse("application/json");
Response response;
AmazonAdAuth amazonAdAuth = JSON.parseObject(dcBaseCompanyAccount.getAmazonAdAuthJson(), AmazonAdAuth.class);
RequestBody body = RequestBody.create(mediaType, jsonObject.toJSONString());
Request request = new Request.Builder()
.url(switchSiteUrl(dcBaseCompanyAccount.getSiteEn(), null))
.post(body)
.addHeader("Authorization", amazonAdAuth.getAccessToken())
.addHeader("Amazon-Advertising-API-ClientId", amazonAdAuth.getClientId())
.addHeader("Amazon-Advertising-API-Scope", amazonAdAuth.getProfileId())
.addHeader("Content-Type", "application/json")
.build();
response = client.newCall(request).execute();
String string = response.body().string();
if (response.isSuccessful()) {
AmazonAdSuccessResult amazonAdSuccessResult = JSON.parseObject(string, AmazonAdSuccessResult.class);
JobAmazonAdLog jobAmazonAdLog = jobAmazonAdLogMapper.selectOneByExample(JobAmazonAdLogExample
.newAndCreateCriteria()
.andAccountIdEqualTo(dcBaseCompanyAccount.getAccountId())
.andReportDateEqualTo(reportDate.toLocalDate())
.example());
if (jobAmazonAdLog == null) {
jobAmazonAdLog = new JobAmazonAdLog(null,
dcBaseCompanyAccount.getAccountId(),
amazonAdSuccessResult.getReportId(),
reportDate.toLocalDate(),
false,
LocalDateTime.now(),
LocalDateTime.now(),
dcBaseCompanyAccount.getSiteEn(),
dcBaseCompanyAccount.getCompanyId());
}
jobAmazonAdLogMapper.upsert(jobAmazonAdLog);
log.info("生成报告成功,报告日志:{}", JSON.toJSONString(jobAmazonAdLog));
} else {
log.error(JSON.toJSONString(response));
throw new RuntimeException("Failed to generate report");
}
} catch (Exception ex) {
log.error(ex.getMessage(), ex);
log.error("帐号id :{} 下载报告失败", dcBaseCompanyAccount.getAccountId());
}
});
dcBaseCompanyAccountList
// .stream()
// .filter(dcBaseCompanyAccount -> dcBaseCompanyAccount.getAccountId() == 671)
.forEach(dcBaseCompanyAccount -> {
try {
LocalDateTime reportDate = LocalDateTime.now().minusDays(1).minusHours(12);
// LocalDateTime reportDate = LocalDateTime.of(2020, 8, finalI, 0, 0);
JSONObject jsonObject = new JSONObject();
jsonObject.put("reportDate", reportDate.format(DateTimeFormatter.ofPattern("yyyyMMdd")));
jsonObject.put("metrics", propertiesUtil.getPropertyAsString("METRICS"));
MediaType mediaType = MediaType.parse("application/json");
Response response;
AmazonAdAuth amazonAdAuth = JSON.parseObject(dcBaseCompanyAccount.getAmazonAdAuthJson(), AmazonAdAuth.class);
RequestBody body = RequestBody.create(mediaType, jsonObject.toJSONString());
Request request = new Request.Builder()
.url(switchSiteUrl(dcBaseCompanyAccount.getSiteEn(), null))
.post(body)
.addHeader("Authorization", amazonAdAuth.getAccessToken())
.addHeader("Amazon-Advertising-API-ClientId", amazonAdAuth.getClientId())
.addHeader("Amazon-Advertising-API-Scope", amazonAdAuth.getProfileId())
.addHeader("Content-Type", "application/json")
.build();
response = client.newCall(request).execute();
String string = response.body().string();
if (response.isSuccessful()) {
AmazonAdSuccessResult amazonAdSuccessResult = JSON.parseObject(string, AmazonAdSuccessResult.class);
JobAmazonAdLog jobAmazonAdLog = jobAmazonAdLogMapper.selectOneByExample(JobAmazonAdLogExample
.newAndCreateCriteria()
.andAccountIdEqualTo(dcBaseCompanyAccount.getAccountId())
.andReportDateEqualTo(reportDate.toLocalDate())
.example());
if (jobAmazonAdLog == null) {
jobAmazonAdLog = new JobAmazonAdLog(null,
dcBaseCompanyAccount.getAccountId(),
amazonAdSuccessResult.getReportId(),
reportDate.toLocalDate(),
false,
LocalDateTime.now(),
LocalDateTime.now(),
dcBaseCompanyAccount.getSiteEn(),
dcBaseCompanyAccount.getCompanyId());
}
//If a report has already been generated for this date, should it be reset so it gets downloaded again? //yes
else {
jobAmazonAdLog.setReportId(amazonAdSuccessResult.getReportId());
jobAmazonAdLog.setStatus(false);
jobAmazonAdLog.setBjModified(LocalDateTime.now());
}
jobAmazonAdLogMapper.upsert(jobAmazonAdLog);
log.info("生成报告成功,报告日志:{}", JSON.toJSONString(jobAmazonAdLog));
} else {
log.error(JSON.toJSONString(response));
throw new RuntimeException("Failed to generate report");
}
} catch (Exception ex) {
log.error(ex.getMessage(), ex);
log.error("帐号id :{} 下载报告失败", dcBaseCompanyAccount.getAccountId());
}
});
}
}
protected static String switchSiteUrl(String siteEn, String param) {
......
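switchSiteUrl is collapsed in this diff, so here is a hypothetical sketch of such a site-to-endpoint switch. The three hosts are Amazon's documented regional Advertising API endpoints; the site codes, the sponsored-products report path, and the reading of param as a report id are assumptions for illustration only.

```java
// Hypothetical sketch of switchSiteUrl; the real mapping is project-specific.
protected static String switchSiteUrl(String siteEn, String param) {
    String host;
    switch (siteEn) {
        case "US": case "CA": case "MX":                         // North America
            host = "https://advertising-api.amazon.com"; break;
        case "UK": case "DE": case "FR": case "IT": case "ES":   // Europe
            host = "https://advertising-api-eu.amazon.com"; break;
        case "JP": case "AU":                                     // Far East
            host = "https://advertising-api-fe.amazon.com"; break;
        default:
            throw new IllegalArgumentException("unknown site: " + siteEn);
    }
    return param == null
            ? host + "/v2/sp/productAds/report"                   // request generation of a report
            : host + "/v2/reports/" + param;                      // query an already generated report
}
```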
......@@ -50,6 +50,7 @@ class AmazonAdDownloadReportJobTest {
GZIPInputStream inputStream = new GZIPInputStream(response.body().byteStream());
Type type = new TypeToken<List<AmazonAdProduct>>() {
}.getType();
List<AmazonAdProduct> amazonAdProductList = new Gson().fromJson(new InputStreamReader(inputStream), type);
log.warn("的Report解析完成");
}
......
......@@ -38,9 +38,13 @@ public class CompanyAccountSyncJob implements SimpleJob {
.build();
log.info("开始同步百伦帐号信息");
log.info("请求百伦pams api 开始");
try (Response response = client.newCall(request).execute();
SqlSession sqlSession = SessionUtil.getFactory().openSession(true)) {
log.info("请求百伦pams api 开始结束");
DcBaseCompanyAccountMapper dcBaseCompanyAccountMapper = sqlSession.getMapper(DcBaseCompanyAccountMapper.class);
String resultStr = response.body().string();
......@@ -57,6 +61,7 @@ public class CompanyAccountSyncJob implements SimpleJob {
if (dcBaseCompanyAccount == null) {
dcBaseCompanyAccount = new DcBaseCompanyAccount();
dcBaseCompanyAccount.setBjCreate(LocalDateTime.now());
}
BeanUtils.copyProperties(dcBaseCompanyAccount, companyAccountInfo);
dcBaseCompanyAccount.setBjModified(LocalDateTime.now());
......
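One thing worth flagging in the sync hunk above: copyProperties takes the entity first and the API payload second, which matches Apache Commons BeanUtils (copyProperties(dest, orig)), whereas Spring's BeanUtils.copyProperties(source, target) reverses the arguments, so the imported class matters. A compact sketch of the upsert under the Apache Commons assumption:

```java
// Sketch only; assumes org.apache.commons.beanutils.BeanUtils, where the first argument is the
// destination bean and the second the source. Its checked exceptions are assumed to be caught
// by the surrounding try block shown in the hunk above.
if (dcBaseCompanyAccount == null) {
    dcBaseCompanyAccount = new DcBaseCompanyAccount();
    dcBaseCompanyAccount.setBjCreate(LocalDateTime.now());          // first sync: stamp creation time
}
BeanUtils.copyProperties(dcBaseCompanyAccount, companyAccountInfo); // copy API fields into the entity
dcBaseCompanyAccount.setBjModified(LocalDateTime.now());            // always refresh the modified time
```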
......@@ -49,6 +49,9 @@ public class FbaStockJob extends PointJob {
Integer totalPage = null;
HashMap<String, DcBaseStockFba> hashmap = new HashMap<>();
do {
log.info("总共{}页", totalPage);
log.info("当前{}页", jobPointLog.getPageIndex());
OkHttpClient client = OkHttpUtil.getInstance();
MediaType mediaType = MediaType.parse("application/json");
LinkedHashMap<String, Object> map = new LinkedHashMap<>(3);
......
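The FbaStockJob hunk above pages through the stock API in a do/while loop, learning the total page count from the responses. A self-contained sketch of that pattern, with fetchPage standing in for the real OkHttp call; every name and value here is hypothetical.

```java
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

public class PagingSketch {

    public static void main(String[] args) {
        Integer totalPage = null;                        // unknown until the first response arrives
        int pageIndex = 1;
        Map<String, String> merged = new LinkedHashMap<>();
        do {
            PageResult page = fetchPage(pageIndex, 100);
            totalPage = page.totalPage;                  // learn the page count from every response
            merged.putAll(page.rows);                    // accumulate rows keyed by SKU
            pageIndex++;
        } while (pageIndex <= totalPage);
        System.out.println("fetched " + merged.size() + " rows over " + totalPage + " pages");
    }

    // Stand-in for the remote stock API; returns three fixed pages of fake data.
    private static PageResult fetchPage(int pageIndex, int pageSize) {
        PageResult result = new PageResult();
        result.totalPage = 3;
        result.rows = Collections.singletonMap("SKU-" + pageIndex, "qty=" + pageIndex * 10);
        return result;
    }

    private static class PageResult {
        int totalPage;
        Map<String, String> rows;
    }
}
```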
......@@ -2,6 +2,7 @@ package com.bailuntec.job;
import com.bailuntec.domain.entity.JobPointLog;
import com.dangdang.ddframe.job.api.ShardingContext;
import com.dangdang.ddframe.job.api.simple.SimpleJob;
import com.dangdang.ddframe.job.executor.ShardingContexts;
import org.junit.jupiter.api.Test;
......@@ -18,14 +19,18 @@ import java.util.HashMap;
class FbaStockJobTest {
@Test
void executeJob() {
void execute() {
FbaStockJob fbaStockJob = new FbaStockJob();
ShardingContext shardingContext = new ShardingContext(new ShardingContexts("1", "fba-stock-job", 1, "", new HashMap<>()), 0);
SimpleJob fbaStockJob = new FbaStockJob();
ShardingContext shardingContext = new ShardingContext(new ShardingContexts("1",
"fba-stock-job",
1,
"", new HashMap<>()), 0);
JobPointLog jobPointLog = new JobPointLog();
jobPointLog.setPageIndex(1);
jobPointLog.setPageSize(100);
fbaStockJob.executeJob(shardingContext, jobPointLog);
fbaStockJob.execute(shardingContext);
}
}
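For readers of the test above, a commented restatement of how the ShardingContext is built by hand; the argument meanings are my reading of the call site and should be checked against the Elastic-Job version on the classpath.

```java
// Same construction as in the test above, with each argument spelled out.
ShardingContexts contexts = new ShardingContexts(
        "1",                // taskId
        "fba-stock-job",    // jobName
        1,                  // shardingTotalCount: a single shard is enough for a unit test
        "",                 // jobParameter (unused here)
        new HashMap<>());   // per-shard item parameters (none)
ShardingContext shardingContext = new ShardingContext(contexts, 0); // execute as shard 0
new FbaStockJob().execute(shardingContext);
```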
......@@ -138,7 +138,7 @@ public interface DcBaseOmsSkuMapper {
/**
* <p>
* SKU sales quantity over a past time window
* Average SKU sales over a past time window
* </p>
*
* @param lowerTime lowerTime
......@@ -148,5 +148,5 @@ public interface DcBaseOmsSkuMapper {
Integer omsSkuSellerCount(@Param("lowerTime") LocalDateTime lowerTime,
@Param("upperTime") LocalDateTime upperTime,
@Param("bailunSku") String bailunSku,
@Param("warehouseCode") String warehouseCode);
@Param("warehouseCode") String warehouseCode, @Param("dyas") Integer days);
}
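The new days argument presumably lets the query report a per-day average over the [lowerTime, upperTime] window rather than a raw sum. A hypothetical call site, with every value invented for illustration:

```java
// Hypothetical call site for the amended mapper method; none of these values come from the project.
int days = 30;
LocalDateTime upperTime = LocalDateTime.now();
LocalDateTime lowerTime = upperTime.minusDays(days);
Integer averageDailySales = dcBaseOmsSkuMapper.omsSkuSellerCount(
        lowerTime, upperTime, "BL-SKU-001", "US-EAST", days);
```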
......@@ -491,5 +491,6 @@
where status = 0
and bj_create &lt; #{localDateTime}
and id % #{shardingContext.shardingTotalCount} = #{shardingContext.shardingItem}
order by bj_modified desc
</select>
</mapper>
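The id % shardingTotalCount = shardingItem predicate above is what spreads the pending report rows across job instances; the ORDER BY added in this commit only changes the order within each shard. A self-contained illustration of how that modulo split partitions ids:

```java
// Illustration of the sharding predicate used in the selectUnDownload query above:
// each job instance handles the rows where id % shardingTotalCount == its shardingItem.
public class ShardingFilterDemo {
    public static void main(String[] args) {
        int shardingTotalCount = 3;                             // three job instances
        for (int shardingItem = 0; shardingItem < shardingTotalCount; shardingItem++) {
            StringBuilder ids = new StringBuilder();
            for (long id = 1; id <= 10; id++) {
                if (id % shardingTotalCount == shardingItem) {  // same predicate as the SQL
                    ids.append(' ').append(id);
                }
            }
            System.out.println("shard " + shardingItem + " handles ids:" + ids);
        }
    }
}
```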