feature: (WIP) archiving job for po, poc, parcel and parcel_trace

pull/6221/head
Gauthier LO 2023-06-19 16:14:21 +02:00
parent 1c0eec21fa
commit 7953e9df2c
18 changed files with 280 additions and 3 deletions

db/backup/backup_script.bat (new file, 118 lines)

@@ -0,0 +1,118 @@
:: MySQL backup script
::
:: DESCRIPTION
::
:: Create a compressed (7-Zip) mysqldump archive for each database and put
:: them into separate folders.
::
:: DEPENDENCIES
::
:: - 7-Zip for Windows
::
:: COMMAIL (optional)
:: - http://msdn.microsoft.com/en-us/library/e1y530dz(v=vs.90).aspx
::
:: REFERENCES
:: - http://dev.mysql.com/doc/refman/5.1/en/mysqldump.html
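::
:: USAGE (example)
:: - run manually, or schedule it with Windows Task Scheduler, e.g. (adjust path and time):
::   schtasks /Create /SC DAILY /ST 02:00 /TN "MySQL backup" /TR "C:\Users\logau\Downloads\Test\db\backup\backup_script.bat"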
:: ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
:: ----------------------------------------------------------------------------
:: START
:: ----------------------------------------------------------------------------
@echo off
color 0E
title MySQL backup.
:: Set some variables
set bkupdir=C:\Users\logau\Downloads\Test\db\backup
:: Directory of your MySQL installation (mysqldump.exe must be in its bin\ subfolder)
set mysqldir=C:\Program Files\MySQL\MySQL Server 8.0
set logdir=C:\Users\logau\Downloads\Test\db\logs
:: Database settings
set db=wia_app
set dbuser=gauthier
set dbpass=WIASourcing2021
:: Path to your 7-Zip executable
set zip="C:\Program Files\7-Zip\7z.exe"
:: Number of days after which old backups and log files are deleted
set deleteDay=30
set endtime=0
:GETTIME
:: get the date and then parse it into variables
for /f "tokens=1,2,3 delims=/ " %%i in ('date /t') do (
set dd=%%i
set mm=%%j
set yy=%%k
)
:: get the time and then parse it into variables
for /f "tokens=1,2,3 delims=:" %%i in ('time /t') do (
set hh=%%i
set ii=%%j
)
:: get the seconds
set ss=%time:~6,2%
:: If this is the second time through then go to the end of the file
if "%endtime%"=="1" goto END
:: Create the filename suffix
set fn=_%yy%%mm%%dd%_%hh%%ii%%ss%
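:: e.g. with a dd/mm/yyyy date format this yields _20230619_161430, giving
:: LOG_20230619_161430.txt and wia_app_20230619_161430.sql.7z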
:: Write to the log file
echo Beginning MySQLDump Process > %logdir%\LOG%fn%.txt
echo Start Time = %yy%-%mm%-%dd% %hh%:%ii%:%ss% >> %logdir%\LOG%fn%.txt
echo --------------------------- >> %logdir%\LOG%fn%.txt
echo. >> %logdir%\LOG%fn%.txt
:: Create the backup sub-directory if it does not exist
if not exist %bkupdir%\%db%\ (
echo Making Directory %db%
echo Making Directory %db% >> %logdir%\LOG%fn%.txt
mkdir %bkupdir%\%db%
) else (
echo Directory %db% Exists
echo Directory %db% Exists >> %logdir%\LOG%fn%.txt
REM delete log files older than %deleteDay% days
echo Deleting logs older than %deleteDay% days
echo Deleting logs older than %deleteDay% days >> %logdir%\LOG%fn%.txt
forfiles /p %logdir%\ /s /m *.* /d -%deleteDay% /C "cmd /c echo deleting : @path"
forfiles /p %logdir%\ /s /m *.* /d -%deleteDay% /C "cmd /c echo deleting : @path >> %logdir%\LOG%fn%.txt"
forfiles /p %logdir%\ /s /m *.* /d -%deleteDay% /C "cmd /c del @path"
REM delete backups older than %deleteDay% days
echo Deleting backups older than %deleteDay% days
echo Deleting backups older than %deleteDay% days >> %logdir%\LOG%fn%.txt
forfiles /p %bkupdir%\%db%\ /s /m *.* /d -%deleteDay% /C "cmd /c echo deleting : @path"
forfiles /p %bkupdir%\%db%\ /s /m *.* /d -%deleteDay% /C "cmd /c echo deleting : @path >> %logdir%\LOG%fn%.txt"
forfiles /p %bkupdir%\%db%\ /s /m *.* /d -%deleteDay% /C "cmd /c del @path"
)
:: Run mysqldump on the database and compress the output by piping it through 7-Zip
echo Backing up database %db%%fn%.sql.7z
echo Backing up database %db%%fn%.sql.7z >> %logdir%\LOG%fn%.txt
:: 7-Zip's -si switch reads the data to compress from standard input (stdin)
"%mysqldir%\bin\mysqldump.exe" -e --user=%dbuser% --password=%dbpass% -h localhost --databases %db% | %zip% a -si "%bkupdir%\%db%\%db%%fn%.sql.7z" >> %logdir%\LOG%fn%.txt
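:: To restore a backup later (example, adjust the archive name):
::   %zip% e "%bkupdir%\%db%\<archive>.sql.7z" -so | "%mysqldir%\bin\mysql.exe" --user=%dbuser% --password=%dbpass%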
echo Done.
echo Done. >> %logdir%\LOG%fn%.txt
:: Go back and get the end time for the script
set endtime=1
goto :GETTIME
:END
:: Write to the log file
echo. >> %logdir%\LOG%fn%.txt
echo --------------------------- >> %logdir%\LOG%fn%.txt
echo MySQLDump Process Finished >> %logdir%\LOG%fn%.txt
echo End Time = %yy%-%mm%-%dd% %hh%:%ii%:%ss% >> %logdir%\LOG%fn%.txt
echo. >> %logdir%\LOG%fn%.txt
:: Send the log file in an e-mail
:: c:\commail\commail -host=smtp.yourcompany.com -from="server <server@yourcompany.com>" -to=serveradmins@yourcompany.com -subject="MySQL Backup" -msg=%logdir%\LOG%fn%.txt
:: ----------------------------------------------------------------------------
:: END

DBArchivingJob.java (new file)

@@ -0,0 +1,97 @@
package org.jeecg.modules.business.domain.job;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.jeecg.modules.business.entity.Parcel;
import org.jeecg.modules.business.entity.ParcelTrace;
import org.jeecg.modules.business.entity.PlatformOrder;
import org.jeecg.modules.business.entity.PlatformOrderContent;
import org.jeecg.modules.business.service.IParcelService;
import org.jeecg.modules.business.service.IParcelTraceService;
import org.jeecg.modules.business.service.IPlatformOrderContentService;
import org.jeecg.modules.business.service.IPlatformOrderService;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.List;
import java.util.stream.Collectors;
/**
* A job that archives entries from the platform_order, platform_order_content, parcel and parcel_trace tables
*/
@Slf4j
@Component
public class DBArchivingJob implements Job {
@Autowired
@Setter
private IParcelService parcelService;
@Autowired
@Setter
private IParcelTraceService parcelTraceService;
@Autowired
@Setter
private IPlatformOrderService platformOrderService;
@Autowired
@Setter
private IPlatformOrderContentService platformOrderContentService;
private static final Integer DEFAULT_NUMBER_OF_DAYS = 365;
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
LocalDateTime endDateTime = LocalDateTime.now(ZoneId.of(ZoneId.SHORT_IDS.get("CTT"))); // "CTT" maps to Asia/Shanghai
LocalDateTime startDateTime = endDateTime.minusDays(DEFAULT_NUMBER_OF_DAYS);
JobDataMap jobDataMap = context.getMergedJobDataMap();
String parameter = ((String) jobDataMap.get("parameter"));
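// Expected "parameter" JSON (example values), using ISO-8601 local date-times:
//   {"startDateTime":"2022-06-19T00:00:00","endDateTime":"2023-06-19T00:00:00"}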
if (parameter != null) {
try {
JSONObject jsonObject = new JSONObject(parameter);
if (!jsonObject.isNull("startDateTime")) {
String startDateStr = jsonObject.getString("startDateTime");
startDateTime = LocalDateTime.parse(startDateStr);
}
if (!jsonObject.isNull("endDateTime")) {
String endDateStr = jsonObject.getString("endDateTime");
endDateTime = LocalDateTime.parse(endDateStr);
}
}
catch (JSONException e) {
log.error("Error while parsing parameter as JSON, falling back to default parameters.");
}
}
if (!endDateTime.isAfter(startDateTime)) {
throw new JobExecutionException("endDateTime must be strictly after startDateTime!");
}
System.out.println("startdatetime : " + startDateTime + "\nendDateTime : " + endDateTime);
String startDate = startDateTime.toString().substring(0,10);
endDateTime = endDateTime.plusDays(1);
String endDate = endDateTime.toString().substring(0,10);
System.out.println("startdatetime : " + startDateTime + "\nendDateTime : " + endDateTime);
System.out.println("startdate : " + startDate + "\nendDate : " + endDate);
// step 1: load the entries to archive into objects
// step 2: insert those objects into the archive tables
// step 3: drop the archived entries from the original tables
List<PlatformOrder> platformOrders = platformOrderService.fetchPlatformOrdersToArchive(startDate, endDate);
List<String> platformOrderIDs = platformOrders.stream().map(PlatformOrder::getId).collect(Collectors.toList());
List<PlatformOrderContent> platformOrderContents = platformOrderContentService.fetchPlatformOrderContentsToArchive(platformOrderIDs);
List<String> platformOrderTrackingNumber = platformOrders.stream().map(PlatformOrder::getTrackingNumber).collect(Collectors.toList());
try {
List<Parcel> parcels = parcelService.fetchParcelsToArchive(platformOrderTrackingNumber);
List<String> parcelIDs = parcels.stream().map(Parcel::getId).collect(Collectors.toList());
List<ParcelTrace> parcelTraces = parcelTraceService.fetchParcelTracesToArchive(parcelIDs);
log.info("Parcel count: {}", parcels.size());
log.info("Parcel_trace count: {}", parcelTraces.size());
} catch (Exception e) {
log.error("Error while fetching parcels and parcel traces to archive.", e);
}
log.info("Platform order IDs to archive: {}", platformOrderIDs);
}
}
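
The job above only fetches and logs the entries to archive so far. Because its collaborators are injected with @Autowired, it needs a Spring-aware Quartz JobFactory (Spring Boot's quartz starter configures one) rather than plain new. Below is a minimal sketch of registering it directly against the Quartz API; the job and trigger names, the cron expression and the parameter values are assumptions, not part of this commit.

    import org.quartz.*;
    import org.jeecg.modules.business.domain.job.DBArchivingJob;

    // Sketch only: registers DBArchivingJob to run daily at 03:00 with an explicit date window.
    // Assumes "scheduler" is the Spring-managed Scheduler whose JobFactory autowires job beans.
    public class DBArchivingJobRegistration {
        public static void register(Scheduler scheduler) throws SchedulerException {
            JobDetail job = JobBuilder.newJob(DBArchivingJob.class)
                    .withIdentity("dbArchivingJob")
                    // same JSON shape that execute() parses; the values are only examples
                    .usingJobData("parameter",
                            "{\"startDateTime\":\"2022-06-19T00:00:00\",\"endDateTime\":\"2023-06-19T00:00:00\"}")
                    .build();
            Trigger trigger = TriggerBuilder.newTrigger()
                    .withIdentity("dbArchivingTrigger")
                    .withSchedule(CronScheduleBuilder.cronSchedule("0 0 3 * * ?")) // every day at 03:00
                    .build();
            scheduler.scheduleJob(job, trigger);
        }
    }

The comments in execute() stop after step 1 (loading the entries). A rough sketch of what steps 2 and 3 could look like with the MyBatis-Plus batch helpers already available on the services follows. PlatformOrderArchive and platformOrderArchiveService are hypothetical, since no archive entities or tables exist in this commit, and the empty-list guard matters because the new *ToArchive queries expand to an invalid IN () clause when given an empty list.

    // Hypothetical continuation of execute(); nothing below exists in this commit.
    if (!platformOrders.isEmpty()) {
        // step 2: copy the fetched rows into an archive table through an assumed
        // IService<PlatformOrderArchive> named platformOrderArchiveService
        List<PlatformOrderArchive> archived = platformOrders.stream()
                .map(PlatformOrderArchive::fromPlatformOrder) // assumed mapping helper
                .collect(Collectors.toList());
        platformOrderArchiveService.saveBatch(archived);
        // step 3: drop the archived rows from the live table
        // (removeByIds is inherited from the MyBatis-Plus IService that IPlatformOrderService extends)
        platformOrderService.removeByIds(platformOrderIDs);
    }
    // platform_order_content, parcel and parcel_trace would follow the same save-then-delete pattern.

Running the copy and the delete for each table inside a single transaction (for example behind a @Transactional service method) would keep the live and archive tables consistent if the job fails halfway through.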

ParcelMapper.java

@@ -1,6 +1,7 @@
package org.jeecg.modules.business.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Param;
import org.jeecg.modules.business.domain.api.equick.EQuickResponse;
import org.jeecg.modules.business.domain.api.jt.JTParcelTrace;
import org.jeecg.modules.business.domain.api.yd.YDTraceData;
@@ -25,4 +26,5 @@ public interface ParcelMapper extends BaseMapper<Parcel> {
void insertOrUpdateEQParcels(List<EQuickResponse> parcels);
void insertOrIgnoreYDParcels(List<YDTraceData> parcels);
List<Parcel> fetchParcelsToArchive(@Param("trackingNumbers") List<String> trackingNumbers);
}

ParcelTraceMapper.java

@@ -28,4 +28,5 @@ public interface ParcelTraceMapper extends BaseMapper<ParcelTrace> {
void insertOrUpdateEQTraces(@Param("traces") List<EQuickTraceData> traceDetails);
void insertOrIgnoreYDTraces(@Param("traces") List<YDTraceDetail> traceDetails);
List<ParcelTrace> fetchParcelTracesToArchive(@Param("parcelIDs") List<String> parcelIDs);
}

PlatformOrderContentMapper.java

@@ -71,4 +71,5 @@ public interface PlatformOrderContentMapper extends BaseMapper<PlatformOrderContent> {
);
List<PlatformOrderContent> findUninvoicedShippedOrderContents();
List<PlatformOrderContent> fetchPlatformOrderContentsToArchive(@Param("orderIDs") List<String> orderIDs);
}

PlatformOrderMapper.java

@@ -177,4 +177,5 @@ public interface PlatformOrderMapper extends BaseMapper<PlatformOrder> {
@Param("shops") List<String> shops,
@Param("erpStatuses") List<Integer> erpStatuses,
@Param("warehouses") List<String> warehouses);
List<PlatformOrder> fetchPlatformOrdersToArchive(@Param("startDate") String startDate, @Param("endDate") String endDate);
}

ParcelMapper.xml

@@ -67,4 +67,12 @@
)
</foreach>
</insert>
<select id="fetchParcelsToArchive" resultType="org.jeecg.modules.business.entity.Parcel">
SELECT *
FROM parcel
WHERE third_bill_code IN
<foreach collection="trackingNumbers" separator="," open="(" close=")" item="number" index="index">
#{number}
</foreach>;
</select>
</mapper>

ParcelTraceMapper.xml

@@ -80,4 +80,12 @@
)
</foreach>
</insert>
<select id="fetchParcelTracesToArchive" resultType="org.jeecg.modules.business.entity.Parcel">
SELECT *
FROM parcel_trace
WHERE parcel_id IN
<foreach collection="parcelIDs" separator="," open="(" close=")" item="parcelID" index="index">
#{parcelID}
</foreach>;
</select>
</mapper>

PlatformOrderContentMapper.xml

@@ -241,4 +241,12 @@
</foreach>
</update>
<select id="fetchPlatformOrderContentsToArchive" resultType="org.jeecg.modules.business.entity.PlatformOrderContent">
SELECT *
FROM platform_order_content
WHERE platform_order_id IN
<foreach collection="orderIDs" separator="," open="(" close=")" index="index" item="orderID">
#{orderID}
</foreach>;
</select>
</mapper>

PlatformOrderMapper.xml

@@ -488,7 +488,7 @@
AND po.erp_status = 3;
</select>
<select id="fetchUninvoicedShippedOrderIDInShopsAndOrderTime" resultType="org.jeecg.modules.business.entity.PlatformOrder">
-SELECT id
+SELECT po.id
FROM platform_order po
JOIN logistic_channel lc ON po.logistic_channel_name = lc.zh_name
WHERE po.shop_id IN
@@ -526,4 +526,10 @@
#{erpStatus}
</foreach>;
</select>
<select id="fetchPlatformOrdersToArchive" resultType="org.jeecg.modules.business.entity.PlatformOrder">
SELECT *
FROM platform_order po
WHERE erp_status IN (4,5)
AND order_time BETWEEN #{startDate} AND #{endDate};
</select>
</mapper>

IParcelService.java

@@ -46,4 +46,5 @@ public interface IParcelService extends IService<Parcel> {
void saveEQParcelAndTraces(List<EQuickResponse> parcelTraces);
void saveYDParcelAndTraces(List<YDTraceData> traceData);
List<Parcel> fetchParcelsToArchive(List<String> trackingNumbers);
}

IParcelTraceService.java

@@ -14,4 +14,5 @@ import java.util.List;
public interface IParcelTraceService extends IService<ParcelTrace> {
public List<ParcelTrace> selectByMainId(String mainId);
List<ParcelTrace> fetchParcelTracesToArchive(List<String> parcelIDs);
}

IPlatformOrderContentService.java

@@ -34,4 +34,5 @@ public interface IPlatformOrderContentService extends IService<PlatformOrderContent> {
List<SkuWeightDiscountServiceFees> getAllSKUWeightsDiscountsServiceFees();
List<SkuQuantity> searchOrderContent(List<String> orderIDList);
List<PlatformOrderContent> fetchPlatformOrderContentsToArchive(List<String> orderIDs);
}

IPlatformOrderService.java

@@ -139,4 +139,13 @@ public interface IPlatformOrderService extends IService<PlatformOrder> {
List<PlatformOrderShopSync> fetchOrderInShopsReadyForShopifySync(List<String> shopCodes);
List<PlatformOrder> fetchUninvoicedShippedOrderIDInShops(String startDate, String endDate, List<String> shops, List<String> warehouses);
/**
* Fetch all platform orders between two dates whose erp_status is 4 or 5;
* the resulting list will then be archived.
*
* @param startDate start of the order_time range, formatted as yyyy-MM-dd
* @param endDate end of the order_time range, formatted as yyyy-MM-dd
* @return the list of PlatformOrder entries to archive
*/
List<PlatformOrder> fetchPlatformOrdersToArchive(String startDate, String endDate);
}

ParcelServiceImpl.java

@@ -218,4 +218,7 @@ public class ParcelServiceImpl extends ServiceImpl<ParcelMapper, Parcel> implements IParcelService {
}
log.info("Finished inserting {} parcels and their traces into DB.", parcelTraces.size());
}
@Override
public List<Parcel> fetchParcelsToArchive(List<String> trackingNumbers) {
return parcelMapper.fetchParcelsToArchive(trackingNumbers);
}
}

ParcelTraceServiceImpl.java

@@ -25,4 +25,7 @@ public class ParcelTraceServiceImpl extends ServiceImpl<ParcelTraceMapper, ParcelTrace> implements IParcelTraceService {
public List<ParcelTrace> selectByMainId(String mainId) {
return parcelTraceMapper.selectByMainId(mainId);
}
@Override
public List<ParcelTrace> fetchParcelTracesToArchive(List<String> parcelIDs) {
return parcelTraceMapper.fetchParcelTracesToArchive(parcelIDs);
}
}

PlatformOrderServiceImpl.java

@@ -366,4 +366,8 @@ public class PlatformOrderServiceImpl extends ServiceImpl<PlatformOrderMapper, PlatformOrder> implements IPlatformOrderService {
public List<PlatformOrder> fetchUninvoicedShippedOrderIDInShops(String startDate, String endDate, List<String> shops, List<String> warehouses) {
return platformOrderMap.fetchUninvoicedShippedOrderIDInShops(startDate, endDate, shops, warehouses);
}
@Override
public List<PlatformOrder> fetchPlatformOrdersToArchive(String startDate, String endDate) {
return platformOrderMap.fetchPlatformOrdersToArchive(startDate, endDate);
}
}

PlatformOrderContentServiceImpl.java

@@ -20,8 +20,11 @@ import java.util.Map;
@Service
public class PlatformOrderContentServiceImpl extends ServiceImpl<PlatformOrderContentMapper, PlatformOrderContent> implements IPlatformOrderContentService {
@Autowired
-private PlatformOrderContentMapper platformOrderContentMapper;
+private final PlatformOrderContentMapper platformOrderContentMapper;
+public PlatformOrderContentServiceImpl(PlatformOrderContentMapper platformOrderContentMapper) {
+this.platformOrderContentMapper = platformOrderContentMapper;
+}
public List<SkuWeightDiscountServiceFees> getAllSKUWeightsDiscountsServiceFees() {
return platformOrderContentMapper.getAllWeightsDiscountsServiceFees();
}
@@ -51,5 +54,7 @@ public class PlatformOrderContentServiceImpl extends ServiceImpl<PlatformOrderContentMapper, PlatformOrderContent> implements IPlatformOrderContentService {
}
}
@Override
public List<PlatformOrderContent> fetchPlatformOrderContentsToArchive(List<String> orderIDs) {
return platformOrderContentMapper.fetchPlatformOrderContentsToArchive(orderIDs);
}
}