using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Win_in.Sfs.Shared.Domain.Shared;
using Win_in.Sfs.Wms.DataExchange.Domain;
using Win_in.Sfs.Wms.DataExchange.Domain.Fawtyg.Mes;
using Win_in.Sfs.Wms.DataExchange.Domain.Shared;
using Win_in.Sfs.Wms.DataExchange.WMS.ScrapNote;
namespace Win_in.Sfs.Wms.DataExchange.Fawtyg.MesAgent.Incoming;
/// <summary>
/// Reads pending scrap records from MES, converts them to WMS scrap-note
/// exchange data, and marks the MES source records as processed.
/// </summary>
public class ScrapReader : IReader
{
    // MES scrap type code for a manually created scrap (everything else is WIP scrap).
    // NOTE(review): meaning of "4004" inferred from the branch below — confirm with MES docs.
    private const string ManualScrapTypeCode = "4004";

    // Maximum number of MES records converted per read cycle.
    private const int BatchSize = 100;

    private readonly IScrapManager _scrapManager;
    private readonly IIncomingFromExternalManager _incomingFromExternalManager;
    private readonly ILogger<ScrapReader> _logger;

    public ScrapReader(
        IScrapManager scrapManager
        , IIncomingFromExternalManager incomingFromExternalManager
        , ILogger<ScrapReader> logger
        )
    {
        _scrapManager = scrapManager;
        _incomingFromExternalManager = incomingFromExternalManager;
        _logger = logger;
    }

    /// <summary>
    /// Reads up to <see cref="BatchSize"/> pending scrap records from MES,
    /// stores them as incoming exchange data, and flags them as processed in MES.
    /// </summary>
    /// <returns>
    /// The incoming exchange records created for this batch; empty when nothing is pending.
    /// </returns>
    public virtual async Task<List<IncomingFromExternal>> ReadAsync()
    {
        // Read pending scraps from MES.
        var toBeProcessedScraps = await _scrapManager.GetToBeProcessedListAsync().ConfigureAwait(false);
        if (!toBeProcessedScraps.Any())
        {
            _logger.LogInformation("no scraps");
            return new List<IncomingFromExternal>();
        }

        // Cap each cycle at a fixed batch so a large backlog cannot stall the worker.
        var batch = toBeProcessedScraps.Take(BatchSize).ToList();

        // Convert each scrap into a ScrapNote exchange record and persist them.
        var incomingDataList = BuildIncomingFromExternalFromScraps(batch);
        await _incomingFromExternalManager.CreateManyAsync(incomingDataList).ConfigureAwait(false);

        // Mark the MES source records of this batch as processed.
        await _scrapManager.UpdateProcessedListAsync(batch).ConfigureAwait(false);
        return incomingDataList;
    }

    // Converts MES scrap rows into incoming exchange records. A conversion failure is
    // recorded on the individual record (SetError) instead of aborting the whole batch.
    private List<IncomingFromExternal> BuildIncomingFromExternalFromScraps(List<Scrap> toBeProcessedScraps)
    {
        var incomingDataList = new List<IncomingFromExternal>();
        foreach (var scrap in toBeProcessedScraps)
        {
            var incomingData = BuildIncomingFromExternal(scrap);
            incomingData.SetEffectiveDate(DateTime.Now);
            try
            {
                var scrapNote = BuildScrapNoteOrderExchangeMes(scrap);
                incomingData.DestinationDataContent = JsonSerializer.Serialize(scrapNote);
            }
            catch (Exception ex)
            {
                // Keep the record but flag it, so one bad row does not block the batch.
                incomingData.SetError(EnumExchangeDataErrorCode.Exception, ex.Message, ex.ToString());
            }
            incomingDataList.Add(incomingData);
        }
        return incomingDataList;
    }

    // Builds the exchange envelope (metadata + raw source payload) for one scrap row.
    private IncomingFromExternal BuildIncomingFromExternal(Scrap scrap)
    {
        var incomingData = new IncomingFromExternal()
        {
            DataType = EnumIncomingDataType.Scrap.ToString(),
            DataAction = EnumExchangeDataAction.Add,
            SourceSystem = EnumSystemType.MES.ToString(),
            SourceDataId = scrap.mesout_asd_id.ToString(),
            SourceDataGroupCode = scrap.mesout_asd_id.ToString(),
            SourceDataDetailCode = scrap.Mesout_asd_part,
            SourceDataContent = JsonSerializer.Serialize(scrap),
            WriteTime = DateTime.Now,
            Writer = nameof(MesIncomingBackgroundWorker),
            // NOTE(review): DestinationSystem is set to MES although this data flows
            // MES -> WMS; confirm whether this should be the WMS system type instead.
            DestinationSystem = EnumSystemType.MES.ToString(),
        };
        return incomingData;
    }

    // Maps one MES scrap row to a ScrapNote exchange DTO (header + a single detail line).
    private static ScrapNoteExchangeDto BuildScrapNoteOrderExchangeMes(Scrap scrap)
    {
        // Type code 4004 means a manually created scrap; everything else is WIP scrap.
        var type = scrap.mesout_asd_type == ManualScrapTypeCode
            ? EnumTransSubType.Scrap_Manual.ToString()
            : EnumTransSubType.Scrap_WIP.ToString();

        var scrapNote = new ScrapNoteExchangeDto()
        {
            Worker = scrap.mesout_asd_user,
            // "yyyyMMdd" is a machine-generated format: parse culture-invariantly (CA1305).
            // The previous CurrentCulture parse would produce wrong dates under a
            // non-Gregorian ambient culture (e.g. th-TH Buddhist calendar).
            ActiveDate = DateTime.ParseExact(scrap.Mesout_asd_date, "yyyyMMdd", System.Globalization.CultureInfo.InvariantCulture),
            Type = type
        };
        var scrapNoteDetail = new ScrapNoteDetailExchangeDto()
        {
            // MES supplies a single location; it is used for both the from- and to- sides.
            ToLocationErpCode = scrap.Mesout_asd_loc,
            FromLocationErpCode = scrap.Mesout_asd_loc,
            FromLocationCode = scrap.Mesout_asd_loc,
            ToLocationCode = scrap.Mesout_asd_loc,
            ItemCode = scrap.Mesout_asd_part,
            Qty = scrap.mesout_asd_qty,
            ReasonCode = scrap.Mesout_asd_code,
        };
        scrapNote.Detail = scrapNoteDetail;
        return scrapNote;
    }
}