#!/usr/bin/env python3
"""
Process entries in the staging database put there by
edi_867 and prepare them for X3 batch importing
"""
import pathlib
import datetime
import pprint
import shutil

import records  # type: ignore

import yamamotoyama  # type: ignore
import yamamotoyama.x3_imports  # type: ignore

THIS_DIRECTORY = pathlib.Path(__file__).parent
IMPORTS_DIRECTORY = THIS_DIRECTORY / 'x3_imports'

SELECT_STATEMENT = """
select
    PO
from staging.dbo.shandex_shipments SDH
where
    SDH.is_sent = 0
    and SDH.bpcord <> ''
"""

SELECT_STATEMENT_TESTING = """
select
    PO
from staging.dbo.shandex_shipments SDH
where
    PO = '4542_O0216777'
"""

HEADER_STATEMENT = """
select
    [H]
    ,[salfcy]
    ,[stofcy]
    ,[sdhnum]
    ,[bpcinv]
    ,[bpcord]
    ,[bpaadd]
    ,[cur]
    ,[shidat]
    ,[cfmflg]
    ,[pjt]
    ,[bptnum]
    ,[ylicplate]
    ,[yclippership]
    ,[invdtaamt_2]
    ,[invdtaamt_3]
    ,[invdtaamt_4]
    ,[invdtaamt_5]
    ,[invdtaamt_6]
    ,[invdtaamt_7]
    ,[invdtaamt_8]
    ,[invdtaamt_9]
    ,[die]
    ,[die_1]
    ,[die_2]
    ,[die_3]
    ,[die_4]
    ,[die_5]
    ,[die_6]
    ,[die_7]
    ,[die_8]
    ,[die_9]
    ,[die_10]
    ,[die_11]
    ,[die_12]
    ,[die_13]
    ,[die_14]
    ,[die_15]
    ,[die_16]
    ,[die_17]
    ,[die_18]
    ,[die_19]
    ,[cce]
    ,[cce_1]
    ,[cce_2]
    ,[cce_3]
    ,[cce_4]
    ,[cce_5]
    ,[cce_6]
    ,[cce_7]
    ,[cce_8]
    ,[cce_9]
    ,[cce_10]
    ,[cce_11]
    ,[cce_12]
    ,[cce_13]
    ,[cce_14]
    ,[cce_15]
    ,[cce_16]
    ,[cce_17]
    ,[cce_18]
    ,[cce_19]
    ,[bpdnam]
    ,[bpdaddlig]
    ,[bpdaddlig_1]
    ,[bpdaddlig_2]
    ,[bpdposcod]
    ,[bpdcty]
    ,[bpdsat]
    ,[bpdcry]
    ,[bpdcrynam]
    ,[sdhtyp]
    ,[growei]
    ,[pacnbr]
    ,[star71]
    ,[star72]
    ,[star81]
    ,[star82]
from staging.dbo.shandex_shipments SDH
where SDH.PO = :po
"""

DETAIL_STATEMENT = """
select
    [L]
    ,[sohnum]
    ,[soplin]
    ,[itmref]
    ,[itmdes]
    ,[sau]
    ,cast(sum(cast([qty] as int)) as nvarchar) [qty]
    ,[gropri]
    ,[star91]
    ,[star92]
from staging.dbo.shandex_shipment_details SDD
where SDD.PO = :po
group by
    [L]
    ,[sohnum]
    ,[soplin]
    ,[itmref]
    ,[itmdes]
    ,[sau]
    ,[gropri]
    ,[star91]
    ,[star92]
"""

SUBDETAIL_STATEMENT = """
select distinct
    [S]
    ,[sta]
    ,[pcu]
    ,[qtypcu]
    ,[loc]
    ,[lot]
    ,[sernum]
from staging.dbo.shandex_shipment_details SDD
where SDD.PO = :po
    and SDD.itmref = :itmref
"""

UPDATE_STATEMENT = """
update staging.dbo.shandex_shipments
set is_sent = 1
where PO = :po
"""

HEADER_NAMES = [
    'H', 'salfcy', 'stofcy', 'sdhnum', 'bpcinv', 'bpcord',
    'bpaadd', 'cur', 'shidat', 'cfmflg', 'pjt', 'bptnum', 'ylicplate', 'yclippership',
    'invdtaamt_2', 'invdtaamt_3', 'invdtaamt_4', 'invdtaamt_5', 'invdtaamt_6', 'invdtaamt_7',
    'invdtaamt_8', 'invdtaamt_9', 'die', 'die_1', 'die_2', 'die_3', 'die_4', 'die_5', 'die_6', 'die_7',
    'die_8', 'die_9', 'die_10', 'die_11', 'die_12', 'die_13', 'die_14', 'die_15', 'die_16', 'die_17',
    'die_18', 'die_19', 'cce', 'cce_1', 'cce_2', 'cce_3', 'cce_4', 'cce_5', 'cce_6', 'cce_7', 'cce_8', 'cce_9',
    'cce_10', 'cce_11', 'cce_12', 'cce_13', 'cce_14', 'cce_15', 'cce_16', 'cce_17', 'cce_18', 'cce_19', 'bpdnam',
    'bpdaddlig', 'bpdaddlig_1', 'bpdaddlig_2', 'bpdposcod', 'bpdcty', 'bpdsat', 'bpdcry', 'bpdcrynam', 'sdhtyp',
    'growei', 'pacnbr', 'star71', 'star72', 'star81', 'star82',
]

DETAIL_NAMES = ['L', 'sohnum', 'soplin', 'itmref', 'itmdes', 'sau', 'qty', 'gropri', 'star91', 'star92']

SUBDETAIL_NAMES = ['S', 'sta', 'pcu', 'qtypcu', 'loc', 'lot', 'sernum']

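# Fetch the staging POs that have a customer (bpcord) and have not yet been sent to X3.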
def get_shipments(database):
    with database.transaction():
        # Swap in SELECT_STATEMENT_TESTING to limit a run to a single PO while testing.
        result = database.query(SELECT_STATEMENT).all()
    return result

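# Header-level fields for one PO, written as the 'H' line of the import file.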
def get_shipment_headers(database, po):
    result = database.query(
        HEADER_STATEMENT,
        po=po,
    ).first()
    return result

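# Detail ('L') lines for one PO, with quantities summed per order line.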
def get_shipment_details(database, po):
    result = database.query(
        DETAIL_STATEMENT,
        po=po,
    ).all()
    return result

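# Stock sub-detail ('S') lines (status, unit, quantity, location, lot, serial) for one item on the PO.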
def get_shipment_subdetails(database, po, itmref):
    result = database.query(
        SUBDETAIL_STATEMENT,
        po=po,
        itmref=itmref,
    ).all()
    return result

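# Write one ZSHIP867_<PO>_<timestamp>.dat file per shipment. Each record is a single
# line ('H' header, then 'L' detail and 'S' sub-detail lines) with fields separated
# by chr(31), the ASCII unit separator.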
def create_imports(shipments, database):
    for shipment in shipments:
        time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
        with open(
            IMPORTS_DIRECTORY / f"ZSHIP867_{shipment['PO']}_{time_stamp}.dat", 'w', encoding='utf-8', newline='\n'
        ) as import_file:
            headers = get_shipment_headers(database, shipment['PO'])
            details = get_shipment_details(database, shipment['PO'])
            for name in HEADER_NAMES:
                import_file.write(headers[name])
                import_file.write(chr(31))
            import_file.write('\n')
            for record in details:
                for name in DETAIL_NAMES:
                    import_file.write(record[name])
                    import_file.write(chr(31))
                import_file.write('\n')
                subdetails = get_shipment_subdetails(database, shipment['PO'], record['itmref'])
                for subrecord in subdetails:
                    for name in SUBDETAIL_NAMES:
                        import_file.write(subrecord[name])
                        import_file.write(chr(31))
                    import_file.write('\n')

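# Concatenate the individual import files into a single ZSHIP867.dat and archive the originals.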
def combine_imports():
    archive_directory = IMPORTS_DIRECTORY / "archive"
    archive_directory.mkdir(exist_ok=True)
    with (IMPORTS_DIRECTORY / "ZSHIP867.dat").open(
        "w", encoding="utf-8", newline="\n"
    ) as combined_import_file:
        for individual_import_filename in IMPORTS_DIRECTORY.glob(
            "ZSHIP867_*.dat"
        ):
            with individual_import_filename.open(
                "r", encoding="utf-8", newline="\n"
            ) as individual_import_file:
                for line in individual_import_file:
                    combined_import_file.write(line)
            shutil.move(
                individual_import_filename,
                archive_directory / individual_import_filename.name,
            )

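# Flag processed POs as sent so the next run skips them.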
def mark_sent(database, shipments):
    with database.transaction():
        for shipment in shipments:
            database.query(
                UPDATE_STATEMENT,
                po=shipment['PO'],
            )

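# Entry point: pull unsent shipments, write the X3 import files, then mark them as sent.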
def main():
    with yamamotoyama.get_connection() as database:
        # retrieve everything that has a valid customer and hasn't already been sent to X3
        shipments = get_shipments(database)
        # turn each shipment into an X3 import file
        create_imports(shipments, database)
        combine_imports()
        # update the is_sent field so they are not processed again
        mark_sent(database, shipments)


if __name__ == "__main__":
    main()