File movement fixed to match the ZECMSL recurring tasks in X3.

master
bleeson 2024-03-07 11:02:22 -08:00
parent d1c161deb9
commit d119d9936f
3 changed files with 123 additions and 71 deletions

View File

@ -19,4 +19,7 @@ step 1 read in file
----------------------------------------------------------------------------------------
TODO: schedule the X3 batch server to process orders and then deliveries; we still need to determine when we receive files and how often this should run.
see ZECMSL recurring task

View File

@ -1,4 +1,12 @@
#!/usr/bin/env python3
"""
Pull shipment files from the Stash AWS FTP.
After copying file, move file to archive folder on FTP.
Create ZSHPORD import files, which X3 will consume on a schedule via ZECMSL recurring task.
Copy file to the archive and then pass it to the shipment maker directory.
TODO: Source needs to send us real data for final adjustments (site, discount, multilot, etc.)
"""
import csv
import pprint
import dataclasses
@ -9,28 +17,35 @@ import pathlib
import re
import shutil
import typing
import paramiko
import records # type: ignore
import yamamotoyama # type: ignore
import yamamotoyama.x3_imports # type: ignore
THIS_DIRECTORY = pathlib.Path(__file__).parent
# AWS Transfer Family SFTP endpoint for the Stash FTP.
SFTP_HOST = "s-8ade4d252cc44c50b.server.transfer.us-west-1.amazonaws.com"
SFTP_USERNAME = "yumiddleware2023"
# SSH key material lives alongside this script in ./ssh
SSH_DIRECTORY = THIS_DIRECTORY / "ssh"
SSH_KNOWN_HOSTS_FILE = str(SSH_DIRECTORY / "known_hosts")
SSH_KEY_FILENAME = str(SSH_DIRECTORY / "id_ed25519")
# Working directories: inbound order CSVs, hand-off to the shipment script,
# and the staging area for ZSHPORD (SOH) import files.
INCOMING_DIRECTORY = THIS_DIRECTORY / "incoming_orders"
SHIPMENTS_DIRECTORY = THIS_DIRECTORY / "incoming_shipments"
SOH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SOH"
def main():
    """
    Pull order files from the FTP, turn each CSV into a ZSHPORD import,
    then hand the CSV to the shipment-maker directory.
    """
    retrieve_x12_edi_files()
    for file in INCOMING_DIRECTORY.iterdir():
        # Only process CSVs; anything else in the directory is left alone.
        if file.suffix != ".csv":
            continue
        process_files(file)
        # Archiving happens in the shipping script, so pass the CSV along
        # rather than archiving it here.
        shutil.move(file, SHIPMENTS_DIRECTORY / file.name)
    combine_zshpords()
    # TODO determine X3 processing schedule
def sftp_server() -> paramiko.SFTPClient:
def retrieve_x12_edi_files():
    """
    Connect to S3 bucket & pull down files.

    Each remote file is downloaded into INCOMING_DIRECTORY and then renamed
    into the remote ecomm-processed folder so it is not pulled again.
    """
    with paramiko.SSHClient() as ssh_client:
        ssh_client.load_system_host_keys()
        ssh_client.load_host_keys(SSH_KNOWN_HOSTS_FILE)
        # Refuse to connect to hosts that are not already in known_hosts.
        ssh_client.set_missing_host_key_policy(paramiko.client.RejectPolicy)
        ssh_client.connect(
            hostname=SFTP_HOST, username=SFTP_USERNAME, key_filename=SSH_KEY_FILENAME
        )
        with ssh_client.open_sftp() as sftp_connection:
            sftp_connection.chdir("/yu-edi-transfer/source-logi/dev/ecomm-inbound")  # TODO set to prod
            for filename in sftp_connection.listdir():
                # if edi_945.SOURCE_945_FILENAME_RE.match(filename):  # TODO fixme
                sftp_connection.get(filename, INCOMING_DIRECTORY / filename)
                # Keep the remote filename when moving to the processed folder;
                # a fixed target name would clobber previously processed files.
                new_filename = f"/yu-edi-transfer/source-logi/dev/ecomm-processed/{filename}"  # TODO set to prod
                sftp_connection.rename(filename, new_filename)
@ -60,12 +82,12 @@ def combine_zshpords():
"""
Collect all ZSHPORD imports into a single file for easy import.
"""
archive_directory = INCOMING_DIRECTORY / "archive"
archive_directory = SOH_IMPORT_DIRECTORY / "archive"
archive_directory.mkdir(exist_ok=True)
with (INCOMING_DIRECTORY / "ZSHPORD.dat").open(
with (SOH_IMPORT_DIRECTORY / "ZSHPORD.dat").open(
"a", encoding="utf-8", newline="\n"
) as combined_import_file:
for individual_import_filename in INCOMING_DIRECTORY.glob(
for individual_import_filename in SOH_IMPORT_DIRECTORY.glob(
"ZSHPORD_*.dat"
):
with individual_import_filename.open(
@ -106,8 +128,8 @@ def process_files(file):
sales_order.header.stofcy = ship_site
sales_order.header.bpdnam = customer_name
sales_order.header.invdtaamt_5 = ship_charge
sales_order.header.invdtaamt_7 = '0.33' #discount
sales_order.header.invdtaamt_8 = '0.51'#taxes
sales_order.header.invdtaamt_7 = '0.33' #TODO discount
sales_order.header.invdtaamt_8 = '0.51'#TODO taxes
#gather line data
line_product = row[0]
@ -123,7 +145,7 @@ def process_files(file):
)
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
with yamamotoyama.x3_imports.open_import_file(
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{time_stamp}.dat"
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{file.name}_{time_stamp}.dat"
) as import_file:
sales_order.output(import_file)

View File

@ -20,9 +20,37 @@ INCOMING_DIRECTORY = THIS_DIRECTORY / "incoming_shipments"
SDH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SDH"
def main():
    """
    Turn each inbound shipment CSV into a ZSHIP945S import, archive the
    CSV, then combine the imports into one file for X3 to consume.
    """
    for file in INCOMING_DIRECTORY.iterdir():
        # Only process CSVs; anything else in the directory is left alone.
        if file.suffix != ".csv":
            continue
        process_files(file)
        shutil.move(file, INCOMING_DIRECTORY / "archive" / file.name)
    combine_zship945s()
    # TODO determine X3 processing schedule
def combine_zship945s(import_directory=None):
    """
    Collect all ZSHIP945S imports into a single file for easy import.

    Appends every ZSHIP945S_*.dat file in *import_directory* (defaults to
    SDH_IMPORT_DIRECTORY) onto ZSHIP945S.dat, then moves each individual
    file into the directory's archive subfolder.
    """
    if import_directory is None:
        import_directory = SDH_IMPORT_DIRECTORY
    archive_directory = import_directory / "archive"
    archive_directory.mkdir(exist_ok=True)
    with (import_directory / "ZSHIP945S.dat").open(
        "a", encoding="utf-8", newline="\n"
    ) as combined_import_file:
        for individual_import_filename in import_directory.glob(
            "ZSHIP945S_*.dat"
        ):
            with individual_import_filename.open(
                "r", encoding="utf-8", newline="\n"
            ) as individual_import_file:
                for line in individual_import_file:
                    combined_import_file.write(line)
            # Archive each piece once it has been folded into the combined file.
            shutil.move(
                individual_import_filename,
                archive_directory / individual_import_filename.name,
            )
def find_so_from_po(cust_po):
with yamamotoyama.get_connection('test') as db_connection:#TODO remove 'test'
@ -39,57 +67,56 @@ def find_so_from_po(cust_po):
order=cust_po,
).first()["SOHNUM_0"]
def process_files():
for file in INCOMING_DIRECTORY.iterdir():
with open(file) as source_file:
csv_reader = csv.reader(source_file)
warehouse_shipment = WarehouseShipment()
for num, row in enumerate(csv_reader):
if num == 0:
continue #skip header lines
if num == 1: #gather header information
sohnum = find_so_from_po(row[5])
order_date = row[8]
customer_name = row[9]
# shipadd1 = row[9] # address information is not stored in X3
# shipadd2 = row[10]
# shipcity = row[11]
# shipstate = row[12]
# shipzip = row[13]
tracking = row[14]
weight = row[16]
ship_charge = row[20]
taxes = "?" #TODO fixme
ship_site = "?" #TODO fixme
discount = "?" #TODO fixme
warehouse_shipment.sohnum = sohnum
#warehouse_shipment.header.sohnum = sohnum
warehouse_shipment.header.shidat = datetime.datetime.strptime(order_date,'%m/%d/%Y')
warehouse_shipment.header.ylicplate = tracking
warehouse_shipment.header.growei = weight
#gather line data
#TODO how are multiple lots processed?
line_product = row[0]
line_qty = row[2]
line_lot = row[3]
line_price = row[19]
subdetail = WarehouseShipmentSubDetail(
qtypcu=-1 * int(line_qty),
lot=line_lot,
)
warehouse_shipment.append(
WarehouseShipmentDetail(
sohnum=sohnum,
itmref=line_product,
qty=int(line_qty),
),
subdetail,
)
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
with yamamotoyama.x3_imports.open_import_file(
SDH_IMPORT_DIRECTORY / f"ZSHIP945S_{warehouse_shipment.sohnum}_{time_stamp}.dat"
) as import_file:
warehouse_shipment.output(import_file)
def process_files(file):
    """
    Read one Source shipment CSV and write a ZSHIP945S import file.

    Row 0 is a header row and is skipped. Row 1 carries both the shipment
    header fields (columns 5-20) and the first detail line; every data row
    carries detail columns 0/2/3/19.

    :param file: path to the CSV to process.
    """
    with open(file) as source_file:
        csv_reader = csv.reader(source_file)
        warehouse_shipment = WarehouseShipment()
        for num, row in enumerate(csv_reader):
            if num == 0:
                continue  # skip header lines
            if num == 1:  # gather header information
                sohnum = find_so_from_po(row[5])
                order_date = row[8]
                customer_name = row[9]
                # shipadd1 = row[9] # address information is not stored in X3
                # shipadd2 = row[10]
                # shipcity = row[11]
                # shipstate = row[12]
                # shipzip = row[13]
                tracking = row[14]
                weight = row[16]
                ship_charge = row[20]
                taxes = "?"  # TODO fixme
                ship_site = "?"  # TODO fixme
                discount = "?"  # TODO fixme
                warehouse_shipment.sohnum = sohnum
                # warehouse_shipment.header.sohnum = sohnum
                warehouse_shipment.header.shidat = datetime.datetime.strptime(order_date, '%m/%d/%Y')
                warehouse_shipment.header.ylicplate = tracking
                warehouse_shipment.header.growei = weight
            # gather line data
            # TODO how are multiple lots processed?
            line_product = row[0]
            line_qty = row[2]
            line_lot = row[3]
            line_price = row[19]
            subdetail = WarehouseShipmentSubDetail(
                qtypcu=-1 * int(line_qty),
                lot=line_lot,
            )
            warehouse_shipment.append(
                WarehouseShipmentDetail(
                    sohnum=sohnum,
                    itmref=line_product,
                    qty=int(line_qty),
                ),
                subdetail,
            )
    # NOTE(review): written after the CSV is fully read; original nesting was
    # lost in the diff rendering — confirm this sits outside the with-block.
    time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    with yamamotoyama.x3_imports.open_import_file(
        SDH_IMPORT_DIRECTORY / f"ZSHIP945S_{warehouse_shipment.sohnum}_{time_stamp}.dat"
    ) as import_file:
        warehouse_shipment.output(import_file)
@dataclasses.dataclass
class WarehouseShipmentSubDetail: