SL go live updates.
parent 309a3a7894
commit 73a4f79bd6

readme.txt
@@ -3,22 +3,27 @@ Take shipping files from Source and turn them into X3 Sales Orders via ZSHPORD
Use the same files and turn them into X3 Deliveries for the above orders via ZSHIP945S

Problems:
Source currently doesn't have site information or discount information on their export
Until we have the actual data we'll build an accurate import
If Source cannot provide this we will need to use the existing Shopify integration to pull orders.
Source does not send accurate unit price, shipping charge, tax, and discounts. To
bring something accurate in we use the old Shopify integration. This brings up
another issue: the old integration brings in orders at creation, so if these fields
get modified later (like from a refund) we are bringing in the wrong amount.

Questions:
How does Source handle multiple lots on a line?

---------------------------------------------------------------------------------------

General process flow (directory hand-offs are sketched after this list)
-read in file(s) from Source ecomm FTP and archive file on FTP
-create ZSHPORD from file, pass file to shipment process, ask X3 to process import
-create ZSHIP945S from file, archive, ask X3 to process import
-load shipment information into SQL table to report on, pass file to order creation process
-create ZSHPORD import from file, pass file to shipment process, ask X3 to process import
-create ZSHIP945S import from file, archive, ask X3 to process import

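Directory hand-offs as wired in the scripts in this commit (folder names come from the constants defined in those scripts):
  FTP /ecomm-inbound    -> new_files_from_ftp/             analytics loader pulls files and loads [analytics].[dbo].[SL_ECOMM]
  new_files_from_ftp/   -> incoming_orders/sorted_*.csv    sorted copy written; the raw file is archived to incoming_shipments/archive/
  incoming_orders/      -> to_import_SOH/ZSHPORD_*.dat     order import built; the csv is passed on to incoming_shipments/
  incoming_shipments/   -> to_import_SDH/ZSHIP945S_*.dat   delivery import built; the csv is archived to incoming_shipments/archive/
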
Recurring tasks in X3 have been set up named ZECMSL and ZECMSLSHIP.

---------------------------------------------------------------------------------------

Unanswered:
How will Source send files, individual files or a single bulk file?

Reporting on eCommerce:
To catch issues we need to look at (a query sketch for the first check follows this list):
-possible sales order duplicates that have the same Shopify PO number
-missing sales orders that came into the shipping table but failed to import
-missing deliveries that failed during import, e.g. not enough stock

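A minimal sketch of the duplicate-order check, assuming the yamamotoyama.get_connection() helper and the PROD.SORDER columns (SOHTYP_0, BPCORD_0, CUSORDREF_0) already used by the scripts in this commit; the script shape and reporting columns are illustrative only, not the final report.

#!/usr/bin/env python3
"""
Report web sales orders that share a Shopify PO number (possible duplicates).
Sketch only: assumes yamamotoyama.get_connection() and PROD.SORDER as used below.
"""
import yamamotoyama  # type: ignore


def find_duplicate_orders():
    with yamamotoyama.get_connection() as db_connection:
        return db_connection.query(
            """
            select
                CUSORDREF_0
                ,count(*) as [how_many]
            from PROD.SORDER
            where
                SOHTYP_0 = 'WEB'
                and BPCORD_0 = 'STSHOPIFY'
            group by CUSORDREF_0
            having count(*) > 1
            """,
        ).all()


if __name__ == "__main__":
    for record in find_duplicate_orders():
        print(record.CUSORDREF_0, record.how_many)
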
@@ -0,0 +1,115 @@
#!/usr/bin/env python3
"""
Pull shipment files from the Stash AWS FTP.
load shipping data into analytics database
"""
import csv
import pprint
import dataclasses
import datetime
import decimal
import functools
import pathlib
import re
import shutil
import typing
import paramiko

import records  # type: ignore

import yamamotoyama  # type: ignore
import yamamotoyama.x3_imports  # type: ignore

THIS_DIRECTORY = pathlib.Path(__file__).parent

SFTP_HOST = "s-8ade4d252cc44c50b.server.transfer.us-west-1.amazonaws.com"
SFTP_USERNAME = "yumiddleware2023"
SSH_DIRECTORY = THIS_DIRECTORY / "ssh"  # TODO fixme
SSH_KNOWN_HOSTS_FILE = str(SSH_DIRECTORY / "known_hosts")
SSH_KEY_FILENAME = str(SSH_DIRECTORY / "id_ed25519")

INCOMING_DIRECTORY = THIS_DIRECTORY / "new_files_from_ftp"
ORDER_DIRECTORY = THIS_DIRECTORY / "incoming_orders"
ARCHIVE_DIRECTORY = THIS_DIRECTORY / "incoming_shipments" / "archive"


def main():
    # retrieve_x12_edi_files()  # TODO uncomment
    for file in INCOMING_DIRECTORY.iterdir():
        if file.name[-4:] != '.csv':
            continue
        else:
            process_files(file)
            shutil.move(file, ARCHIVE_DIRECTORY / file.name)


def sftp_server() -> paramiko.SFTPClient:
    with paramiko.SSHClient() as ssh_client:
        ssh_client.load_system_host_keys()
        ssh_client.load_host_keys(SSH_KNOWN_HOSTS_FILE)
        ssh_client.set_missing_host_key_policy(paramiko.client.RejectPolicy)
        ssh_client.connect(
            hostname=SFTP_HOST, username=SFTP_USERNAME, key_filename=SSH_KEY_FILENAME
        )
        with ssh_client.open_sftp() as sftp_connection:
            yield sftp_connection


def retrieve_x12_edi_files():
    """
    Connect to S3 bucket & pull down files.
    """
    with paramiko.SSHClient() as ssh_client:
        ssh_client.load_system_host_keys()
        ssh_client.load_host_keys(SSH_KNOWN_HOSTS_FILE)
        ssh_client.set_missing_host_key_policy(paramiko.client.RejectPolicy)
        ssh_client.connect(
            hostname=SFTP_HOST, username=SFTP_USERNAME, key_filename=SSH_KEY_FILENAME
        )
        with ssh_client.open_sftp() as sftp_connection:
            sftp_connection.chdir("/yu-edi-transfer/source-logi/prod/ecomm-inbound")
            for filename in sftp_connection.listdir():
                # if edi_945.SOURCE_945_FILENAME_RE.match(filename):  # TODO fixme
                sftp_connection.get(filename, INCOMING_DIRECTORY / filename)
                new_filename = f"/yu-edi-transfer/source-logi/prod/ecomm-processed/{filename}"
                sftp_connection.rename(filename, new_filename)


def process_files(file):
    with open(file) as source_file:
        with yamamotoyama.get_connection() as db_connection:
            with db_connection.transaction() as _:
                with open(ORDER_DIRECTORY / f'sorted_{file.name}', 'w', newline='') as output:
                    csv_reader = csv.reader(source_file)
                    csv_writer = csv.writer(output)
                    csv_writer.writerow(next(csv_reader, None))  # skip header
                    csv_sort = sorted(csv_reader, key=lambda row: row[6], reverse=False)
                    for row in csv_sort:
                        csv_writer.writerow(row)
                        site = row[0]
                        item = row[1]
                        des = row[2]
                        qty = row[3]
                        lot = row[4]
                        order_id = row[6]
                        ship_date = datetime.datetime.strptime(row[17], '%m/%d/%Y %I:%M:%S %p').strftime('%m/%d/%Y')  # what comes from SL
                        # ship_date = datetime.datetime.strptime(row[17], '%m/%d/%Y %H:%M').strftime('%m/%d/%Y')  # default when we sort in Excel

                        db_connection.query(
                            """
                            INSERT INTO [analytics].[dbo].[SL_ECOMM]
                            ([site],[item],[des],[qty],[lot],[order_id],[ship_date])
                            VALUES
                            (:site,:item,:des,:qty,:lot,:order_id,:ship_date)
                            """,
                            site=site,
                            item=item,
                            des=des,
                            qty=qty,
                            lot=lot,
                            order_id=order_id,
                            ship_date=ship_date
                        )


if __name__ == "__main__":
    main()

@@ -0,0 +1,420 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
Pull shipment files from the Stash AWS FTP.
|
||||
After copying file, move file to archive folder on FTP.
|
||||
Create ZSHPORD import files, which X3 will consume on a schedule via ZECMSL recurring task.
|
||||
Copy file to the archive and then pass it to the shipment maker directory.
|
||||
|
||||
TODO: Source needs to send us real data for final adjustments (site, discount, multilot, etc.)
|
||||
"""
|
||||
import csv
|
||||
import pprint
|
||||
import dataclasses
|
||||
import datetime
|
||||
import decimal
|
||||
import functools
|
||||
import pathlib
|
||||
import re
|
||||
import shutil
|
||||
import typing
|
||||
import paramiko
|
||||
|
||||
import records # type: ignore
|
||||
|
||||
import yamamotoyama # type: ignore
|
||||
import yamamotoyama.x3_imports # type: ignore
|
||||
|
||||
THIS_DIRECTORY = pathlib.Path(__file__).parent
|
||||
|
||||
SFTP_HOST = "s-8ade4d252cc44c50b.server.transfer.us-west-1.amazonaws.com"
|
||||
SFTP_USERNAME = "yumiddleware2023"
|
||||
SSH_DIRECTORY = THIS_DIRECTORY / "ssh" #TODO fixme
|
||||
SSH_KNOWN_HOSTS_FILE = str(SSH_DIRECTORY / "known_hosts")
|
||||
SSH_KEY_FILENAME = str(SSH_DIRECTORY / "id_ed25519")
|
||||
|
||||
INCOMING_DIRECTORY = THIS_DIRECTORY / "incoming_orders"
|
||||
SHIPMENTS_DIRECTORY = THIS_DIRECTORY / "incoming_shipments"
|
||||
SOH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SOH"
|
||||
|
||||
def main():
|
||||
#retrieve_x12_edi_files()#TODO uncomment
|
||||
for file in INCOMING_DIRECTORY.iterdir():
|
||||
if file.name[-4:] != '.csv':
|
||||
continue
|
||||
else:
|
||||
process_files(file)
|
||||
shutil.move(file, SHIPMENTS_DIRECTORY / file.name)
|
||||
#archives are in the shipping folder
|
||||
combine_zshpords()
|
||||
|
||||
def sftp_server() -> paramiko.SFTPClient:
|
||||
with paramiko.SSHClient() as ssh_client:
|
||||
ssh_client.load_system_host_keys()
|
||||
ssh_client.load_host_keys(SSH_KNOWN_HOSTS_FILE)
|
||||
ssh_client.set_missing_host_key_policy(paramiko.client.RejectPolicy)
|
||||
ssh_client.connect(
|
||||
hostname=SFTP_HOST, username=SFTP_USERNAME, key_filename=SSH_KEY_FILENAME
|
||||
)
|
||||
with ssh_client.open_sftp() as sftp_connection:
|
||||
yield sftp_connection
|
||||
|
||||
def retrieve_x12_edi_files():
|
||||
"""
|
||||
Connect to S3 bucket & pull down files.
|
||||
"""
|
||||
with paramiko.SSHClient() as ssh_client:
|
||||
ssh_client.load_system_host_keys()
|
||||
ssh_client.load_host_keys(SSH_KNOWN_HOSTS_FILE)
|
||||
ssh_client.set_missing_host_key_policy(paramiko.client.RejectPolicy)
|
||||
ssh_client.connect(
|
||||
hostname=SFTP_HOST, username=SFTP_USERNAME, key_filename=SSH_KEY_FILENAME
|
||||
)
|
||||
with ssh_client.open_sftp() as sftp_connection:
|
||||
sftp_connection.chdir("/yu-edi-transfer/source-logi/dev/ecomm-inbound")#TODO set to prod
|
||||
for filename in sftp_connection.listdir():
|
||||
#if edi_945.SOURCE_945_FILENAME_RE.match(filename):#TODO fixme
|
||||
sftp_connection.get(filename, INCOMING_DIRECTORY / filename)
|
||||
new_filename = f"/yu-edi-transfer/source-logi/dev/ecomm-processed/{filename}"#TODO set to prod
|
||||
sftp_connection.rename(filename, new_filename)
|
||||
|
||||
def combine_zshpords():
|
||||
"""
|
||||
Collect all ZSHPORD imports into a single file for easy import.
|
||||
"""
|
||||
archive_directory = SOH_IMPORT_DIRECTORY / "archive"
|
||||
archive_directory.mkdir(exist_ok=True)
|
||||
with (SOH_IMPORT_DIRECTORY / "ZSHPORD.dat").open(
|
||||
"w", encoding="utf-8", newline="\n"
|
||||
) as combined_import_file:
|
||||
for individual_import_filename in SOH_IMPORT_DIRECTORY.glob(
|
||||
"ZSHPORD_*.dat"
|
||||
):
|
||||
with individual_import_filename.open(
|
||||
"r", encoding="utf-8", newline="\n"
|
||||
) as individual_import_file:
|
||||
for line in individual_import_file:
|
||||
combined_import_file.write(line)
|
||||
shutil.move(
|
||||
individual_import_filename,
|
||||
archive_directory / individual_import_filename.name,
|
||||
)
|
||||
|
||||
def process_files(file):
|
||||
with open(file) as source_file:
|
||||
csv_reader = csv.reader(source_file)
|
||||
sales_order = SalesOrder()
|
||||
previous_order = ''
|
||||
current_order = ''
|
||||
for num, row in enumerate(csv_reader):
|
||||
if num == 0:
|
||||
continue #skip header lines
|
||||
if num >= 1: #gather header information
|
||||
current_order = row[6]
|
||||
if current_order != previous_order:
|
||||
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
if sales_order.header.cusordref != '':
|
||||
with yamamotoyama.x3_imports.open_import_file(
|
||||
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{current_order}_{time_stamp}_{sales_order.header.cusordref}.dat"
|
||||
) as import_file:
|
||||
sales_order.output(import_file)
|
||||
sales_order = SalesOrder()
|
||||
previous_order = current_order
|
||||
ship_site = row[0]
|
||||
order_id = row[6]
|
||||
order_date = row[9]
|
||||
customer_name = row[10]
|
||||
# shipadd1 = row[9] # address information is not stored in X3
|
||||
# shipadd2 = row[10]
|
||||
# shipcity = row[11]
|
||||
# shipstate = row[12]
|
||||
# shipzip = row[13]
|
||||
tracking = row[16]
|
||||
weight = row[18]
|
||||
taxes = row[22]
|
||||
ship_charge = row[21]
|
||||
discount = row[24]
|
||||
sales_order.header.cusordref = order_id
|
||||
sales_order.header.orddat = datetime.datetime.strptime(order_date,'%m/%d/%Y %H:%M').strftime('%Y%m%d') #TODO strftime this
|
||||
sales_order.header.stofcy = ship_site
|
||||
sales_order.header.bpdnam = customer_name
|
||||
sales_order.header.invdtaamt_5 = ship_charge
|
||||
sales_order.header.invdtaamt_7 = discount
|
||||
sales_order.header.invdtaamt_8 = taxes
|
||||
|
||||
#gather line data
|
||||
line_product = row[1]
|
||||
line_qty = row[3]
|
||||
line_lot = row[4]
|
||||
line_price = row[20]
|
||||
sales_order.append(
|
||||
SalesOrderDetail(
|
||||
itmref=line_product,
|
||||
qty=int(line_qty),
|
||||
gropri=line_price
|
||||
)
|
||||
)
|
||||
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
with yamamotoyama.x3_imports.open_import_file(
|
||||
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{current_order}_{time_stamp}_{sales_order.header.cusordref}.dat"
|
||||
) as import_file:
|
||||
sales_order.output(import_file)
|
||||
|
||||
@dataclasses.dataclass
|
||||
class SalesOrderDetail:
|
||||
"""
|
||||
Information that goes on an order detail line, taken from ZSHPORD template.
|
||||
"""
|
||||
|
||||
itmref: str = ""
|
||||
itmrefbpc: str = ""
|
||||
itmdes: str = ""
|
||||
qty: int = 0
|
||||
gropri: decimal.Decimal = decimal.Decimal()
|
||||
discrgval_1: decimal.Decimal = decimal.Decimal()
|
||||
zamaztax: decimal.Decimal = decimal.Decimal()
|
||||
star91: str = ""
|
||||
star92: str = ""
|
||||
|
||||
def convert_to_strings(self) -> typing.List[str]:
|
||||
"""
|
||||
Convert to strings for X3 import writing.
|
||||
"""
|
||||
#self.qty = self.check_subdetail_qty()
|
||||
return yamamotoyama.x3_imports.convert_to_strings(
|
||||
[
|
||||
"D",
|
||||
self.itmref,
|
||||
self.itmrefbpc,
|
||||
self.qty,
|
||||
self.gropri,
|
||||
self.discrgval_1,
|
||||
self.zamaztax,
|
||||
self.star91,
|
||||
self.star92,
|
||||
]
|
||||
)
|
||||
|
||||
# def __eq__(self, item: typing.Any) -> bool:
|
||||
# """
|
||||
# Test for equality
|
||||
# """
|
||||
# if isinstance(item, str):
|
||||
# return self.itmref == item
|
||||
# if isinstance(item, SalesOrderDetail):
|
||||
# return self.itmref == item.itmref
|
||||
# return False
|
||||
|
||||
# def fill(self):
|
||||
# """
|
||||
# Set soplin & itmdes from itmref & sohnum
|
||||
# """
|
||||
|
||||
# def get() -> records.Record:
|
||||
# with yamamotoyama.get_connection() as database:
|
||||
# how_many = (
|
||||
# database.query(
|
||||
# """
|
||||
# select
|
||||
# count(*) as [how_many]
|
||||
# from [PROD].[SORDERP] as [SOP]
|
||||
# where
|
||||
# [SOP].[SOHNUM_0] = :sohnum
|
||||
# and [SOP].[ITMREF_0] = :itmref
|
||||
# """,
|
||||
# sohnum=self.sohnum,
|
||||
# itmref=self.itmref,
|
||||
# )
|
||||
# .first()
|
||||
# .how_many
|
||||
# )
|
||||
# if how_many == 1:
|
||||
# return database.query(
|
||||
# """
|
||||
# select top 1
|
||||
# [SOP].[SOPLIN_0]
|
||||
# ,[SOP].[ITMDES1_0]
|
||||
# ,[SOP].[SAU_0]
|
||||
# from [PROD].[SORDERP] as [SOP]
|
||||
# where
|
||||
# [SOP].[SOHNUM_0] = :sohnum
|
||||
# and [SOP].[ITMREF_0] = :itmref
|
||||
# order by
|
||||
# [SOP].[SOPLIN_0]
|
||||
# """,
|
||||
# sohnum=self.sohnum,
|
||||
# itmref=self.itmref,
|
||||
# ).first()
|
||||
# else:
|
||||
# emailtext = str(self.sohnum +' '+str(self.itmref))
|
||||
# msg.attach(MIMEText(emailtext, 'plain'))
|
||||
# with smtplib.SMTP_SSL("smtp.gmail.com", 465) as smtp:
|
||||
# smtp.login(user='x3reportmk2@yamamotoyama.com', password=r'n</W<7fr"VD~\2&[pZc5')
|
||||
# smtp.send_message(msg)
|
||||
# raise NotImplementedError # TODO
|
||||
|
||||
# result = get()
|
||||
# self.soplin = result.SOPLIN_0
|
||||
# self.itmdes = result.ITMDES1_0
|
||||
# self.sau = result.SAU_0
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class SalesOrderHeader:
|
||||
"""
|
||||
Information that goes on an order header, taken from ZSHPORD template.
|
||||
"""
|
||||
|
||||
sohnum: str = ""
|
||||
sohtyp: str = "WEB"
|
||||
bpcord: str = "STSHOPIFY" #TODO are they handling YU?
|
||||
bpcinv: str = "STSHOPIFY" #TODO are they handling YU?
|
||||
bpcpyr: str = "STSHOPIFY" #TODO are they handling YU?
|
||||
bpaadd: str = "BL001"
|
||||
orddat: datetime.date = datetime.date(1753, 1, 1)
|
||||
cusordref: str = ""
|
||||
cur: str = "USD"
|
||||
alltyp: int = 2 #Detailed
|
||||
salfcy: str = "ECS" #TODO are they handling YU?
|
||||
stofcy: str = "" #TODO need to be set from file
|
||||
pte: str = "USPREPAY" #TODO needs checking
|
||||
vacbpr: str = "NTX" #TODO needs checking
|
||||
dlvpio: int = 1 #normal
|
||||
mdl: str = "GRN"#TODO any way to tell how they were sent?
|
||||
yshppaymth: int = 1 #prepaid freight
|
||||
bpcnam: str = "ST SHOPIFY Stashtea.com" #TODO does this need to be set
|
||||
bpdnam: str = ""
|
||||
bpdaddlig_0: str = "999 ANYSTREET"
|
||||
bpdaddlig_1: str = ""
|
||||
bpdaddlig_2: str = ""
|
||||
bpdcty: str = "POMONA"
|
||||
bpdsat: str = "CA"
|
||||
bpdposcod: str = "91768"
|
||||
bpdcry: str = "US"
|
||||
ybpdweb: str = ""
|
||||
ybpdtel: str = ""
|
||||
ybpcweb: str = ""
|
||||
yamaorder: str = ""
|
||||
ygiftwrap: int = 0
|
||||
invdtaamt_5: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_7: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_8: decimal.Decimal = decimal.Decimal()
|
||||
yimport: int = 0
|
||||
pjt: str = ""
|
||||
yedinotes: str = ""
|
||||
|
||||
def convert_to_strings(self) -> typing.List[str]:
|
||||
"""
|
||||
Convert to X3 import line
|
||||
"""
|
||||
return yamamotoyama.x3_imports.convert_to_strings(
|
||||
[
|
||||
"H",
|
||||
self.sohnum,
|
||||
self.sohtyp,
|
||||
self.bpcord,
|
||||
self.bpcinv,
|
||||
self.bpcpyr,
|
||||
self.bpaadd,
|
||||
self.orddat,
|
||||
self.cusordref,
|
||||
self.cur,
|
||||
self.alltyp,
|
||||
self.salfcy,
|
||||
self.stofcy,
|
||||
self.pte,
|
||||
self.vacbpr,
|
||||
self.dlvpio,
|
||||
self.mdl,
|
||||
self.yshppaymth,
|
||||
self.bpcnam,
|
||||
self.bpdnam,
|
||||
self.bpdaddlig_0,
|
||||
self.bpdaddlig_1,
|
||||
self.bpdaddlig_2,
|
||||
self.bpdcty,
|
||||
self.bpdsat,
|
||||
self.bpdposcod,
|
||||
self.bpdcry,
|
||||
self.ybpdweb,
|
||||
self.ybpdtel,
|
||||
self.ybpcweb,
|
||||
self.yamaorder,
|
||||
self.ygiftwrap,
|
||||
self.invdtaamt_5,
|
||||
self.invdtaamt_7,
|
||||
self.invdtaamt_8,
|
||||
self.yimport,
|
||||
self.pjt,
|
||||
self.yedinotes
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class SalesOrderDetailList:
|
||||
"""
|
||||
List of shipment details
|
||||
"""
|
||||
|
||||
_details: typing.List[SalesOrderDetail]
|
||||
_item_set: typing.Set[str]
|
||||
|
||||
def __init__(self):
|
||||
self._details = []
|
||||
self._item_set = set()
|
||||
|
||||
def append(
|
||||
self,
|
||||
salesorder_detail: SalesOrderDetail,
|
||||
):
|
||||
"""
|
||||
Append
|
||||
"""
|
||||
itmref = salesorder_detail.itmref
|
||||
qty = salesorder_detail.qty
|
||||
if itmref in self._item_set:
|
||||
for detail in self._details:
|
||||
if detail.itmref == itmref:
|
||||
detail.qty += qty
|
||||
else:
|
||||
self._item_set.add(itmref)
|
||||
self._details.append(salesorder_detail)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._details)
|
||||
|
||||
|
||||
class SalesOrder:
|
||||
"""
|
||||
Sales order, both header & details
|
||||
"""
|
||||
|
||||
header: SalesOrderHeader
|
||||
details: SalesOrderDetailList
|
||||
|
||||
def __init__(self):
|
||||
self.header = SalesOrderHeader()
|
||||
self.details = SalesOrderDetailList()
|
||||
|
||||
def append(
|
||||
self,
|
||||
salesorder_detail: SalesOrderDetail,
|
||||
):
|
||||
"""
|
||||
Add detail information.
|
||||
"""
|
||||
self.details.append(salesorder_detail)
|
||||
|
||||
def output(self, import_file: typing.TextIO):
|
||||
"""
|
||||
Output entire order to import_file.
|
||||
"""
|
||||
output = functools.partial(
|
||||
yamamotoyama.x3_imports.output_with_file, import_file
|
||||
)
|
||||
output(self.header.convert_to_strings())
|
||||
for detail in self.details:
|
||||
output(detail.convert_to_strings())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
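For reference, the Source CSV column positions these scripts assume, collected from the row[...] indexes used above (not confirmed against a Source file spec):
  row[0] ship site, row[1] item, row[2] description, row[3] quantity, row[4] lot,
  row[6] Shopify order/PO number, row[9] order date, row[10] customer name,
  row[16] tracking number, row[17] ship date, row[18] weight,
  row[20] unit price, row[21] shipping charge, row[22] taxes, row[24] discount
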
@ -37,7 +37,7 @@ SHIPMENTS_DIRECTORY = THIS_DIRECTORY / "incoming_shipments"
|
|||
SOH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SOH"
|
||||
|
||||
def main():
|
||||
#retrieve_x12_edi_files()#TODO uncomment
|
||||
#retrieve_x12_edi_files()#TODO remove this as it's handled by the earlier process
|
||||
for file in INCOMING_DIRECTORY.iterdir():
|
||||
if file.name[-4:] != '.csv':
|
||||
continue
|
||||
|
@ -99,7 +99,7 @@ def combine_zshpords():
|
|||
archive_directory / individual_import_filename.name,
|
||||
)
|
||||
|
||||
def process_files(file):
|
||||
def process_files(file): #I am assuming I am getting a sorted csv file by order number and line id from Source
|
||||
with open(file) as source_file:
|
||||
csv_reader = csv.reader(source_file)
|
||||
sales_order = SalesOrder()
|
||||
|
@ -114,11 +114,12 @@ def process_files(file):
|
|||
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
if sales_order.header.cusordref != '':
|
||||
with yamamotoyama.x3_imports.open_import_file(
|
||||
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{current_order}_{time_stamp}_{sales_order.header.cusordref}.dat"
|
||||
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{previous_order}_{time_stamp}_{sales_order.header.cusordref}.dat"
|
||||
) as import_file:
|
||||
sales_order.output(import_file)
|
||||
sales_order = SalesOrder()
|
||||
previous_order = current_order
|
||||
shopify_order_info = get_details_from_shopify(current_order)
|
||||
ship_site = row[0]
|
||||
order_id = row[6]
|
||||
order_date = row[9]
|
||||
|
@ -130,11 +131,12 @@ def process_files(file):
|
|||
# shipzip = row[13]
|
||||
tracking = row[16]
|
||||
weight = row[18]
|
||||
taxes = row[22]
|
||||
ship_charge = row[21]
|
||||
discount = row[24]
|
||||
taxes = shopify_order_info[0]['total_tax']#row[22]
|
||||
ship_charge = shopify_order_info[0]['shipping_lines__price']#row[21]
|
||||
discount = shopify_order_info[0]['total_discounts']#row[24]
|
||||
sales_order.header.cusordref = order_id
|
||||
sales_order.header.orddat = datetime.datetime.strptime(order_date,'%m/%d/%y %I:%M %p').strftime('%Y%m%d') #TODO strftim this
|
||||
sales_order.header.orddat = datetime.datetime.strptime(order_date,'%m/%d/%Y %I:%M:%S %p').strftime('%Y%m%d') # what comes from SL
|
||||
#sales_order.header.orddat = datetime.datetime.strptime(order_date,'%m/%d/%Y %H:%M').strftime('%Y%m%d') #default when we sort in Excel
|
||||
sales_order.header.stofcy = ship_site
|
||||
sales_order.header.bpdnam = customer_name
|
||||
sales_order.header.invdtaamt_5 = ship_charge
|
||||
|
@ -146,11 +148,12 @@ def process_files(file):
|
|||
line_qty = row[3]
|
||||
line_lot = row[4]
|
||||
line_price = row[20]
|
||||
shopify_item_data = get_item_from_shopify_order(shopify_order_info, line_product)
|
||||
sales_order.append(
|
||||
SalesOrderDetail(
|
||||
itmref=line_product,
|
||||
qty=int(line_qty),
|
||||
gropri=line_price
|
||||
itmref=shopify_item_data['sku'],#line_product,
|
||||
qty=int(shopify_item_data['quantity']),#int(line_qty),
|
||||
gropri=shopify_item_data['price']#line_price
|
||||
)
|
||||
)
|
||||
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
|
@ -159,6 +162,34 @@ def process_files(file):
|
|||
) as import_file:
|
||||
sales_order.output(import_file)
|
||||
|
||||
|
||||
def get_item_from_shopify_order(line_item_list, product):
|
||||
for record in line_item_list:
|
||||
if record['sku'] == product:
|
||||
return record
|
||||
return None
|
||||
|
||||
|
||||
def get_details_from_shopify(order):
|
||||
with yamamotoyama.get_connection() as db_connection:
|
||||
return db_connection.query(
|
||||
"""
|
||||
select
|
||||
[ecommerce_orders].[order_number]
|
||||
,[ecommerce_orders].[total_tax]
|
||||
,[ecommerce_orders].[total_discounts]
|
||||
,[ecommerce_orders].[shipping_lines__price]
|
||||
,[ecommerce_order_lines].[sku]
|
||||
,[ecommerce_order_lines].[quantity]
|
||||
,[ecommerce_order_lines].[price]
|
||||
from [staging].[dbo].[ecommerce_orders]
|
||||
left join [staging].[dbo].[ecommerce_order_lines]
|
||||
on [ecommerce_orders].[id] = [ecommerce_order_lines].[id]
|
||||
where order_number = :shopifyorder
|
||||
""",
|
||||
shopifyorder=order,
|
||||
).all()
|
||||
|
||||
@dataclasses.dataclass
|
||||
class SalesOrderDetail:
|
||||
"""
|
||||
|
|
|
@ -25,7 +25,7 @@ def main():
|
|||
continue
|
||||
else:
|
||||
process_files(file)
|
||||
#shutil.move(file, INCOMING_DIRECTORY / "archive" / file.name)
|
||||
shutil.move(file, INCOMING_DIRECTORY / "archive" / file.name)
|
||||
combine_zship945s()
|
||||
|
||||
|
||||
|
@ -36,7 +36,7 @@ def combine_zship945s():
|
|||
archive_directory = SDH_IMPORT_DIRECTORY / "archive"
|
||||
archive_directory.mkdir(exist_ok=True)
|
||||
with (SDH_IMPORT_DIRECTORY / "ZSHIP945S.dat").open(
|
||||
"w", encoding="utf-8", newline="\n"
|
||||
"a", encoding="utf-8", newline="\n"
|
||||
) as combined_import_file:
|
||||
for individual_import_filename in SDH_IMPORT_DIRECTORY.glob(
|
||||
"ZSHIP945S_*.dat"
|
||||
|
@ -52,12 +52,12 @@ def combine_zship945s():
|
|||
)
|
||||
|
||||
def find_so_from_po(cust_po):
|
||||
with yamamotoyama.get_connection('test') as db_connection:#TODO remove 'test'
|
||||
with yamamotoyama.get_connection() as db_connection:
|
||||
return db_connection.query(
|
||||
"""
|
||||
select
|
||||
SOHNUM_0
|
||||
from FY23TEST.SORDER --TODO change to PROD
|
||||
from PROD.SORDER
|
||||
where
|
||||
SOHTYP_0 = 'WEB'
|
||||
and BPCORD_0 = 'STSHOPIFY'
|
||||
|
@ -71,23 +71,22 @@ def process_files(file):
|
|||
csv_reader = csv.reader(source_file)
|
||||
warehouse_shipment = WarehouseShipment()
|
||||
previous_order = ''
|
||||
previous_item = ''
|
||||
current_order = ''
|
||||
for num, row in enumerate(csv_reader):
|
||||
if num == 0:
|
||||
continue #skip header lines
|
||||
if num >= 1: #gather header information
|
||||
sohnum = find_so_from_po(row[6])
|
||||
if num == 1:
|
||||
previous_order = sohnum
|
||||
if previous_order != sohnum:
|
||||
if warehouse_shipment.sohnum != '':
|
||||
current_order = row[6]
|
||||
if current_order != previous_order:
|
||||
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
if warehouse_shipment.sohnum != '':
|
||||
with yamamotoyama.x3_imports.open_import_file(
|
||||
SDH_IMPORT_DIRECTORY / f"ZSHIP945S_{warehouse_shipment.sohnum}_{time_stamp}.dat"
|
||||
) as import_file:
|
||||
warehouse_shipment.output(import_file)
|
||||
warehouse_shipment = WarehouseShipment()
|
||||
previous_order = sohnum
|
||||
previous_order = current_order
|
||||
sohnum = find_so_from_po(current_order)
|
||||
order_date = row[9]
|
||||
customer_name = row[10]
|
||||
# shipadd1 = row[9] # address information is not stored in X3
|
||||
|
@ -97,17 +96,17 @@ def process_files(file):
|
|||
# shipzip = row[13]
|
||||
tracking = row[16]
|
||||
weight = row[18]
|
||||
ship_charge = row[21]
|
||||
taxes = row[22]
|
||||
ship_site = row[0]
|
||||
discount = row[4]
|
||||
# ship_charge = row[22]
|
||||
# taxes = "?" #unused
|
||||
# ship_site = "?" #unused
|
||||
# discount = "?" #unused
|
||||
warehouse_shipment.sohnum = sohnum
|
||||
#warehouse_shipment.header.sohnum = sohnum
|
||||
warehouse_shipment.header.shidat = datetime.datetime.strptime(order_date,'%m/%d/%y %I:%M %p')
|
||||
warehouse_shipment.header.sohnum = sohnum
|
||||
warehouse_shipment.header.shidat = datetime.datetime.strptime(order_date,'%m/%d/%Y %I:%M:%S %p')# what comes from SL
|
||||
#warehouse_shipment.header.shidat = datetime.datetime.strptime(order_date,'%m/%d/%Y %H:%M')#default when we sort in Excel
|
||||
warehouse_shipment.header.ylicplate = tracking
|
||||
warehouse_shipment.header.growei = weight
|
||||
#gather line data
|
||||
#TODO how are multiple lots processed?
|
||||
line_product = row[1]
|
||||
line_qty = row[3]
|
||||
line_lot = row[4]
|
||||
|
@ -116,21 +115,12 @@ def process_files(file):
|
|||
qtypcu=-1 * int(line_qty),
|
||||
lot=line_lot,
|
||||
)
|
||||
if previous_item != line_product:
|
||||
detail = WarehouseShipmentDetail(
|
||||
warehouse_shipment.append(
|
||||
WarehouseShipmentDetail(
|
||||
sohnum=sohnum,
|
||||
itmref=line_product,
|
||||
qty=int(line_qty),
|
||||
)
|
||||
# pprint.pprint(detail)
|
||||
# pprint.pprint(subdetail)
|
||||
warehouse_shipment.append(
|
||||
detail,
|
||||
subdetail,
|
||||
)
|
||||
else:
|
||||
warehouse_shipment.append(
|
||||
detail,
|
||||
),
|
||||
subdetail,
|
||||
)
|
||||
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
|
@ -191,7 +181,6 @@ class WarehouseShipmentDetail:
|
|||
"""
|
||||
Add subdetail
|
||||
"""
|
||||
|
||||
subdetail.pcu = self.sau
|
||||
self.subdetails.append(subdetail)
|
||||
|
||||
|
@ -202,8 +191,6 @@ class WarehouseShipmentDetail:
|
|||
total_cases = 0
|
||||
for subdetail in self.subdetails:
|
||||
total_cases += subdetail.qtypcu
|
||||
if subdetail.pcu == '':
|
||||
subdetail.pcu = self.sau
|
||||
return abs(total_cases)
|
||||
|
||||
def convert_to_strings(self) -> typing.List[str]:
|
||||
|
@ -241,13 +228,13 @@ class WarehouseShipmentDetail:
|
|||
"""
|
||||
|
||||
def get() -> records.Record:
|
||||
with yamamotoyama.get_connection('test') as database:#TODO remove test
|
||||
with yamamotoyama.get_connection() as database:
|
||||
how_many = (
|
||||
database.query(
|
||||
"""
|
||||
select
|
||||
count(*) as [how_many]
|
||||
from [FY23TEST].[SORDERP] as [SOP] --TODO change to PROD
|
||||
from [PROD].[SORDERP] as [SOP]
|
||||
where
|
||||
[SOP].[SOHNUM_0] = :sohnum
|
||||
and [SOP].[ITMREF_0] = :itmref
|
||||
|
@ -265,7 +252,7 @@ class WarehouseShipmentDetail:
|
|||
[SOP].[SOPLIN_0]
|
||||
,[SOP].[ITMDES1_0]
|
||||
,[SOP].[SAU_0]
|
||||
from [FY23TEST].[SORDERP] as [SOP] --TODO change to PROD
|
||||
from [PROD].[SORDERP] as [SOP]
|
||||
where
|
||||
[SOP].[SOHNUM_0] = :sohnum
|
||||
and [SOP].[ITMREF_0] = :itmref
|
||||
|
@ -279,6 +266,7 @@ class WarehouseShipmentDetail:
|
|||
pprint.pprint(self.sohnum)
|
||||
pprint.pprint(self.itmref)
|
||||
raise NotImplementedError # TODO
|
||||
|
||||
result = get()
|
||||
self.soplin = result.SOPLIN_0
|
||||
self.itmdes = result.ITMDES1_0
|
||||
|
@ -530,7 +518,7 @@ class WarehouseShipment:
|
|||
"""
|
||||
Fetch sales order from X3 database.
|
||||
"""
|
||||
with yamamotoyama.get_connection('test') as db_connection:#TODO remove 'test'
|
||||
with yamamotoyama.get_connection() as db_connection:
|
||||
return db_connection.query(
|
||||
"""
|
||||
select
|
||||
|
@ -596,7 +584,7 @@ class WarehouseShipment:
|
|||
,[SOH].[BPDSAT_0]
|
||||
,[SOH].[BPDCRY_0]
|
||||
,[SOH].[BPDCRYNAM_0]
|
||||
from [FY23TEST].[SORDER] as [SOH]--TODO change back to PROD
|
||||
from [PROD].[SORDER] as [SOH]
|
||||
where
|
||||
[SOH].[SOHNUM_0] = :order
|
||||
""",
|
||||
|
|
|
@@ -0,0 +1,671 @@
|
|||
#!/usr/bin/env python3
|
||||
import csv
|
||||
import pprint
|
||||
import dataclasses
|
||||
import datetime
|
||||
import decimal
|
||||
import functools
|
||||
import pathlib
|
||||
import re
|
||||
import shutil
|
||||
import typing
|
||||
|
||||
import records # type: ignore
|
||||
|
||||
import yamamotoyama # type: ignore
|
||||
import yamamotoyama.x3_imports # type: ignore
|
||||
|
||||
THIS_DIRECTORY = pathlib.Path(__file__).parent
|
||||
INCOMING_DIRECTORY = THIS_DIRECTORY / "incoming_shipments"
|
||||
SDH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SDH"
|
||||
|
||||
def main():
|
||||
for file in INCOMING_DIRECTORY.iterdir():
|
||||
if file.name[-4:] != '.csv':
|
||||
continue
|
||||
else:
|
||||
process_files(file)
|
||||
shutil.move(file, INCOMING_DIRECTORY / "archive" / file.name)
|
||||
combine_zship945s()
|
||||
|
||||
|
||||
def combine_zship945s():
|
||||
"""
|
||||
Collect all ZSHIP945S imports into a single file for easy import.
|
||||
"""
|
||||
archive_directory = SDH_IMPORT_DIRECTORY / "archive"
|
||||
archive_directory.mkdir(exist_ok=True)
|
||||
with (SDH_IMPORT_DIRECTORY / "ZSHIP945S.dat").open(
|
||||
"a", encoding="utf-8", newline="\n"
|
||||
) as combined_import_file:
|
||||
for individual_import_filename in SDH_IMPORT_DIRECTORY.glob(
|
||||
"ZSHIP945S_*.dat"
|
||||
):
|
||||
with individual_import_filename.open(
|
||||
"r", encoding="utf-8", newline="\n"
|
||||
) as individual_import_file:
|
||||
for line in individual_import_file:
|
||||
combined_import_file.write(line)
|
||||
shutil.move(
|
||||
individual_import_filename,
|
||||
archive_directory / individual_import_filename.name,
|
||||
)
|
||||
|
||||
def find_so_from_po(cust_po):
|
||||
with yamamotoyama.get_connection() as db_connection:
|
||||
return db_connection.query(
|
||||
"""
|
||||
select
|
||||
SOHNUM_0
|
||||
from PROD.SORDER
|
||||
where
|
||||
SOHTYP_0 = 'WEB'
|
||||
and BPCORD_0 = 'STSHOPIFY'
|
||||
and CUSORDREF_0 = :order
|
||||
""",
|
||||
order=cust_po,
|
||||
).first()["SOHNUM_0"]
|
||||
|
||||
def process_files(file):
|
||||
with open(file) as source_file:
|
||||
csv_reader = csv.reader(source_file)
|
||||
warehouse_shipment = WarehouseShipment()
|
||||
for num, row in enumerate(csv_reader):
|
||||
if num == 0:
|
||||
continue #skip header lines
|
||||
if num == 1: #gather header information
|
||||
sohnum = find_so_from_po(row[6])
|
||||
order_date = row[9]
|
||||
customer_name = row[10]
|
||||
# shipadd1 = row[9] # address information is not stored in X3
|
||||
# shipadd2 = row[10]
|
||||
# shipcity = row[11]
|
||||
# shipstate = row[12]
|
||||
# shipzip = row[13]
|
||||
tracking = row[16]
|
||||
weight = row[18]
|
||||
# ship_charge = row[22]
|
||||
# taxes = "?" #unused
|
||||
# ship_site = "?" #unused
|
||||
# discount = "?" #unused
|
||||
warehouse_shipment.sohnum = sohnum
|
||||
#warehouse_shipment.header.sohnum = sohnum
|
||||
#warehouse_shipment.header.shidat = datetime.datetime.strptime(order_date,'%m/%d/%Y')
|
||||
warehouse_shipment.header.shidat = datetime.datetime.strptime(order_date,'%m/%d/%Y %H:%M')
|
||||
warehouse_shipment.header.ylicplate = tracking
|
||||
warehouse_shipment.header.growei = weight
|
||||
#gather line data
|
||||
line_product = row[1]
|
||||
line_qty = row[3]
|
||||
line_lot = row[4]
|
||||
line_price = row[20]
|
||||
subdetail = WarehouseShipmentSubDetail(
|
||||
qtypcu=-1 * int(line_qty),
|
||||
lot=line_lot,
|
||||
)
|
||||
warehouse_shipment.append(
|
||||
WarehouseShipmentDetail(
|
||||
sohnum=sohnum,
|
||||
itmref=line_product,
|
||||
qty=int(line_qty),
|
||||
),
|
||||
subdetail,
|
||||
)
|
||||
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
with yamamotoyama.x3_imports.open_import_file(
|
||||
SDH_IMPORT_DIRECTORY / f"ZSHIP945S_{warehouse_shipment.sohnum}_{time_stamp}.dat"
|
||||
) as import_file:
|
||||
warehouse_shipment.output(import_file)
|
||||
|
||||
@dataclasses.dataclass
|
||||
class WarehouseShipmentSubDetail:
|
||||
"""
|
||||
Information that goes onto a shipment sub-detail line, taken from ZSHIP945 template.
|
||||
"""
|
||||
|
||||
sta: str = "A"
|
||||
pcu: str = ""
|
||||
qtypcu: int = 0
|
||||
loc: str = ""
|
||||
lot: str = ""
|
||||
sernum: str = ""
|
||||
|
||||
def convert_to_strings(self) -> typing.List[str]:
|
||||
"""
|
||||
Convert to strings for X3 import writing.
|
||||
"""
|
||||
return yamamotoyama.x3_imports.convert_to_strings(
|
||||
[
|
||||
"S",
|
||||
self.sta,
|
||||
self.pcu,
|
||||
self.qtypcu,
|
||||
self.loc,
|
||||
self.lot,
|
||||
self.sernum,
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class WarehouseShipmentDetail:
|
||||
"""
|
||||
Information that goes on a shipment detail line, taken from ZSHIP945 template.
|
||||
"""
|
||||
|
||||
sohnum: str = ""
|
||||
soplin: int = 0
|
||||
itmref: str = ""
|
||||
itmdes: str = ""
|
||||
sau: str = ""
|
||||
qty: int = 0
|
||||
star91: str = ""
|
||||
star92: str = ""
|
||||
subdetails: typing.List[WarehouseShipmentSubDetail] = dataclasses.field(
|
||||
default_factory=list
|
||||
)
|
||||
|
||||
def append(self, subdetail: WarehouseShipmentSubDetail):
|
||||
"""
|
||||
Add subdetail
|
||||
"""
|
||||
subdetail.pcu = self.sau
|
||||
self.subdetails.append(subdetail)
|
||||
|
||||
def check_subdetail_qty(self):
|
||||
"""
|
||||
Check for shortages by totaling up subdetail quantities.
|
||||
"""
|
||||
total_cases = 0
|
||||
for subdetail in self.subdetails:
|
||||
total_cases += subdetail.qtypcu
|
||||
return abs(total_cases)
|
||||
|
||||
def convert_to_strings(self) -> typing.List[str]:
|
||||
"""
|
||||
Convert to strings for X3 import writing.
|
||||
"""
|
||||
self.qty = self.check_subdetail_qty()
|
||||
return yamamotoyama.x3_imports.convert_to_strings(
|
||||
[
|
||||
"L",
|
||||
self.sohnum,
|
||||
self.soplin,
|
||||
self.itmref,
|
||||
self.itmdes,
|
||||
self.sau,
|
||||
self.qty,
|
||||
self.star91,
|
||||
self.star92,
|
||||
]
|
||||
)
|
||||
|
||||
def __eq__(self, item: typing.Any) -> bool:
|
||||
"""
|
||||
Test for equality
|
||||
"""
|
||||
if isinstance(item, str):
|
||||
return self.itmref == item
|
||||
if isinstance(item, WarehouseShipmentDetail):
|
||||
return self.itmref == item.itmref
|
||||
return False
|
||||
|
||||
def fill(self):
|
||||
"""
|
||||
Set soplin & itmdes from itmref & sohnum
|
||||
"""
|
||||
|
||||
def get() -> records.Record:
|
||||
with yamamotoyama.get_connection() as database:
|
||||
how_many = (
|
||||
database.query(
|
||||
"""
|
||||
select
|
||||
count(*) as [how_many]
|
||||
from [PROD].[SORDERP] as [SOP]
|
||||
where
|
||||
[SOP].[SOHNUM_0] = :sohnum
|
||||
and [SOP].[ITMREF_0] = :itmref
|
||||
""",
|
||||
sohnum=self.sohnum,
|
||||
itmref=self.itmref,
|
||||
)
|
||||
.first()
|
||||
.how_many
|
||||
)
|
||||
if how_many == 1:
|
||||
return database.query(
|
||||
"""
|
||||
select top 1
|
||||
[SOP].[SOPLIN_0]
|
||||
,[SOP].[ITMDES1_0]
|
||||
,[SOP].[SAU_0]
|
||||
from [PROD].[SORDERP] as [SOP]
|
||||
where
|
||||
[SOP].[SOHNUM_0] = :sohnum
|
||||
and [SOP].[ITMREF_0] = :itmref
|
||||
order by
|
||||
[SOP].[SOPLIN_0]
|
||||
""",
|
||||
sohnum=self.sohnum,
|
||||
itmref=self.itmref,
|
||||
).first()
|
||||
else:
|
||||
pprint.pprint(self.sohnum)
|
||||
pprint.pprint(self.itmref)
|
||||
raise NotImplementedError # TODO
|
||||
|
||||
result = get()
|
||||
self.soplin = result.SOPLIN_0
|
||||
self.itmdes = result.ITMDES1_0
|
||||
self.sau = result.SAU_0
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class WarehouseShipmentHeader:
|
||||
"""
|
||||
Information that goes on a shipment header, taken from ZSHIP945 template.
|
||||
"""
|
||||
|
||||
salfcy: str = "STC"
|
||||
stofcy: str = ""
|
||||
sdhnum: str = ""
|
||||
bpcord: str = ""
|
||||
bpaadd: str = "SH001"
|
||||
cur: str = "USD"
|
||||
shidat: datetime.date = datetime.date(1753, 1, 1)
|
||||
cfmflg: int = 1
|
||||
pjt: str = ""
|
||||
bptnum: str = ""
|
||||
ylicplate: str = ""
|
||||
invdtaamt_2: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_3: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_4: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_5: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_6: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_7: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_8: decimal.Decimal = decimal.Decimal()
|
||||
invdtaamt_9: decimal.Decimal = decimal.Decimal()
|
||||
die: str = ""
|
||||
die_1: str = ""
|
||||
die_2: str = ""
|
||||
die_3: str = ""
|
||||
die_4: str = ""
|
||||
die_5: str = ""
|
||||
die_6: str = ""
|
||||
die_7: str = ""
|
||||
die_8: str = ""
|
||||
die_9: str = ""
|
||||
die_10: str = ""
|
||||
die_11: str = ""
|
||||
die_12: str = ""
|
||||
die_13: str = ""
|
||||
die_14: str = ""
|
||||
die_15: str = ""
|
||||
die_16: str = ""
|
||||
die_17: str = ""
|
||||
die_18: str = ""
|
||||
die_19: str = ""
|
||||
cce: str = ""
|
||||
cce_1: str = ""
|
||||
cce_2: str = ""
|
||||
cce_3: str = ""
|
||||
cce_4: str = ""
|
||||
cce_5: str = ""
|
||||
cce_6: str = ""
|
||||
cce_7: str = ""
|
||||
cce_8: str = ""
|
||||
cce_9: str = ""
|
||||
cce_10: str = ""
|
||||
cce_11: str = ""
|
||||
cce_12: str = ""
|
||||
cce_13: str = ""
|
||||
cce_14: str = ""
|
||||
cce_15: str = ""
|
||||
cce_16: str = ""
|
||||
cce_17: str = ""
|
||||
cce_18: str = ""
|
||||
cce_19: str = ""
|
||||
bpdnam: str = ""
|
||||
bpdaddlig: str = ""
|
||||
bpdaddlig_1: str = ""
|
||||
bpdaddlig_2: str = ""
|
||||
bpdposcod: str = ""
|
||||
bpdcty: str = ""
|
||||
bpdsat: str = ""
|
||||
bpdcry: str = ""
|
||||
bpdcrynam: str = ""
|
||||
sdhtyp: str = "SDN"
|
||||
growei: decimal.Decimal = decimal.Decimal()
|
||||
pacnbr: int = 0
|
||||
star71: str = ""
|
||||
star72: str = ""
|
||||
star81: str = ""
|
||||
star82: str = ""
|
||||
|
||||
def convert_to_strings(self) -> typing.List[str]:
|
||||
"""
|
||||
Convert to X3 import line
|
||||
"""
|
||||
return yamamotoyama.x3_imports.convert_to_strings(
|
||||
[
|
||||
"H",
|
||||
self.salfcy,
|
||||
self.stofcy,
|
||||
self.sdhnum,
|
||||
self.bpcord,
|
||||
self.bpaadd,
|
||||
self.cur,
|
||||
self.shidat.strftime("%Y%m%d"),
|
||||
self.cfmflg,
|
||||
self.pjt,
|
||||
self.bptnum,
|
||||
self.ylicplate,
|
||||
self.invdtaamt_2,
|
||||
self.invdtaamt_3,
|
||||
self.invdtaamt_4,
|
||||
self.invdtaamt_5,
|
||||
self.invdtaamt_6,
|
||||
self.invdtaamt_7,
|
||||
self.invdtaamt_8,
|
||||
self.invdtaamt_9,
|
||||
self.die,
|
||||
self.die_1,
|
||||
self.die_2,
|
||||
self.die_3,
|
||||
self.die_4,
|
||||
self.die_5,
|
||||
self.die_6,
|
||||
self.die_7,
|
||||
self.die_8,
|
||||
self.die_9,
|
||||
self.die_10,
|
||||
self.die_11,
|
||||
self.die_12,
|
||||
self.die_13,
|
||||
self.die_14,
|
||||
self.die_15,
|
||||
self.die_16,
|
||||
self.die_17,
|
||||
self.die_18,
|
||||
self.die_19,
|
||||
self.cce,
|
||||
self.cce_1,
|
||||
self.cce_2,
|
||||
self.cce_3,
|
||||
self.cce_4,
|
||||
self.cce_5,
|
||||
self.cce_6,
|
||||
self.cce_7,
|
||||
self.cce_8,
|
||||
self.cce_9,
|
||||
self.cce_10,
|
||||
self.cce_11,
|
||||
self.cce_12,
|
||||
self.cce_13,
|
||||
self.cce_14,
|
||||
self.cce_15,
|
||||
self.cce_16,
|
||||
self.cce_17,
|
||||
self.cce_18,
|
||||
self.cce_19,
|
||||
self.bpdnam,
|
||||
self.bpdaddlig,
|
||||
self.bpdaddlig_1,
|
||||
self.bpdaddlig_2,
|
||||
self.bpdposcod,
|
||||
self.bpdcty,
|
||||
self.bpdsat,
|
||||
self.bpdcry,
|
||||
self.bpdcrynam,
|
||||
self.sdhtyp,
|
||||
self.growei,
|
||||
self.pacnbr,
|
||||
self.star71,
|
||||
self.star72,
|
||||
self.star81,
|
||||
self.star82,
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class WarehouseShipmentDetailList:
|
||||
"""
|
||||
List of shipment details
|
||||
"""
|
||||
|
||||
_details: typing.List[WarehouseShipmentDetail]
|
||||
_item_set: typing.Set[str]
|
||||
|
||||
def __init__(self):
|
||||
self._details = []
|
||||
self._item_set = set()
|
||||
|
||||
def append(
|
||||
self,
|
||||
shipment_detail: WarehouseShipmentDetail,
|
||||
shipment_subdetail: WarehouseShipmentSubDetail,
|
||||
):
|
||||
"""
|
||||
Append
|
||||
"""
|
||||
itmref = shipment_detail.itmref
|
||||
if itmref in self._item_set:
|
||||
for detail in self._details:
|
||||
if detail == itmref:
|
||||
detail.subdetails.append(shipment_subdetail)
|
||||
return
|
||||
self._item_set.add(itmref)
|
||||
shipment_detail.fill()
|
||||
shipment_detail.append(shipment_subdetail)
|
||||
self._details.append(shipment_detail)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._details)
|
||||
|
||||
|
||||
class WarehouseShipment:
|
||||
"""
|
||||
Warehouse shipment, both header & details
|
||||
"""
|
||||
|
||||
header: WarehouseShipmentHeader
|
||||
details: WarehouseShipmentDetailList
|
||||
_sohnum: str
|
||||
|
||||
def __init__(self):
|
||||
self.header = WarehouseShipmentHeader()
|
||||
self._sohnum = ""
|
||||
self.details = WarehouseShipmentDetailList()
|
||||
|
||||
def append(
|
||||
self,
|
||||
shipment_detail: WarehouseShipmentDetail,
|
||||
shipment_subdetail: WarehouseShipmentSubDetail,
|
||||
):
|
||||
"""
|
||||
Add detail information.
|
||||
"""
|
||||
self.details.append(shipment_detail, shipment_subdetail)
|
||||
|
||||
@property
|
||||
def sohnum(self):
|
||||
"""
|
||||
Sales order number
|
||||
"""
|
||||
return self._sohnum
|
||||
|
||||
@sohnum.setter
|
||||
def sohnum(self, value: str):
|
||||
if self._sohnum != value:
|
||||
self._sohnum = value
|
||||
if value:
|
||||
self._fill_info_from_so()
|
||||
|
||||
def _get_so_from_x3(self) -> records.Record:
|
||||
"""
|
||||
Fetch sales order from X3 database.
|
||||
"""
|
||||
with yamamotoyama.get_connection() as db_connection:
|
||||
return db_connection.query(
|
||||
"""
|
||||
select
|
||||
[SOH].[SALFCY_0]
|
||||
,[SOH].[STOFCY_0]
|
||||
,[SOH].[BPCORD_0]
|
||||
,[SOH].[BPAADD_0]
|
||||
,[SOH].[CUR_0]
|
||||
,[SOH].[INVDTAAMT_2]
|
||||
,[SOH].[INVDTAAMT_3]
|
||||
,[SOH].[INVDTAAMT_4]
|
||||
,[SOH].[INVDTAAMT_5]
|
||||
,[SOH].[INVDTAAMT_6]
|
||||
,[SOH].[INVDTAAMT_7]
|
||||
,[SOH].[INVDTAAMT_8]
|
||||
,[SOH].[INVDTAAMT_9]
|
||||
,[SOH].[DIE_0]
|
||||
,[SOH].[DIE_1]
|
||||
,[SOH].[DIE_2]
|
||||
,[SOH].[DIE_3]
|
||||
,[SOH].[DIE_4]
|
||||
,[SOH].[DIE_5]
|
||||
,[SOH].[DIE_6]
|
||||
,[SOH].[DIE_7]
|
||||
,[SOH].[DIE_8]
|
||||
,[SOH].[DIE_9]
|
||||
,[SOH].[DIE_10]
|
||||
,[SOH].[DIE_11]
|
||||
,[SOH].[DIE_12]
|
||||
,[SOH].[DIE_13]
|
||||
,[SOH].[DIE_14]
|
||||
,[SOH].[DIE_15]
|
||||
,[SOH].[DIE_16]
|
||||
,[SOH].[DIE_17]
|
||||
,[SOH].[DIE_18]
|
||||
,[SOH].[DIE_19]
|
||||
,[SOH].[CCE_0]
|
||||
,[SOH].[CCE_1]
|
||||
,[SOH].[CCE_2]
|
||||
,[SOH].[CCE_3]
|
||||
,[SOH].[CCE_4]
|
||||
,[SOH].[CCE_5]
|
||||
,[SOH].[CCE_6]
|
||||
,[SOH].[CCE_7]
|
||||
,[SOH].[CCE_8]
|
||||
,[SOH].[CCE_9]
|
||||
,[SOH].[CCE_10]
|
||||
,[SOH].[CCE_11]
|
||||
,[SOH].[CCE_12]
|
||||
,[SOH].[CCE_13]
|
||||
,[SOH].[CCE_14]
|
||||
,[SOH].[CCE_15]
|
||||
,[SOH].[CCE_16]
|
||||
,[SOH].[CCE_17]
|
||||
,[SOH].[CCE_18]
|
||||
,[SOH].[CCE_19]
|
||||
,[SOH].[BPDNAM_0]
|
||||
,[SOH].[BPDADDLIG_0]
|
||||
,[SOH].[BPDADDLIG_1]
|
||||
,[SOH].[BPDADDLIG_2]
|
||||
,[SOH].[BPDPOSCOD_0]
|
||||
,[SOH].[BPDCTY_0]
|
||||
,[SOH].[BPDSAT_0]
|
||||
,[SOH].[BPDCRY_0]
|
||||
,[SOH].[BPDCRYNAM_0]
|
||||
from [PROD].[SORDER] as [SOH]
|
||||
where
|
||||
[SOH].[SOHNUM_0] = :order
|
||||
""",
|
||||
order=self.sohnum,
|
||||
).first()
|
||||
|
||||
def _copy_accounting_codes(self, result: records.Record):
|
||||
"""
|
||||
Fill in all the accounting codes
|
||||
"""
|
||||
self.header.die = result.DIE_0
|
||||
self.header.die_1 = result.DIE_1
|
||||
self.header.die_2 = result.DIE_2
|
||||
self.header.die_3 = result.DIE_3
|
||||
self.header.die_4 = result.DIE_4
|
||||
self.header.die_5 = result.DIE_5
|
||||
self.header.die_6 = result.DIE_6
|
||||
self.header.die_7 = result.DIE_7
|
||||
self.header.die_8 = result.DIE_8
|
||||
self.header.die_9 = result.DIE_9
|
||||
self.header.die_10 = result.DIE_10
|
||||
self.header.die_11 = result.DIE_11
|
||||
self.header.die_12 = result.DIE_12
|
||||
self.header.die_13 = result.DIE_13
|
||||
self.header.die_14 = result.DIE_14
|
||||
self.header.die_15 = result.DIE_15
|
||||
self.header.die_16 = result.DIE_16
|
||||
self.header.die_17 = result.DIE_17
|
||||
self.header.die_18 = result.DIE_18
|
||||
self.header.die_19 = result.DIE_19
|
||||
self.header.cce = result.CCE_0
|
||||
self.header.cce_1 = result.CCE_1
|
||||
self.header.cce_2 = result.CCE_2
|
||||
self.header.cce_3 = result.CCE_3
|
||||
self.header.cce_4 = result.CCE_4
|
||||
self.header.cce_5 = result.CCE_5
|
||||
self.header.cce_6 = result.CCE_6
|
||||
self.header.cce_7 = result.CCE_7
|
||||
self.header.cce_8 = result.CCE_8
|
||||
self.header.cce_9 = result.CCE_9
|
||||
self.header.cce_10 = result.CCE_10
|
||||
self.header.cce_11 = result.CCE_11
|
||||
self.header.cce_12 = result.CCE_12
|
||||
self.header.cce_13 = result.CCE_13
|
||||
self.header.cce_14 = result.CCE_14
|
||||
self.header.cce_15 = result.CCE_15
|
||||
self.header.cce_16 = result.CCE_16
|
||||
self.header.cce_17 = result.CCE_17
|
||||
self.header.cce_18 = result.CCE_18
|
||||
self.header.cce_19 = result.CCE_19
|
||||
|
||||
def _fill_info_from_so(self):
|
||||
"""
|
||||
When we learn the SOHNUM, we can copy information from the sales order.
|
||||
"""
|
||||
result = self._get_so_from_x3()
|
||||
self.header.salfcy = result.SALFCY_0
|
||||
self.header.stofcy = result.STOFCY_0
|
||||
self.header.bpcord = result.BPCORD_0
|
||||
self.header.bpaadd = result.BPAADD_0
|
||||
self.header.cur = result.CUR_0
|
||||
self.header.invdtaamt_2 = result.INVDTAAMT_2
|
||||
self.header.invdtaamt_3 = result.INVDTAAMT_3
|
||||
self.header.invdtaamt_4 = result.INVDTAAMT_4
|
||||
self.header.invdtaamt_5 = result.INVDTAAMT_5
|
||||
self.header.invdtaamt_6 = result.INVDTAAMT_6
|
||||
self.header.invdtaamt_7 = result.INVDTAAMT_7
|
||||
self.header.invdtaamt_8 = result.INVDTAAMT_8
|
||||
self.header.invdtaamt_9 = result.INVDTAAMT_9
|
||||
self._copy_accounting_codes(result)
|
||||
self.header.bpdnam = result.BPDNAM_0
|
||||
self.header.bpdaddlig = result.BPDADDLIG_0
|
||||
self.header.bpdaddlig_1 = result.BPDADDLIG_1
|
||||
self.header.bpdaddlig_2 = result.BPDADDLIG_2
|
||||
self.header.bpdposcod = result.BPDPOSCOD_0
|
||||
self.header.bpdcty = result.BPDCTY_0
|
||||
self.header.bpdsat = result.BPDSAT_0
|
||||
self.header.bpdcry = result.BPDCRY_0
|
||||
self.header.bpdcrynam = result.BPDCRYNAM_0
|
||||
|
||||
def output(self, import_file: typing.TextIO):
|
||||
"""
|
||||
Output entire order to import_file.
|
||||
"""
|
||||
output = functools.partial(
|
||||
yamamotoyama.x3_imports.output_with_file, import_file
|
||||
)
|
||||
output(self.header.convert_to_strings())
|
||||
for detail in self.details:
|
||||
output(detail.convert_to_strings())
|
||||
for subdetail in detail.subdetails:
|
||||
output(subdetail.convert_to_strings())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|