Adding report files and minor changes

master
bleeson 2025-01-10 07:46:23 -08:00
parent a6c460cc3a
commit 4cef690956
14 changed files with 611 additions and 112 deletions

View File

@@ -6,7 +6,7 @@ Problems:
Source does not send accurate unit price, shipping charge, tax, and discounts. To
bring something accurate in we use the old Shopify integration. This brings up
another issue: the old integration brings in orders at creation, so if these fields
get modified (like from a refund) we are brining in the wrong amount.
get modified (like from a refund) we are bringing in the wrong amount.
Questions:
How does Source handle multiple lots on a line?
@@ -28,7 +28,7 @@ To catch issues we need to look at:
-missing sales orders that came into the shipping table but failed to import
-missing deliveries that failed during import, e.g. not enough stock
To find these use the sql files in "daily sql reports"
To find these use the sql files in "sql reports"
----------------------------------------------------------------------------------------
Reimporting:

View File

@@ -8,22 +8,44 @@ import pathlib
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import shutil
import os
THIS_DIRECTORY = pathlib.Path(__file__).parent
INCOMING_DIRECTORY = THIS_DIRECTORY / "incoming_orders"
SHIPMENTS_DIRECTORY = THIS_DIRECTORY / "incoming_shipments"
SOH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SOH"
NEW_FILES_DIRECTORY = THIS_DIRECTORY / "new_files_from_ftp"
PROBLEMS_DIRECTORY = THIS_DIRECTORY / "problems"
SOH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SOH"
SDH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SDH"
def main():
#retrieve_x12_edi_files()#TODO remove this as it's handled by the earlier process
"""
Check for leftover files, which mean something unexpected happened. Then clean up the files
so there isn't a mess for the next run.
"""
file_count = []
for file in INCOMING_DIRECTORY.iterdir():
if file.name[-4:] == '.csv':
file_count.append(file.name)
if file.name[-4:] == '.csv':#move the file that drove the process
file_count.append(f'order: {file.name}')
shutil.move(file, PROBLEMS_DIRECTORY / file.name)
for file in SOH_IMPORT_DIRECTORY.iterdir():#delete any import files we were in the middle of
if file.name[-4:] == '.dat':
os.remove(file)
for file in SHIPMENTS_DIRECTORY.iterdir():
if file.name[-4:] == '.csv':
file_count.append(file.name)
file_count.append(f'shipment: {file.name}')
shutil.move(file, PROBLEMS_DIRECTORY / file.name)
for file in SDH_IMPORT_DIRECTORY.iterdir():
if file.name[-4:] == '.dat':
os.remove(file)
for file in NEW_FILES_DIRECTORY.iterdir():
if file.name[-4:] == '.csv':
file_count.append(f'new file: {file.name}')
shutil.move(file, PROBLEMS_DIRECTORY / file.name)
if file_count:
file_alert(file_count)

View File

@@ -93,7 +93,7 @@ def clean_lines(file):
for i, row in enumerate(data):
if len(row) < len(headers):
next_line = data.pop(i+1)
csv_writer.writerow(row+next_line)
csv_writer.writerow(row+next_line[1:])
else:
csv_writer.writerow(row)
@@ -105,7 +105,7 @@ def process_files(file):
with open(ORDER_DIRECTORY / f'sorted_{file.name}', 'w',newline='',encoding='utf8') as output:
csv_reader = csv.reader(source_file)
csv_writer = csv.writer(output)
csv_writer.writerow(next(csv_reader, None)) #skip header
#csv_writer.writerow(next(csv_reader, None)) #skip header 2024-09-05 clean process removes headers
csv_sort = sorted(csv_reader, key=lambda row: row[6], reverse=False)
for row in csv_sort:
csv_writer.writerow(row)

View File

@@ -107,61 +107,61 @@ def process_files(file): #I am assuming I am getting a sorted csv file by order
previous_order = ''
current_order = ''
for num, row in enumerate(csv_reader):
if num == 0:
continue #skip header lines
if num >= 1: #gather header information
current_order = row[6]
if current_order != previous_order:
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
if sales_order.header.cusordref != '':
with yamamotoyama.x3_imports.open_import_file(
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{previous_order}_{time_stamp}_{sales_order.header.cusordref}.dat"
) as import_file:
sales_order.output(import_file)
sales_order = SalesOrder()
previous_order = current_order
pprint.pprint(current_order)
shopify_order_info = get_details_from_shopify(current_order)
shopify_line_dict = create_shopify_dict(shopify_order_info)
for entry in shopify_line_dict:
sales_order.append(
SalesOrderDetail(
itmref=shopify_line_dict[entry]['sku'],
qty=int(shopify_line_dict[entry]['quantity']),
gropri=shopify_line_dict[entry]['price']
)
# if num == 0: #disabled 2025-01-10
# continue #skip header lines
# if num >= 1: #gather header information
current_order = row[6]
if current_order != previous_order:
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
if sales_order.header.cusordref != '':
with yamamotoyama.x3_imports.open_import_file(
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{previous_order}_{time_stamp}_{sales_order.header.cusordref}.dat"
) as import_file:
sales_order.output(import_file)
sales_order = SalesOrder()
previous_order = current_order
pprint.pprint(current_order)
shopify_order_info = get_details_from_shopify(current_order)
shopify_line_dict = create_shopify_dict(shopify_order_info)
for entry in shopify_line_dict:
sales_order.append(
SalesOrderDetail(
itmref=shopify_line_dict[entry]['sku'],
qty=int(shopify_line_dict[entry]['quantity']),
gropri=shopify_line_dict[entry]['price']
)
ship_site = row[0]
order_id = row[6]
order_date = row[9]
customer_name = row[10]
# shipadd1 = row[9] # address information is not stored in X3
# shipadd2 = row[10]
# shipcity = row[11]
# shipstate = row[12]
# shipzip = row[13]
tracking = row[16]
weight = row[18]
taxes = shopify_order_info[0]['current_total_tax']#row[22]
ship_charge = shopify_order_info[0]['shipping_lines__price']#row[21]
discount = shopify_order_info[0]['current_total_discounts']#row[24]
sales_order.header.cusordref = order_id
sales_order.header.orddat = datetime.datetime.strptime(order_date,'%m/%d/%Y %I:%M:%S %p').strftime('%Y%m%d') # what comes from SL
#sales_order.header.orddat = datetime.datetime.strptime(order_date,'%m/%d/%Y %H:%M').strftime('%Y%m%d') #default when we sort in Excel
sales_order.header.stofcy = ship_site
sales_order.header.bpdnam = customer_name
sales_order.header.invdtaamt_5 = ship_charge
sales_order.header.invdtaamt_7 = discount
sales_order.header.invdtaamt_8 = taxes
)
ship_site = row[0]
order_id = row[6]
order_date = row[9]
customer_name = row[10]
# shipadd1 = row[9] # address information is not stored in X3
# shipadd2 = row[10]
# shipcity = row[11]
# shipstate = row[12]
# shipzip = row[13]
tracking = row[16]
weight = row[18]
taxes = shopify_order_info[0]['current_total_tax']#row[22]
ship_charge = shopify_order_info[0]['shipping_lines__price']#row[21]
discount = shopify_order_info[0]['current_total_discounts']#row[24]
sales_order.header.cusordref = order_id
sales_order.header.orddat = datetime.datetime.strptime(order_date,'%m/%d/%Y %I:%M:%S %p').strftime('%Y%m%d') # what comes from SL
#sales_order.header.orddat = datetime.datetime.strptime(order_date,'%m/%d/%Y %H:%M').strftime('%Y%m%d') #default when we sort in Excel
sales_order.header.stofcy = ship_site
sales_order.header.bpdnam = customer_name
sales_order.header.invdtaamt_5 = ship_charge
sales_order.header.invdtaamt_7 = discount
sales_order.header.invdtaamt_8 = taxes
#gather line data
# line_product = row[1]
# line_qty = int(row[3])
# line_lot = row[4]
# line_price = row[20]
# shopify_item_data = get_item_from_shopify_order(shopify_line_dict, line_product, line_qty)
#gather line data
# line_product = row[1]
# line_qty = int(row[3])
# line_lot = row[4]
# line_price = row[20]
# shopify_item_data = get_item_from_shopify_order(shopify_line_dict, line_product, line_qty)
# shopify_line_dict = remove_item_from_shopify_order(shopify_line_dict, shopify_item_data['sku'], shopify_item_data['quantity'],shopify_item_data['price'])
# shopify_line_dict = remove_item_from_shopify_order(shopify_line_dict, shopify_item_data['sku'], shopify_item_data['quantity'],shopify_item_data['price'])
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
with yamamotoyama.x3_imports.open_import_file(
SOH_IMPORT_DIRECTORY / f"ZSHPORD_{current_order}_{time_stamp}_{sales_order.header.cusordref}.dat"

View File

@@ -52,6 +52,7 @@ def combine_zship945s():
)
def find_so_from_po(cust_po):
pprint.pprint(cust_po)
with yamamotoyama.get_connection() as db_connection:
return db_connection.query(
"""
@@ -73,57 +74,54 @@ def process_files(file):
previous_order = ''
current_order = ''
for num, row in enumerate(csv_reader):
if num == 0:
continue #skip header lines
if num >= 1: #gather header information
current_order = row[6]
if current_order != previous_order:
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
if warehouse_shipment.sohnum != '':
with yamamotoyama.x3_imports.open_import_file(
SDH_IMPORT_DIRECTORY / f"ZSHIP945S_{warehouse_shipment.sohnum}_{time_stamp}.dat"
) as import_file:
warehouse_shipment.output(import_file)
warehouse_shipment = WarehouseShipment()
previous_order = current_order
sohnum = find_so_from_po(current_order)
ship_date = row[17]
customer_name = row[10]
# shipadd1 = row[9] # address information is not stored in X3
# shipadd2 = row[10]
# shipcity = row[11]
# shipstate = row[12]
# shipzip = row[13]
tracking = row[16]
weight = row[18]
# ship_charge = row[22]
# taxes = "?" #unused
# ship_site = "?" #unused
# discount = "?" #unused
warehouse_shipment.sohnum = sohnum
warehouse_shipment.header.sohnum = sohnum
warehouse_shipment.header.shidat = datetime.datetime.strptime(ship_date,'%m/%d/%Y %I:%M:%S %p')# what comes from SL
#warehouse_shipment.header.shidat = datetime.datetime.strptime(ship_date,'%m/%d/%Y %H:%M')#default when we sort in Excel
warehouse_shipment.header.ylicplate = tracking
warehouse_shipment.header.growei = weight
#gather line data
line_product = row[1]
line_qty = row[3]
line_lot = row[4]
line_price = row[20]
subdetail = WarehouseShipmentSubDetail(
qtypcu=-1 * int(line_qty),
lot=line_lot,
)
warehouse_shipment.append(
WarehouseShipmentDetail(
sohnum=sohnum,
itmref=line_product,
qty=int(line_qty),
gropri=line_price,
),
subdetail,
)
current_order = row[6]
if current_order != previous_order:
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
if warehouse_shipment.sohnum != '':
with yamamotoyama.x3_imports.open_import_file(
SDH_IMPORT_DIRECTORY / f"ZSHIP945S_{warehouse_shipment.sohnum}_{time_stamp}.dat"
) as import_file:
warehouse_shipment.output(import_file)
warehouse_shipment = WarehouseShipment()
previous_order = current_order
sohnum = find_so_from_po(current_order)
ship_date = row[17]
customer_name = row[10]
# shipadd1 = row[9] # address information is not stored in X3
# shipadd2 = row[10]
# shipcity = row[11]
# shipstate = row[12]
# shipzip = row[13]
tracking = row[16]
weight = row[18]
# ship_charge = row[22]
# taxes = "?" #unused
# ship_site = "?" #unused
# discount = "?" #unused
warehouse_shipment.sohnum = sohnum
warehouse_shipment.header.sohnum = sohnum
warehouse_shipment.header.shidat = datetime.datetime.strptime(ship_date,'%m/%d/%Y %I:%M:%S %p')# what comes from SL
#warehouse_shipment.header.shidat = datetime.datetime.strptime(ship_date,'%m/%d/%Y %H:%M')#default when we sort in Excel
warehouse_shipment.header.ylicplate = tracking
warehouse_shipment.header.growei = weight
#gather line data
line_product = row[1]
line_qty = row[3]
line_lot = row[4]
line_price = row[20]
subdetail = WarehouseShipmentSubDetail(
qtypcu=-1 * int(line_qty),
lot=line_lot,
)
warehouse_shipment.append(
WarehouseShipmentDetail(
sohnum=sohnum,
itmref=line_product,
qty=int(line_qty),
gropri=line_price,
),
subdetail,
)
time_stamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
with yamamotoyama.x3_imports.open_import_file(
SDH_IMPORT_DIRECTORY / f"ZSHIP945S_{warehouse_shipment.sohnum}_{time_stamp}.dat"

View File

@@ -0,0 +1,239 @@
#!/usr/bin/env python3
"""
Find import files from the x3 import archive folder
and stick them together for a single reimport.
Useful when stock levels prevented multiple shipments from
importing.
"""
import shutil
import pathlib
import pprint
THIS_DIRECTORY = pathlib.Path(__file__).parent
X3_IMPORT_ARCHIVE = THIS_DIRECTORY / "to_import_SDH" / "archive"
REIMPORT_DIRECTORY = THIS_DIRECTORY / "shipment_reimport"
LIST_OF_UNSHIPPED_ORDERS = [
'WEB24ECS038407',
'WEB24ECS038408',
'WEB24ECS038409',
'WEB24ECS038410',
'WEB24ECS038411',
'WEB24ECS038412',
'WEB24ECS038413',
'WEB24ECS038416',
'WEB24ECS038419',
'WEB24ECS038422',
'WEB24ECS038425',
'WEB24ECS038428',
'WEB24ECS038431',
'WEB24ECS038434',
'WEB24ECS038438',
'WEB24ECS038440',
'WEB24ECS038443',
'WEB24ECS038446',
'WEB24ECS038449',
'WEB24ECS038452',
'WEB24ECS038455',
'WEB24ECS038458',
'WEB24ECS038464',
'WEB24ECS038467',
'WEB24ECS038469',
'WEB24ECS038472',
'WEB24ECS038475',
'WEB24ECS038478',
'WEB24ECS038481',
'WEB24ECS038484',
'WEB24ECS038487',
'WEB24ECS038490',
'WEB24ECS038493',
'WEB24ECS038498',
'WEB24ECS038501',
'WEB24ECS038502',
'WEB24ECS038505',
'WEB24ECS038508',
'WEB24ECS038511',
'WEB24ECS038514',
'WEB24ECS038517',
'WEB24ECS038520',
'WEB24ECS038523',
'WEB24ECS038526',
'WEB24ECS038529',
'WEB24ECS038530',
'WEB24ECS038531',
'WEB24ECS038532',
'WEB24ECS038533',
'WEB24ECS038534',
'WEB24ECS038535',
'WEB24ECS038536',
'WEB24ECS038537',
'WEB24ECS038538',
'WEB24ECS038539',
'WEB24ECS038540',
'WEB24ECS038541',
'WEB24ECS038542',
'WEB24ECS038543',
'WEB24ECS038891',
'WEB24ECS039064',
'WEB24ECS039173',
'WEB24ECS039480',
'WEB24ECS039527',
'WEB24ECS039607',
'WEB24ECS039748',
'WEB24ECS039897',
'WEB24ECS040315',
'WEB24ECS040451',
'WEB24ECS041154',
'WEB24ECS041235',
'WEB24ECS041430',
'WEB24ECS041616',
'WEB24ECS041617',
'WEB24ECS041624',
'WEB24ECS041632',
'WEB24ECS041688',
'WEB24ECS041693',
'WEB24ECS041707',
'WEB24ECS041710',
'WEB24ECS041720',
'WEB24ECS041752',
'WEB24ECS041754',
'WEB24ECS041755',
'WEB24ECS041761',
'WEB24ECS041777',
'WEB24ECS041783',
'WEB24ECS041831',
'WEB24ECS041841',
'WEB24ECS041928',
'WEB24ECS041948',
'WEB24ECS042006',
'WEB24ECS042030',
'WEB24ECS042229',
'WEB24ECS042256',
'WEB24ECS042285',
'WEB24ECS042302',
'WEB24ECS042332',
'WEB24ECS042385',
'WEB24ECS042402',
'WEB24ECS042428',
'WEB24ECS042435',
'WEB24ECS042490',
'WEB24ECS042730',
'WEB24ECS042756',
'WEB24ECS042794',
'WEB24ECS043471',
'WEB24ECS043487',
'WEB24ECS043502',
'WEB24ECS043524',
'WEB24ECS043536',
'WEB24ECS043541',
'WEB24ECS043622',
'WEB24ECS043643',
'WEB24ECS043730',
'WEB24ECS043776',
'WEB24ECS043841',
'WEB24ECS043862',
'WEB24ECS043866',
'WEB24ECS043881',
'WEB24ECS043958',
'WEB24ECS043962',
'WEB24ECS043964',
'WEB24ECS043969',
'WEB24ECS043971',
'WEB24ECS043983',
'WEB24ECS043991',
'WEB24ECS044004',
'WEB24ECS044036',
'WEB24ECS044040',
'WEB24ECS044043',
'WEB24ECS044062',
'WEB24ECS044120',
'WEB24ECS044121',
'WEB24ECS044122',
'WEB24ECS044123',
'WEB24ECS044124',
'WEB24ECS044125',
'WEB24ECS044126',
'WEB24ECS044127',
'WEB24ECS044128',
'WEB24ECS044129',
'WEB24ECS044130',
'WEB24ECS044135',
'WEB24ECS044153',
'WEB24ECS044171',
'WEB24ECS044174',
'WEB24ECS044200',
'WEB24ECS044209',
'WEB24ECS044211',
'WEB24ECS044213',
'WEB24ECS044236',
'WEB24ECS044246',
'WEB24ECS044280',
'WEB24ECS044282',
'WEB24ECS044284',
'WEB24ECS044287',
'WEB24ECS044288',
'WEB24ECS044304',
'WEB24ECS044345',
'WEB24ECS044348',
'WEB24ECS044355',
'WEB25ECS000014',
'WEB25ECS000043',
'WEB25ECS000168',
'WEB25ECS000171',
'WEB25ECS000173',
'WEB25ECS000175',
'WEB25ECS000246',
'WEB25ECS000248',
'WEB25ECS000288',
'WEB25ECS000302',
'WEB25ECS000305',
'WEB25ECS000306',
'WEB25ECS000375',
'WEB25ECS000376',
'WEB25ECS000386',
'WEB25ECS000444',
'WEB25ECS000458',
'WEB25ECS000462',
'WEB25ECS000463',
'WEB25ECS000498',
'WEB25ECS000508',
'WEB25ECS000552',
'WEB25ECS000554',
'WEB25ECS000578',
'WEB25ECS000583',
'WEB25ECS000610',
'WEB25ECS000635',
'WEB25ECS000637',
'WEB25ECS000644',
'WEB25ECS000669',
'WEB25ECS000720',
]
def main():
#run through all files in list and combine them into a single import
with open(REIMPORT_DIRECTORY / 'ZSHIP945S.dat', 'w',encoding="utf-8", newline="\n") as combined_import_file:
files = get_files()
for file in files:
with file.open(
"r", encoding="utf-8", newline="\n"
) as individual_import_file:
for line in individual_import_file:
combined_import_file.write(line)
#search the archive directory for the files, write their contents to a single file
def get_files():
file_list = []
for missing_shipment in LIST_OF_UNSHIPPED_ORDERS:
for file in X3_IMPORT_ARCHIVE.iterdir():
order_number = file.name[10:24] #archive files are named ZSHIP945S_<SO#>_<timestamp>.dat, so [10:24] is the 14-character SO number
if missing_shipment == order_number:
file_list.append(file)
return file_list
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,62 @@
#!/usr/bin/env python3
"""
Find import files from the x3 import archive folder
and stick them together for a single reimport.
Useful when multiple sales orders failed to import.
"""
import shutil
import pathlib
import pprint
THIS_DIRECTORY = pathlib.Path(__file__).parent
X3_IMPORT_ARCHIVE = THIS_DIRECTORY / "to_import_SOH" / "archive"
REIMPORT_DIRECTORY = THIS_DIRECTORY / "shipment_reimport"
LIST_OF_UNIMPORTED_ORDERS = [
'514113',
'514092',
'513984',
'514110',
'514055',
'514141',
'514062',
'514051',
'514142',
'514095',
'513897',
'513918',
'513896',
'513884',
'513927'
]
def main():
#run through all files in list and combine them into a single import
with open(REIMPORT_DIRECTORY / 'ZSHPORD.dat', 'w',encoding="utf-8", newline="\n") as combined_import_file:
files = get_files()
for file in files:
with file.open(
"r", encoding="utf-8", newline="\n"
) as individual_import_file:
for line in individual_import_file:
combined_import_file.write(line)
#search the archive directory for the files, write their contents to a single file
def get_files():
file_list = []
for missing_order in LIST_OF_UNIMPORTED_ORDERS:
for file in X3_IMPORT_ARCHIVE.iterdir():
order_number = file.name[8:14] #archive files are named ZSHPORD_<order#>_<timestamp>_<order#>.dat, so [8:14] is the 6-digit order number
if missing_order == order_number:
pprint.pprint(f'found {missing_order}')
file_list.append(file)
return file_list
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,16 @@
--any actual splits should be entered
--by prepending P2- or similar into the PO
--also need to update the SL_ECOMM table (a rough sketch follows this query)
--most important is not to duplicate the
--invoice elements
select
CUSORDREF_0,
count(CUSORDREF_0)
from PROD.SORDER SOH
where
SOH.SOHTYP_0 = 'WEB'
and SOH.STOFCY_0 in ('SSB','SMB')
group by
CUSORDREF_0
having
count(CUSORDREF_0) > 1
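
The comments above describe the manual fix for a real split: prepend P2- (or similar) to the PO on the second X3 order, then update SL_ECOMM to match. A rough sketch of the SL_ECOMM side, not part of this commit, using a placeholder order number and item list:

update analytics.dbo.SL_ECOMM
set order_id = concat('P2-', order_id) --P2- prefix per the note above
where order_id = '1234567890' --placeholder Shopify order number
and item in ('ITEM1', 'ITEM2') --placeholder items moved to the split order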

View File

@@ -0,0 +1,32 @@
select
SOH.STOFCY_0 [Site],
SOH.SOHNUM_0 [SO#],
SOH.CUSORDREF_0 [Shopify#],
coalesce(SDH.SDHNUM_0,'') [Shipment#],
case SOH.ORDSTA_0
when 1 then 'Open'
when 2 then 'Closed'
end [Order status],
SOQ.ITMREF_0 [X3 Order Item#],
SOQ.QTYSTU_0 [X3 Order qty],
[SL_ECOMM].lot [SL lot],
SLF.AAACUMQTY_0 [X3 available stock for SL lot],
[SL_ECOMM].[tracking]
from PROD.SORDER SOH
left join PROD.SDELIVERY SDH
on SDH.SOHNUM_0 = SOH.SOHNUM_0
left join PROD.SORDERQ SOQ
on SOQ.SOHNUM_0 = SOH.SOHNUM_0
left join [analytics].[dbo].[SL_ECOMM]
on SOQ.ITMREF_0 = [SL_ECOMM].item
and SOQ.STOFCY_0 = [SL_ECOMM].site
and SOH.CUSORDREF_0 = [SL_ECOMM].order_id
left join PROD.STOLOTFCY SLF
on [SL_ECOMM].site = SLF.STOFCY_0
and [SL_ECOMM].item = SLF.ITMREF_0
and [SL_ECOMM].lot = SLF.LOT_0
where SOH.SOHTYP_0 = 'WEB'
and SOH.STOFCY_0 in ('SSB','SMB')
and SOH.ORDSTA_0 <> 2
--and SLF.AAACUMQTY_0 is null
order by 2, 3

View File

@@ -0,0 +1,13 @@
--find PO in files sent by SL
--trim file to just the missing po (a rough sketch follows this query)
select
[SL_ECOMM].site,
[SL_ECOMM].order_id,
SOH.SOHNUM_0
from [analytics].[dbo].[SL_ECOMM]
left join PROD.SORDER SOH
on [SL_ECOMM].site = SOH.STOFCY_0
and [SL_ECOMM].order_id = SOH.CUSORDREF_0
and SOH.SOHTYP_0 = 'WEB'
where
SOH.SOHNUM_0 is null
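
The "trim file to just the missing po" step is manual; below is a rough sketch of scripting it, not part of this commit. The problems and incoming_orders folder names and the example file name are assumptions; only the order-id column (row[6]) follows the other import scripts.

#!/usr/bin/env python3
"""
Keep only one missing PO's rows from an SL csv so it can be re-run through the order import.
"""
import csv
import pathlib
THIS_DIRECTORY = pathlib.Path(__file__).parent
MISSING_PO = '1234567890' #placeholder Shopify order number
def trim_file(file_name):
    source = THIS_DIRECTORY / 'problems' / file_name #assumed location of the full SL file
    target = THIS_DIRECTORY / 'incoming_orders' / f'trimmed_{file_name}'
    with source.open('r', encoding='utf8', newline='') as source_file:
        with target.open('w', encoding='utf8', newline='') as output:
            csv_reader = csv.reader(source_file)
            csv_writer = csv.writer(output)
            for row in csv_reader:
                if len(row) > 6 and row[6] == MISSING_PO: #keep only the missing order's rows
                    csv_writer.writerow(row)
if __name__ == '__main__':
    trim_file('example_from_sl.csv') #placeholder file name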

View File

@@ -0,0 +1,19 @@
select
distinct
concat('''',SOH.SOHNUM_0,''',') [SO#]
from PROD.SORDER SOH
left join PROD.SDELIVERY SDH
on SDH.SOHNUM_0 = SOH.SOHNUM_0
left join PROD.SORDERQ SOQ
on SOQ.SOHNUM_0 = SOH.SOHNUM_0
left join [analytics].[dbo].[SL_ECOMM]
on SOQ.ITMREF_0 = [SL_ECOMM].item
and SOQ.STOFCY_0 = [SL_ECOMM].site
and SOH.CUSORDREF_0 = [SL_ECOMM].order_id
left join PROD.STOLOTFCY SLF
on [SL_ECOMM].site = SLF.STOFCY_0
and [SL_ECOMM].item = SLF.ITMREF_0
and [SL_ECOMM].lot = SLF.LOT_0
where SOH.SOHTYP_0 = 'WEB'
and SOH.STOFCY_0 in ('SSB','SMB')
and SOH.ORDSTA_0 <> 2

View File

@@ -0,0 +1,41 @@
select
--SOH.SHIDAT_0,
SOH.STOFCY_0 [Site],
--SOH.SOHNUM_0 [SO#],
--SOH.CUSORDREF_0 [Shopify#],
--coalesce(SDH.SDHNUM_0,'') [Shipment#],
--case SOH.ORDSTA_0
-- when 1 then 'Open'
-- when 2 then 'Closed'
--end [Order status],
SOQ.ITMREF_0 [X3 Order Item#],
sum(SOQ.QTYSTU_0) [X3 Order qty],
[SL_ECOMM].lot [SL lot],
SLF.AAACUMQTY_0 [X3 available stock for SL lot],
sum(SOQ.QTYSTU_0) - SLF.AAACUMQTY_0 [Qty needed]
--[SL_ECOMM].[tracking]
from PROD.SORDER SOH
left join PROD.SDELIVERY SDH
on SDH.SOHNUM_0 = SOH.SOHNUM_0
left join PROD.SORDERQ SOQ
on SOQ.SOHNUM_0 = SOH.SOHNUM_0
left join [analytics].[dbo].[SL_ECOMM]
on SOQ.ITMREF_0 = [SL_ECOMM].item
and SOQ.STOFCY_0 = [SL_ECOMM].site
and SOH.CUSORDREF_0 = [SL_ECOMM].order_id
left join PROD.STOLOTFCY SLF
on [SL_ECOMM].site = SLF.STOFCY_0
and [SL_ECOMM].item = SLF.ITMREF_0
and [SL_ECOMM].lot = SLF.LOT_0
where SOH.SOHTYP_0 = 'WEB'
and SOH.STOFCY_0 in ('SSB','SMB')
and SOH.ORDSTA_0 <> 2
and SOH.SHIDAT_0 between '11-1-2024' and '11-30-2024'
--and SLF.AAACUMQTY_0 is null
group by
SOH.STOFCY_0,
SOQ.ITMREF_0,
[SL_ECOMM].lot,
SLF.AAACUMQTY_0
having sum(SOQ.QTYSTU_0) > SLF.AAACUMQTY_0
order by 1, 2

View File

@@ -0,0 +1,32 @@
select
SOH.STOFCY_0 [Site],
SOH.SOHNUM_0 [SO#],
SOH.CUSORDREF_0 [Shopify#],
coalesce(SDH.SDHNUM_0,'') [Shipment#],
case SOH.ORDSTA_0
when 1 then 'Open'
when 2 then 'Closed'
end [Order status],
SOQ.ITMREF_0 [X3 Order Item#],
SOQ.QTYSTU_0 [X3 Order qty],
[SL_ECOMM].lot [SL lot],
SLF.AAACUMQTY_0 [X3 available stock for SL lot],
[SL_ECOMM].[tracking]
from PROD.SORDER SOH
left join PROD.SDELIVERY SDH
on SDH.SOHNUM_0 = SOH.SOHNUM_0
left join PROD.SORDERQ SOQ
on SOQ.SOHNUM_0 = SOH.SOHNUM_0
left join [analytics].[dbo].[SL_ECOMM]
on SOQ.ITMREF_0 = [SL_ECOMM].item
and SOQ.STOFCY_0 = [SL_ECOMM].site
and SOH.CUSORDREF_0 = [SL_ECOMM].order_id
left join PROD.STOLOTFCY SLF
on [SL_ECOMM].site = SLF.STOFCY_0
and [SL_ECOMM].item = SLF.ITMREF_0
and [SL_ECOMM].lot = SLF.LOT_0
where SOH.SOHTYP_0 = 'WEB'
and SOH.STOFCY_0 in ('SSB','SMB')
and SOH.ORDSTA_0 <> 2
--and SLF.AAACUMQTY_0 is null
order by 2, 3, 6

View File

@@ -0,0 +1,25 @@
--includes very recent shipments that SL hasn't reported yet
with sl_ecomm as (
select
distinct
[order_id]
from analytics.dbo.SL_ECOMM
)
select
eso.order_number,
cast(eso.created_at as date)[creation/payment date],
cast(eso.updated_at as date)[fulfilled date],
isnull(sl_ecomm.order_id,'no') [received data from Source],
isnull(SOH.SOHNUM_0,'none') [X3 Sales order]
from staging.dbo.ecommerce_shipped_orders eso
left join sl_ecomm
on eso.order_number = sl_ecomm.order_id
left join x3.PROD.SORDER SOH
on eso.order_number = SOH.CUSORDREF_0
and SOH.SOHTYP_0 = 'WEB'
where
SOH.SOHNUM_0 is null
--sl_ecomm.order_id is null
and eso.updated_at >= '6-1-2024'
and eso.order_number not in ('351445','486270','482841','485954','450287','514490')
order by 2 desc