file encoding, email alerts
parent
28f6f627f5
commit
9f22df7940
|
@ -0,0 +1,46 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
If daily processing crashed, files will be in
|
||||
2 directories, trigger an email if a file is left over
|
||||
"""
|
||||
import pprint
|
||||
import pathlib
|
||||
import smtplib
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
# Directory containing this script; the data directories below live beside it.
THIS_DIRECTORY = pathlib.Path(__file__).parent

# Directories the daily process consumes CSV files from.  Any *.csv still
# present here after the run indicates the process crashed part-way.
INCOMING_DIRECTORY = THIS_DIRECTORY / "incoming_orders"
SHIPMENTS_DIRECTORY = THIS_DIRECTORY / "incoming_shipments"
# Stock-on-hand import drop point.  NOTE(review): not referenced by the
# code visible in this file — presumably used by a related process; confirm
# before removing.
SOH_IMPORT_DIRECTORY = THIS_DIRECTORY / "to_import_SOH"
|
||||
|
||||
def main():
    """Scan the incoming directories for leftover CSV files and alert.

    If the daily processing job crashed, files are left behind in the
    orders and/or shipments directories; collect their names and send
    one alert email listing them.  No email is sent when nothing is
    left over.
    """
    # One pass over both watched directories — same .csv test for each,
    # so there is no need to duplicate the loop body per directory.
    leftover_files = [
        path.name
        for directory in (INCOMING_DIRECTORY, SHIPMENTS_DIRECTORY)
        for path in directory.iterdir()
        if path.name.endswith('.csv')
    ]
    if leftover_files:
        file_alert(leftover_files)
|
||||
|
||||
def file_alert(file_list):
    """Email the list of leftover file names to the alert recipient.

    :param file_list: iterable of file names found in the processing
        directories after the daily run should have cleared them.
    :raises smtplib.SMTPException: if login or sending fails.
    """
    msg = MIMEMultipart()
    msg['Subject'] = 'Source ecommerce: Files Left Over'
    # 'bulk' precedence discourages auto-responders from replying.
    msg['Precedence'] = 'bulk'
    msg['From'] = 'x3report@stashtea.com'
    msg['To'] = 'bleeson@stashtea.com'

    file_string = ', '.join(file_list)
    emailtext = f'files: {file_string}'
    msg.attach(MIMEText(emailtext, 'plain'))

    # SECURITY: the SMTP password is hard-coded in source control.  It
    # should be moved to an environment variable or a secrets store and
    # the exposed password rotated.  Left in place here to preserve
    # runtime behavior.
    with smtplib.SMTP_SSL("smtp.gmail.com", 465) as smtp:
        smtp.login(user='x3reportmk2@yamamotoyama.com', password=r'n</W<7fr"VD~\2&[pZc5')
        smtp.send_message(msg)
|
||||
|
||||
|
||||
# Script entry point: run the leftover-file check only when executed
# directly, not when imported.
if __name__ == "__main__":
    main()
|
|
@ -75,10 +75,10 @@ def retrieve_x12_edi_files():
|
|||
|
||||
|
||||
def process_files(file):
|
||||
with open(file) as source_file:
|
||||
with open(file, encoding='utf8') as source_file:
|
||||
with yamamotoyama.get_connection() as db_connection:
|
||||
with db_connection.transaction() as _:
|
||||
with open(ORDER_DIRECTORY / f'sorted_{file.name}', 'w',newline='') as output:
|
||||
with open(ORDER_DIRECTORY / f'sorted_{file.name}', 'w',newline='',encoding='utf8') as output:
|
||||
csv_reader = csv.reader(source_file)
|
||||
csv_writer = csv.writer(output)
|
||||
csv_writer.writerow(next(csv_reader, None)) #skip header
|
||||
|
|
|
@ -44,9 +44,9 @@ def main():
|
|||
continue
|
||||
else:
|
||||
process_files(file)
|
||||
#shutil.move(file, SHIPMENTS_DIRECTORY / file.name)
|
||||
shutil.move(file, SHIPMENTS_DIRECTORY / file.name)
|
||||
# archives are in the shipping folder
|
||||
#combine_zshpords()
|
||||
combine_zshpords()
|
||||
|
||||
def sftp_server() -> paramiko.SFTPClient:
|
||||
with paramiko.SSHClient() as ssh_client:
|
||||
|
@ -101,7 +101,7 @@ def combine_zshpords():
|
|||
)
|
||||
|
||||
def process_files(file): #I am assuming I am getting a sorted csv file by order number and line id from Source
|
||||
with open(file) as source_file:
|
||||
with open(file, encoding='utf8') as source_file:
|
||||
csv_reader = csv.reader(source_file)
|
||||
sales_order = SalesOrder()
|
||||
previous_order = ''
|
||||
|
@ -120,6 +120,7 @@ def process_files(file): #I am assuming I am getting a sorted csv file by order
|
|||
sales_order.output(import_file)
|
||||
sales_order = SalesOrder()
|
||||
previous_order = current_order
|
||||
pprint.pprint(current_order)
|
||||
shopify_order_info = get_details_from_shopify(current_order)
|
||||
shopify_line_dict = create_shopify_dict(shopify_order_info)
|
||||
for entry in shopify_line_dict:
|
||||
|
|
|
@ -67,7 +67,7 @@ def find_so_from_po(cust_po):
|
|||
).first()["SOHNUM_0"]
|
||||
|
||||
def process_files(file):
|
||||
with open(file) as source_file:
|
||||
with open(file, encoding='utf8') as source_file:
|
||||
csv_reader = csv.reader(source_file)
|
||||
warehouse_shipment = WarehouseShipment()
|
||||
previous_order = ''
|
||||
|
|
Loading…
Reference in New Issue