[text] D

Viewer

  1. import requests
  2. from bs4 import BeautifulSoup
  3. import csv
  4.  
  5. def get_username_and_password():
  6.     username = input("Enter your username: ")
  7.     password = input("Enter your password: ")
  8.     return username, password
  9.  
  10. def login_and_get_week_ending_dates(username, password):
  11.     try:
  12.         s = requests.Session()
  13.         s.auth = (username, password)
  14.         login_url = "http://www.waitroseconnect.co.uk/names.nsf?Login"
  15.         payload = {'username': username, 'password': password}
  16.         s.post(login_url, data=payload)
  17.         link = f"http://www.waitroseconnect.co.uk/waitroseconnect/reference/WaitroseConnectSalesV2.nsf/WeeklyDataDocsViewForm?OpenForm=&DBPath=domcfg.nsf&Server_Name=www.waitroseconnect.co.uk&Username={username}&Password={password}"
  18.         response = s.get(link)
  19.         soup = BeautifulSoup(response.content, 'html.parser')
  20.         week_ending_dates = [date_elem.get_text().strip() for date_elem in soup.find_all("a", href=True) if date_elem.get_text().strip() and date_elem.get_text().strip() != "Page Help"]
  21.         return week_ending_dates
  22.     except Exception as e:
  23.         print(f"Error occurred during login and fetching week ending dates: {e}")
  24.         return []
  25.  
  26. # The rest of the code remains the same
  27.  
  28. def main():
  29.     try:
  30.         username, password = get_username_and_password()
  31.         week_ending_dates = login_and_get_week_ending_dates(username, password)
  32.         if not week_ending_dates:
  33.             print("Failed to fetch week ending dates. Exiting...")
  34.             return
  35.  
  36.         print("Select a Week Ending Date:")
  37.         for i, date in enumerate(week_ending_dates[::-1], start=1):
  38.             print(f"{i}. {date}")
  39.  
  40.         selected_index = int(input("Please enter the number corresponding to your desired week ending date: "))
  41.         adjusted_index = len(week_ending_dates) - selected_index
  42.         selected_date = week_ending_dates[adjusted_index]
  43.         print(f"You selected: {selected_date}")
  44.  
  45.         corresponding_week_number = get_corresponding_week_number(selected_date, week_ending_dates)
  46.         if corresponding_week_number == -1:
  47.             print("Failed to find corresponding week number. Exiting...")
  48.             return
  49.  
  50.         print(f"Corresponding number for {selected_date}: {corresponding_week_number}")
  51.  
  52.         url = construct_url_for_week_ending_date(selected_date, corresponding_week_number)
  53.         if not url:
  54.             print("Failed to construct URL. Exiting...")
  55.             return
  56.  
  57.         response = get_response_from_url(url)
  58.         if not response:
  59.             print("Failed to fetch CSV data. Exiting...")
  60.             return
  61.  
  62.         soup = BeautifulSoup(response.content, 'html.parser')
  63.         plain_text = soup.get_text()
  64.         weekendyear = selected_date.split("/")[2]
  65.         csvfile = download_csv_sales_data(weekendyear, corresponding_week_number)
  66.  
  67.         if csvfile:
  68.             entries = [line.split(",") for line in csvfile.strip().split("\n")]
  69.             filename = f"WebLookupWeeklyLineBranchSalesData135186{weekendyear}{corresponding_week_number}.csv"
  70.             save_to_csv(entries, filename)
  71.         else:
  72.             print("Failed to download the file.")
  73.     except Exception as e:
  74.         print(f"An error occurred: {e}")
  75.  
  76. if __name__ == "__main__":
  77.     main()

Editor

You can edit this paste and save as new:


File Description
  • D
  • Paste Code
  • 07-May-2024
  • 3.26 Kb
You can Share it: