- import requests
- from bs4 import BeautifulSoup
# Shared HTTP session — reused by every function below so login cookies persist.
s = requests.Session()
# SECURITY NOTE(review): credentials are hardcoded in source and later embedded
# in plain-HTTP URL query strings — move them to environment variables or a
# secrets store before this is shared or committed.
username = "Grace Redding"
password = "Beckett1234"
def login_and_get_week_ending_dates():
    """Log in to Waitrose Connect and scrape the available week-ending dates.

    Returns:
        list[str]: the text of every non-empty anchor on the weekly-data page,
        excluding the "Page Help" link, in page order (callers reverse it for
        newest-first display).

    Raises:
        requests.HTTPError: if the login POST or the weekly-data GET fails.
    """
    s.auth = (username, password)
    login_url = "http://www.waitroseconnect.co.uk/names.nsf?Login"
    payload = {'username': username, 'password': password}
    # Fail loudly on a bad login instead of silently scraping an error page.
    login_response = s.post(login_url, data=payload)
    login_response.raise_for_status()
    # SECURITY NOTE(review): username/password travel in the query string over
    # plain HTTP, so they end up in server/proxy logs — confirm the endpoint
    # really requires this.
    link = f"http://www.waitroseconnect.co.uk/waitroseconnect/reference/WaitroseConnectSalesV2.nsf/WeeklyDataDocsViewForm?OpenForm=&DBPath=domcfg.nsf&Server_Name=www.waitroseconnect.co.uk&Username={username}&Password={password}"
    response = s.get(link)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')
    week_ending_dates = []
    for date_elem in soup.find_all("a", href=True):
        date_text = date_elem.get_text().strip()
        # "Page Help" is a navigation link on the same page, not a date.
        if date_text and date_text != "Page Help":
            week_ending_dates.append(date_text)
    return week_ending_dates
def get_corresponding_week_number(date_to_find, week_ending_dates):
    """Map a week-ending date to the site's internal week number.

    Walks the date list newest-first, counting upward from 9 (the offset the
    remote lookup agent expects), and returns the count at the first match,
    or -1 when the date is not present.
    """
    week_number = 9
    for candidate in reversed(week_ending_dates):
        if candidate == date_to_find:
            return week_number
        week_number += 1
    return -1
def construct_url_for_week_ending_date(selected_date, corresponding_week_number):
    """Build the sales-lookup agent URL for one week-ending date."""
    agent_endpoint = "http://www.waitroseconnect.co.uk/waitroseconnect/reference/WaitroseConnectSalesV2.nsf/LookupWeeklyLineBranchSalesAgent"
    # Positional `&s=` parameters mirror what the site's own form submits.
    query = f"OpenAgent&{corresponding_week_number}&s=135186&s=U&s=2024&s={selected_date}&s=1&s=2&s=0&s=135186"
    return f"{agent_endpoint}?{query}"
def get_response_from_url(url):
    """Fetch *url* with the shared authenticated session and return the raw response."""
    return s.get(url)
def download_csv_sales_data(weekendyear, corresponding_week_number):
    """Request the weekly line/branch sales CSV and return the raw response.

    The agent keys the export on a concatenation of a fixed branch prefix,
    the week-ending year, and the site-internal week number.
    """
    download_url = (
        "http://www.waitroseconnect.co.uk/waitroseconnect/reference/"
        "WaitroseConnectSalesV2.nsf/SalesData.csv"
        f"?OpenAgent&WebLookupWeeklyLineBranchSalesData135186"
        f"{weekendyear}{corresponding_week_number}&s=LB"
    )
    return s.get(download_url)
def main():
    """Interactive driver: log in, let the user pick a week, fetch its sales CSV."""
    week_ending_dates = login_and_get_week_ending_dates()
    display_order = week_ending_dates[::-1]  # newest first, matching the site's listing
    print("Select a Week Ending Date:")
    for i, date in enumerate(display_order, start=1):
        print(f"{i}. {date}")
    selected_index = int(input("Please enter the number corresponding to your desired week ending date: "))
    # Reject out-of-range choices instead of silently wrapping via negative indexing.
    if not 1 <= selected_index <= len(display_order):
        raise ValueError(f"Selection must be between 1 and {len(display_order)}")
    # BUG FIX: menu item N is display_order[N-1]; the previous index
    # `len(week_ending_dates) - selected_index - 1` was off by one and picked
    # the entry one position past the user's choice.
    selected_date = display_order[selected_index - 1]
    print(f"You selected: {selected_date}")
    corresponding_week_number = get_corresponding_week_number(selected_date, week_ending_dates)
    print(f"Corresponding number for {selected_date}: {corresponding_week_number}")
    url = construct_url_for_week_ending_date(selected_date, corresponding_week_number)
    response = get_response_from_url(url)
    soup = BeautifulSoup(response.content, 'html.parser')
    # NOTE(review): parsed text is never used downstream — kept for parity/debugging.
    plain_text = soup.get_text()
    # Assumes dates are formatted like DD/MM/YYYY — TODO confirm against the site.
    weekendyear = selected_date.split("/")[2]
    csvfile = download_csv_sales_data(weekendyear, corresponding_week_number)
    print(csvfile)
# Run the interactive download flow only when executed as a script.
if __name__ == "__main__":
    main()
[text] Z
Viewer
*** This page was generated with the meta tag "noindex, nofollow". This happens either because you selected that option before saving, or because the system flagged the paste as spam. As a result, this page will never appear in search engines and search bots will not crawl it. There is nothing to worry about — you can still share it with anyone.
Editor
You can edit this paste and save as new: