"""Log in to Waitrose Connect and print the text of the weekly-sales page."""
import requests
from bs4 import BeautifulSoup

# Domino "?OpenAgent" login endpoint.
login_url = "http://www.waitroseconnect.co.uk/waitroseconnect/menu.nsf/Login?OpenAgent"

# NOTE(security): credentials are hard-coded in source; move them to
# environment variables or a secrets store before sharing this script.
username = "Grace Redding"
password = "Beckett1234"

# Form fields expected by the login agent.
payload = {
    "Username": username,
    "Password": password,
}

session = requests.Session()
try:
    # BUG FIX: credentials must be POSTed. The original used session.get()
    # with `data=`, but servers generally ignore a GET body, so the login
    # silently failed (and a GET would expose credentials in server logs).
    # A timeout is added so the script cannot hang indefinitely.
    response = session.post(login_url, data=payload, timeout=30)
    response.raise_for_status()  # Raise an error for bad status codes
    print("Authentication successful!")

    # Fetch the weekly-data documents view with the authenticated session.
    # (Plain string — the original f-string had no placeholders.)
    url = "http://www.waitroseconnect.co.uk/waitroseconnect/reference/WaitroseConnectSalesV2.nsf/WeeklyDataDocsViewForm?OpenForm"
    response = session.get(url, timeout=30)
    response.raise_for_status()

    # Parse the HTML and print only the visible text content.
    soup = BeautifulSoup(response.text, "html.parser")
    print(soup.get_text())
except requests.exceptions.RequestException as e:
    print("Error:", e)
except Exception as e:
    print("An unexpected error occurred:", e)
[text] x
Viewer
*** This page was generated with the meta tag "noindex, nofollow", either because you selected this option before saving or because the system flagged the paste as spam. As a result, this page will never appear in search engines and search bots will not crawl it. There is nothing to worry about; you can still share it with anyone.
Editor
You can edit this paste and save it as a new one: