from io import StringIO

import requests
import pandas as pd
from bs4 import BeautifulSoup

# URL of the reports page
url = 'https://toolkit.tesco.com/partner/reports/'

# Options for each category/subcategory
Country = ['UK', 'ROI']
Report_type = ['Sales and stock', 'Range conformance', 'Promotional funding',
               'Auto sales out retro deals', 'Fulfilment fee', 'Performance summary',
               'Cost price amendments', 'Purchase order amendments']
Product_subgroup = ['All product subgroups']
Products = ['All products']
View = ['TPNB – Total sales', 'TPNB – Sales x store', 'TPNB – Sales x store format',
        'TPNB - Sales x DC', 'Product subgroup - Total sales', 'Stores - Total sales',
        'Store Format - Total sales']
Time_period = ['Last full week (Wk 08)']

# Iterate over every combination of options
for country in Country:
    for report_type in Report_type:
        for product_subgroup in Product_subgroup:
            for product in Products:
                for view in View:
                    for time_period in Time_period:
                        # Query parameters describing the selected options
                        options_data = {
                            'Country': country,
                            'Report type': report_type,
                            'Product subgroup': product_subgroup,
                            'Product': product,
                            'View': view,
                            'Time period': time_period
                        }
                        try:
                            # Request the page for this combination of options
                            response = requests.get(url, params=options_data)
                            response.raise_for_status()  # Raise an exception for 4xx or 5xx responses

                            # Parse the HTML response
                            soup = BeautifulSoup(response.text, 'html.parser')

                            # Find all <table> tags and convert each one to a DataFrame
                            tables = soup.find_all('table')
                            dataframes = [pd.read_html(StringIO(str(table)))[0] for table in tables]

                            if not dataframes:
                                print(f"No tables found for: {options_data}")
                                continue

                            # Combine the DataFrames into one
                            df = pd.concat(dataframes)

                            # Save the combined DataFrame as an Excel file
                            filename = f"{country}_{report_type}_{product}_{view}_{time_period}.xlsx"
                            df.to_excel(filename, index=False)

                            print(f"Report downloaded and saved as {filename}")
                        except requests.exceptions.RequestException as err:
                            # Covers HTTP errors (4xx/5xx) as well as connection and timeout failures
                            print(f"An error occurred: {err}")
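
If the six nested loops become unwieldy, the same set of option combinations can be generated with itertools.product from the standard library. This is only a sketch, assuming the option lists defined above; the loop variable is renamed to prod so it does not shadow itertools.product:

from itertools import product

# Generate every (country, report type, ..., time period) combination in one pass
for country, report_type, product_subgroup, prod, view, time_period in product(
        Country, Report_type, Product_subgroup, Products, View, Time_period):
    options_data = {
        'Country': country,
        'Report type': report_type,
        'Product subgroup': product_subgroup,
        'Product': prod,
        'View': view,
        'Time period': time_period
    }
    # ...same request / parse / save logic as in the nested version above...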

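Note that https://toolkit.tesco.com/partner/reports/ is a partner portal, so an unauthenticated requests.get will most likely return a login page rather than the report tables. The script above does not show how the portal authenticates; the sketch below only illustrates reusing a single requests.Session so that cookies from a (hypothetical) login step are sent with every report request. The login URL and form fields are placeholders, not the portal's actual API.

# Hypothetical: one session keeps cookies across requests
session = requests.Session()

# Placeholder login step: the real endpoint and payload are not known from this script
# session.post('https://toolkit.tesco.com/login', data={'username': '...', 'password': '...'})

response = session.get(url, params=options_data)
response.raise_for_status()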