Compare commits
Comparing 732b568986...main · 7 commits

| Author | SHA1 | Date |
|---|---|---|
|  | d2ff24b6da |  |
|  | 2f569ae683 |  |
|  | e692758a7c |  |
|  | 22136f77b7 |  |
|  | ee0bb76f22 |  |
|  | 6b25a7ddad |  |
|  | 211752ac77 |  |
.gitignore (vendored) · 3 changes
@@ -27,6 +27,7 @@ logs
__pycache__/
*.py[cod]
*$py.class
venv/

# Output directory
output/
output/

Binary file not shown.
config.json · 67 changes
@@ -1,38 +1,44 @@
{
  "user_data": {
    "first_name": "Denisa",
    "last_name": "Cirsteas",
    "company_name": "footprints",
    "email": "test@test.ro",
    "phone": "1231231231",
    "store_name": "TEST",
    "country": "Romania",
    "starting_date": "2026-01-01",
    "first_name": "gfgdgfd",
    "last_name": "gfdgdf",
    "company_name": "gfdgdf",
    "email": "gfdgf",
    "phone": "gfdgfdg",
    "store_name": "test1232",
    "country": "gfdgfd",
    "starting_date": "2025-09-25",
    "duration": 36,
    "store_types": [
      "Convenience",
      "Supermarket"
      "Convenience"
    ],
    "open_days_per_month": 30,
    "currency_symbol": "€",
    "convenience_store_type": {
      "stores_number": 4000,
      "monthly_transactions": 40404040,
      "stores_number": 1233,
      "monthly_transactions": 3211312,
      "has_digital_screens": true,
      "screen_count": 2,
      "screen_percentage": 100,
      "screen_percentage": 123123,
      "has_in_store_radio": true,
      "radio_percentage": 100,
      "open_days_per_month": 30
      "radio_percentage": 321
    },
    "minimarket_store_type": {
      "stores_number": 0,
      "monthly_transactions": 0,
      "has_digital_screens": false,
      "screen_count": 0,
      "screen_percentage": 0,
      "has_in_store_radio": false,
      "radio_percentage": 0
    },
    "supermarket_store_type": {
      "stores_number": 200,
      "monthly_transactions": 20202020,
      "has_digital_screens": true,
      "screen_count": 4,
      "screen_percentage": 100,
      "has_in_store_radio": true,
      "radio_percentage": 100,
      "open_days_per_month": 30
      "stores_number": 0,
      "monthly_transactions": 0,
      "has_digital_screens": false,
      "screen_count": 0,
      "screen_percentage": 0,
      "has_in_store_radio": false,
      "radio_percentage": 0
    },
    "hypermarket_store_type": {
      "stores_number": 0,
@@ -41,22 +47,21 @@
      "screen_count": 0,
      "screen_percentage": 0,
      "has_in_store_radio": false,
      "radio_percentage": 0,
      "open_days_per_month": 30
      "radio_percentage": 0
    },
    "on_site_channels": [
      "Website"
    ],
    "website_visitors": 1001001,
    "website_visitors": 321321,
    "app_users": 0,
    "loyalty_users": 0,
    "off_site_channels": [
      "Email"
      "Facebook Business"
    ],
    "facebook_followers": 0,
    "facebook_followers": 32131312,
    "instagram_followers": 0,
    "google_views": 0,
    "email_subscribers": 100000,
    "email_subscribers": 0,
    "sms_users": 0,
    "whatsapp_contacts": 0,
    "potential_reach_in_store": 0,
@@ -66,4 +71,4 @@
    "potential_reach_off_site": 0,
    "unique_impressions_off_site": 0
  }
}
}

create_excel.py · 149 changes (file deleted)
@@ -1,149 +0,0 @@
#!/usr/bin/env python3
import json
import os
import shutil
import datetime
import re
from pathlib import Path
from dateutil.relativedelta import relativedelta
from update_excel import update_excel_variables

def create_excel_from_template():
    """
    Create a copy of the Excel template and save it to the output folder,
    then inject variables from config.json into the Variables sheet.
    """
    # Define paths
    script_dir = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(script_dir, 'config.json')
    # Look for any Excel template in the template directory
    template_dir = os.path.join(script_dir, 'template')
    template_files = [f for f in os.listdir(template_dir) if f.endswith('.xlsx')]
    if not template_files:
        print("Error: No Excel template found in the template directory")
        return False
    template_path = os.path.join(template_dir, template_files[0])
    output_dir = os.path.join(script_dir, 'output')

    # Ensure output directory exists
    os.makedirs(output_dir, exist_ok=True)

    # Read config.json to get store_name, starting_date, and duration
    try:
        with open(config_path, 'r') as f:
            config = json.load(f)
        user_data = config.get('user_data', {})
        store_name = user_data.get('store_name', '')
        starting_date = user_data.get('starting_date', '')
        duration = user_data.get('duration', 36)

        # If store_name is empty, use a default value
        if not store_name:
            store_name = "Your Store"

        # Calculate years array based on starting_date and duration
        years = calculate_years(starting_date, duration)
        print(f"Years in the period: {years}")
    except Exception as e:
        print(f"Error reading config file: {e}")
        return False

    # Use first and last years from the array in the filename
    year_range = ""
    if years and len(years) > 0:
        if len(years) == 1:
            year_range = f"{years[0]}"
        else:
            year_range = f"{years[0]}-{years[-1]}"
    else:
        # Fallback to current year if years array is empty
        current_year = datetime.datetime.now().year
        year_range = f"{current_year}"

    # Create output filename with store_name and year range
    output_filename = f"Footprints AI for {store_name} - Retail Media Business Case Calculations {year_range}.xlsx"
    output_path = os.path.join(output_dir, output_filename)

    # Copy the template to the output directory with the new name
    try:
        shutil.copy2(template_path, output_path)
        print(f"Excel file created successfully: {output_path}")

        # Update the Excel file with variables from config.json
        print("Updating Excel file with variables from config.json...")
        update_result = update_excel_variables(output_path)

        if update_result:
            print("Excel file updated successfully with variables from config.json")
        else:
            print("Warning: Failed to update Excel file with variables from config.json")

        return True
    except Exception as e:
        print(f"Error creating Excel file: {e}")
        return False

def calculate_years(starting_date, duration):
    """
    Calculate an array of years that appear in the period from starting_date for duration months.

    Args:
        starting_date (str): Date in format dd/mm/yyyy or dd.mm.yyyy
        duration (int): Number of months, including the starting month

    Returns:
        list: Array of years in the period [year1, year2, ...]
    """
    # Default result if we can't parse the date
    default_years = [datetime.datetime.now().year]

    # If starting_date is empty, return current year
    if not starting_date:
        return default_years

    try:
        # Try to parse the date, supporting both dd/mm/yyyy and dd.mm.yyyy formats
        if '/' in starting_date:
            day, month, year = map(int, starting_date.split('/'))
        elif '.' in starting_date:
            day, month, year = map(int, starting_date.split('.'))
        elif '-' in starting_date:
            # Handle ISO format (yyyy-mm-dd)
            date_parts = starting_date.split('-')
            if len(date_parts) == 3:
                year, month, day = map(int, date_parts)
            else:
                # Default to current date if format is not recognized
                return default_years
        else:
            # If format is not recognized, return default
            return default_years

        # Create datetime object for starting date
        start_date = datetime.datetime(year, month, day)

        # Calculate end date (starting date + duration months - 1 day)
        end_date = start_date + relativedelta(months=duration-1)

        # Create a set of years (to avoid duplicates)
        years_set = set()

        # Add starting year
        years_set.add(start_date.year)

        # Add ending year
        years_set.add(end_date.year)

        # If there are years in between, add those too
        for y in range(start_date.year + 1, end_date.year):
            years_set.add(y)

        # Convert set to sorted list
        return sorted(list(years_set))

    except Exception as e:
        print(f"Error calculating years: {e}")
        return default_years

if __name__ == "__main__":
    create_excel_from_template()

@@ -1,326 +0,0 @@
#!/usr/bin/env python3
"""
Cross-platform Excel generation script using openpyxl.
This version ensures clean Excel files without SharePoint/OneDrive metadata.
"""
import json
import os
import datetime
from pathlib import Path
from dateutil.relativedelta import relativedelta
import openpyxl
from openpyxl.workbook import Workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import Font, PatternFill, Alignment, Border, Side
import tempfile
import shutil


def create_excel_from_template():
    """
    Create an Excel file from template with all placeholders replaced.
    Uses openpyxl for maximum cross-platform compatibility.
    """
    # Define paths
    script_dir = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(script_dir, 'config.json')
    template_dir = os.path.join(script_dir, 'template')

    # Try to find the template with either naming convention
    possible_templates = [
        'cleaned_template.xlsx',  # Prefer cleaned template
        'Footprints AI for {store_name} - Retail Media Business Case Calculations.xlsx',
        'Footprints AI for store_name - Retail Media Business Case Calculations.xlsx'
    ]

    template_path = None
    for template_name in possible_templates:
        full_path = os.path.join(template_dir, template_name)
        if os.path.exists(full_path):
            template_path = full_path
            print(f"Found template: {template_name}")
            break

    if not template_path:
        print(f"Error: No template found in {template_dir}")
        return False

    output_dir = os.path.join(script_dir, 'output')
    os.makedirs(output_dir, exist_ok=True)

    # Read config.json
    try:
        with open(config_path, 'r') as f:
            config = json.load(f)
        user_data = config.get('user_data', {})
        store_name = user_data.get('store_name', 'Your Store')
        starting_date = user_data.get('starting_date', '')
        duration = user_data.get('duration', 36)

        if not store_name:
            store_name = "Your Store"

        print(f"Processing for store: {store_name}")

        # Calculate years array
        years = calculate_years(starting_date, duration)
        calculated_years = years
        print(f"Years in the period: {years}")
    except Exception as e:
        print(f"Error reading config file: {e}")
        return False

    # Determine year range for filename
    year_range = ""
    if years and len(years) > 0:
        if len(years) == 1:
            year_range = f"{years[0]}"
        else:
            year_range = f"{years[0]}-{years[-1]}"
    else:
        year_range = f"{datetime.datetime.now().year}"

    # Create output filename
    output_filename = f"Footprints AI for {store_name} - Retail Media Business Case Calculations {year_range}.xlsx"
    output_path = os.path.join(output_dir, output_filename)

    try:
        # Load template with data_only=False to preserve formulas
        print("Loading template...")
        wb = openpyxl.load_workbook(template_path, data_only=False, keep_vba=False)

        # Build mapping of placeholder patterns to actual values
        placeholder_patterns = [
            ('{store_name}', store_name),
            ('store_name', store_name)
        ]

        # Step 1: Create sheet name mappings
        print("Processing sheet names...")
        sheet_name_mappings = {}
        sheets_to_rename = []

        for sheet in wb.worksheets:
            old_title = sheet.title
            new_title = old_title

            for placeholder, replacement in placeholder_patterns:
                if placeholder in new_title:
                    new_title = new_title.replace(placeholder, replacement)

            if old_title != new_title:
                sheet_name_mappings[old_title] = new_title
                sheet_name_mappings[f"'{old_title}'"] = f"'{new_title}'"
                sheets_to_rename.append((sheet, new_title))
                print(f" Will rename: '{old_title}' -> '{new_title}'")

        # Step 2: Update all formulas and values
        print("Updating formulas and cell values...")
        total_updates = 0

        for sheet in wb.worksheets:
            if 'Variables' in sheet.title:
                continue

            updates_in_sheet = 0
            for row in sheet.iter_rows():
                for cell in row:
                    try:
                        # Handle formulas
                        if hasattr(cell, '_value') and isinstance(cell._value, str) and cell._value.startswith('='):
                            original = cell._value
                            updated = original

                            # Update sheet references
                            for old_ref, new_ref in sheet_name_mappings.items():
                                updated = updated.replace(old_ref, new_ref)

                            # Update placeholders
                            for placeholder, replacement in placeholder_patterns:
                                updated = updated.replace(placeholder, replacement)

                            if updated != original:
                                cell._value = updated
                                updates_in_sheet += 1

                        # Handle regular text values
                        elif cell.value and isinstance(cell.value, str):
                            original = cell.value
                            updated = original

                            for placeholder, replacement in placeholder_patterns:
                                updated = updated.replace(placeholder, replacement)

                            if updated != original:
                                cell.value = updated
                                updates_in_sheet += 1
                    except Exception as e:
                        # Skip cells that cause issues
                        continue

            if updates_in_sheet > 0:
                print(f" {sheet.title}: {updates_in_sheet} updates")
                total_updates += updates_in_sheet

        print(f"Total updates: {total_updates}")

        # Step 3: Rename sheets
        print("Renaming sheets...")
        for sheet, new_title in sheets_to_rename:
            old_title = sheet.title
            sheet.title = new_title
            print(f" Renamed: '{old_title}' -> '{new_title}'")

            # Hide forecast sheets not in calculated years
            if "Forecast" in new_title:
                try:
                    sheet_year = int(new_title.split()[0])
                    if sheet_year not in calculated_years:
                        sheet.sheet_state = 'hidden'
                        print(f" Hidden sheet '{new_title}' (year {sheet_year} not in range)")
                except (ValueError, IndexError):
                    pass

        # Step 4: Update Variables sheet
        print("Updating Variables sheet...")
        if 'Variables' in wb.sheetnames:
            update_variables_sheet(wb['Variables'], user_data)

        # Step 5: Save as a clean Excel file
        print(f"Saving clean Excel file to: {output_path}")

        # Create a temporary file first
        with tempfile.NamedTemporaryFile(suffix='.xlsx', delete=False) as tmp:
            tmp_path = tmp.name

        # Save to temporary file
        wb.save(tmp_path)

        # Re-open and save again to ensure clean structure
        wb_clean = openpyxl.load_workbook(tmp_path, data_only=False)
        wb_clean.save(output_path)
        wb_clean.close()

        # Clean up temporary file
        os.unlink(tmp_path)

        print(f"✓ Excel file created successfully: {output_filename}")
        return True

    except Exception as e:
        print(f"Error creating Excel file: {e}")
        import traceback
        traceback.print_exc()
        return False


def update_variables_sheet(sheet, user_data):
    """
    Update the Variables sheet with values from config.json
    """
    cell_mappings = {
        'B2': user_data.get('store_name', ''),
        'B31': user_data.get('starting_date', ''),
        'B32': user_data.get('duration', 36),
        'B37': user_data.get('open_days_per_month', 0),

        # Store types
        'H37': user_data.get('convenience_store_type', {}).get('stores_number', 0),
        'C37': user_data.get('convenience_store_type', {}).get('monthly_transactions', 0),
        'I37': 1 if user_data.get('convenience_store_type', {}).get('has_digital_screens', False) else 0,
        'J37': user_data.get('convenience_store_type', {}).get('screen_count', 0),
        'K37': user_data.get('convenience_store_type', {}).get('screen_percentage', 0),
        'M37': 1 if user_data.get('convenience_store_type', {}).get('has_in_store_radio', False) else 0,
        'N37': user_data.get('convenience_store_type', {}).get('radio_percentage', 0),

        'H38': user_data.get('minimarket_store_type', {}).get('stores_number', 0),
        'C38': user_data.get('minimarket_store_type', {}).get('monthly_transactions', 0),
        'I38': 1 if user_data.get('minimarket_store_type', {}).get('has_digital_screens', False) else 0,
        'J38': user_data.get('minimarket_store_type', {}).get('screen_count', 0),
        'K38': user_data.get('minimarket_store_type', {}).get('screen_percentage', 0),
        'M38': 1 if user_data.get('minimarket_store_type', {}).get('has_in_store_radio', False) else 0,
        'N38': user_data.get('minimarket_store_type', {}).get('radio_percentage', 0),

        'H39': user_data.get('supermarket_store_type', {}).get('stores_number', 0),
        'C39': user_data.get('supermarket_store_type', {}).get('monthly_transactions', 0),
        'I39': 1 if user_data.get('supermarket_store_type', {}).get('has_digital_screens', False) else 0,
        'J39': user_data.get('supermarket_store_type', {}).get('screen_count', 0),
        'K39': user_data.get('supermarket_store_type', {}).get('screen_percentage', 0),
        'M39': 1 if user_data.get('supermarket_store_type', {}).get('has_in_store_radio', False) else 0,
        'N39': user_data.get('supermarket_store_type', {}).get('radio_percentage', 0),

        'H40': user_data.get('hypermarket_store_type', {}).get('stores_number', 0),
        'C40': user_data.get('hypermarket_store_type', {}).get('monthly_transactions', 0),
        'I40': 1 if user_data.get('hypermarket_store_type', {}).get('has_digital_screens', False) else 0,
        'J40': user_data.get('hypermarket_store_type', {}).get('screen_count', 0),
        'K40': user_data.get('hypermarket_store_type', {}).get('screen_percentage', 0),
        'M40': 1 if user_data.get('hypermarket_store_type', {}).get('has_in_store_radio', False) else 0,
        'N40': user_data.get('hypermarket_store_type', {}).get('radio_percentage', 0),

        # Channels
        'B43': user_data.get('website_visitors', 0),
        'B44': user_data.get('app_users', 0),
        'B45': user_data.get('loyalty_users', 0),
        'B49': user_data.get('facebook_followers', 0),
        'B50': user_data.get('instagram_followers', 0),
        'B51': user_data.get('google_views', 0),
        'B52': user_data.get('email_subscribers', 0),
        'B53': user_data.get('sms_users', 0),
        'B54': user_data.get('whatsapp_contacts', 0)
    }

    for cell_ref, value in cell_mappings.items():
        try:
            sheet[cell_ref].value = value
            print(f" Updated {cell_ref} = {value}")
        except Exception as e:
            print(f" Warning: Could not update {cell_ref}: {e}")


def calculate_years(starting_date, duration):
    """
    Calculate an array of years that appear in the period.
    """
    default_years = [datetime.datetime.now().year]

    if not starting_date:
        return default_years

    try:
        # Parse date - support multiple formats
        if '/' in str(starting_date):
            day, month, year = map(int, str(starting_date).split('/'))
        elif '.' in str(starting_date):
            day, month, year = map(int, str(starting_date).split('.'))
        elif '-' in str(starting_date):
            # ISO format (yyyy-mm-dd)
            date_parts = str(starting_date).split('-')
            if len(date_parts) == 3:
                year, month, day = map(int, date_parts)
            else:
                return default_years
        else:
            return default_years

        start_date = datetime.datetime(year, month, day)
        end_date = start_date + relativedelta(months=duration-1)

        years_set = set()
        years_set.add(start_date.year)
        years_set.add(end_date.year)

        for y in range(start_date.year + 1, end_date.year):
            years_set.add(y)

        return sorted(list(years_set))

    except Exception as e:
        print(f"Error calculating years: {e}")
        return default_years


if __name__ == "__main__":
    create_excel_from_template()

@@ -1,149 +0,0 @@
#!/usr/bin/env python3
import json
import os
import shutil
import datetime
import re
from pathlib import Path
from dateutil.relativedelta import relativedelta
from update_excel import update_excel_variables

def create_excel_from_template():
    """
    Create a copy of the Excel template and save it to the output folder,
    then inject variables from config.json into the Variables sheet.
    """
    # Define paths
    script_dir = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(script_dir, 'config.json')
    # Look for any Excel template in the template directory
    template_dir = os.path.join(script_dir, 'template')
    template_files = [f for f in os.listdir(template_dir) if f.endswith('.xlsx')]
    if not template_files:
        print("Error: No Excel template found in the template directory")
        return False
    template_path = os.path.join(template_dir, template_files[0])
    output_dir = os.path.join(script_dir, 'output')

    # Ensure output directory exists
    os.makedirs(output_dir, exist_ok=True)

    # Read config.json to get store_name, starting_date, and duration
    try:
        with open(config_path, 'r') as f:
            config = json.load(f)
        user_data = config.get('user_data', {})
        store_name = user_data.get('store_name', '')
        starting_date = user_data.get('starting_date', '')
        duration = user_data.get('duration', 36)

        # If store_name is empty, use a default value
        if not store_name:
            store_name = "Your Store"

        # Calculate years array based on starting_date and duration
        years = calculate_years(starting_date, duration)
        print(f"Years in the period: {years}")
    except Exception as e:
        print(f"Error reading config file: {e}")
        return False

    # Use first and last years from the array in the filename
    year_range = ""
    if years and len(years) > 0:
        if len(years) == 1:
            year_range = f"{years[0]}"
        else:
            year_range = f"{years[0]}-{years[-1]}"
    else:
        # Fallback to current year if years array is empty
        current_year = datetime.datetime.now().year
        year_range = f"{current_year}"

    # Create output filename with store_name and year range
    output_filename = f"Footprints AI for {store_name} - Retail Media Business Case Calculations {year_range}.xlsx"
    output_path = os.path.join(output_dir, output_filename)

    # Copy the template to the output directory with the new name
    try:
        shutil.copy2(template_path, output_path)
        print(f"Excel file created successfully: {output_path}")

        # Update the Excel file with variables from config.json
        print("Updating Excel file with variables from config.json...")
        update_result = update_excel_variables(output_path)

        if update_result:
            print("Excel file updated successfully with variables from config.json")
        else:
            print("Warning: Failed to update Excel file with variables from config.json")

        return True
    except Exception as e:
        print(f"Error creating Excel file: {e}")
        return False

def calculate_years(starting_date, duration):
    """
    Calculate an array of years that appear in the period from starting_date for duration months.

    Args:
        starting_date (str): Date in format dd/mm/yyyy or dd.mm.yyyy
        duration (int): Number of months, including the starting month

    Returns:
        list: Array of years in the period [year1, year2, ...]
    """
    # Default result if we can't parse the date
    default_years = [datetime.datetime.now().year]

    # If starting_date is empty, return current year
    if not starting_date:
        return default_years

    try:
        # Try to parse the date, supporting both dd/mm/yyyy and dd.mm.yyyy formats
        if '/' in starting_date:
            day, month, year = map(int, starting_date.split('/'))
        elif '.' in starting_date:
            day, month, year = map(int, starting_date.split('.'))
        elif '-' in starting_date:
            # Handle ISO format (yyyy-mm-dd)
            date_parts = starting_date.split('-')
            if len(date_parts) == 3:
                year, month, day = map(int, date_parts)
            else:
                # Default to current date if format is not recognized
                return default_years
        else:
            # If format is not recognized, return default
            return default_years

        # Create datetime object for starting date
        start_date = datetime.datetime(year, month, day)

        # Calculate end date (starting date + duration months - 1 day)
        end_date = start_date + relativedelta(months=duration-1)

        # Create a set of years (to avoid duplicates)
        years_set = set()

        # Add starting year
        years_set.add(start_date.year)

        # Add ending year
        years_set.add(end_date.year)

        # If there are years in between, add those too
        for y in range(start_date.year + 1, end_date.year):
            years_set.add(y)

        # Convert set to sorted list
        return sorted(list(years_set))

    except Exception as e:
        print(f"Error calculating years: {e}")
        return default_years

if __name__ == "__main__":
    create_excel_from_template()

@@ -1,331 +0,0 @@
#!/usr/bin/env python3
"""
Improved Excel creation script that processes templates in memory
to prevent external link issues in Excel.
"""
import json
import os
import datetime
from pathlib import Path
from dateutil.relativedelta import relativedelta
import openpyxl
from openpyxl.utils import get_column_letter


def create_excel_from_template():
    """
    Create an Excel file from template with all placeholders replaced in memory
    before saving to prevent external link issues.
    """
    # Define paths
    script_dir = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(script_dir, 'config.json')
    # Check for both possible template names
    template_dir = os.path.join(script_dir, 'template')

    # Try to find the template with either naming convention
    possible_templates = [
        'Footprints AI for {store_name} - Retail Media Business Case Calculations.xlsx',
        'Footprints AI for store_name - Retail Media Business Case Calculations.xlsx'
    ]

    template_path = None
    for template_name in possible_templates:
        full_path = os.path.join(template_dir, template_name)
        if os.path.exists(full_path):
            template_path = full_path
            print(f"Found template: {template_name}")
            break

    if not template_path:
        print(f"Error: No template found in {template_dir}")
        return False

    output_dir = os.path.join(script_dir, 'output')

    # Ensure output directory exists
    os.makedirs(output_dir, exist_ok=True)

    # Read config.json
    try:
        with open(config_path, 'r') as f:
            config = json.load(f)
        user_data = config.get('user_data', {})
        store_name = user_data.get('store_name', 'Your Store')
        starting_date = user_data.get('starting_date', '')
        duration = user_data.get('duration', 36)

        if not store_name:
            store_name = "Your Store"

        print(f"Processing for store: {store_name}")

        # Calculate years array
        years = calculate_years(starting_date, duration)
        calculated_years = years  # For sheet visibility later
        print(f"Years in the period: {years}")
    except Exception as e:
        print(f"Error reading config file: {e}")
        return False

    # Determine year range for filename
    year_range = ""
    if years and len(years) > 0:
        if len(years) == 1:
            year_range = f"{years[0]}"
        else:
            year_range = f"{years[0]}-{years[-1]}"
    else:
        year_range = f"{datetime.datetime.now().year}"

    # Create output filename
    output_filename = f"Footprints AI for {store_name} - Retail Media Business Case Calculations {year_range}.xlsx"
    output_path = os.path.join(output_dir, output_filename)

    try:
        # STAGE 1: Load template and replace all placeholders in memory
        print("Loading template in memory...")
        wb = openpyxl.load_workbook(template_path, data_only=False)

        # Build mapping of placeholder patterns to actual values
        # Support both {store_name} and store_name formats
        placeholder_patterns = [
            ('{store_name}', store_name),
            ('store_name', store_name)  # New format without curly braces
        ]

        # STAGE 2: Replace placeholders in sheet names first
        print("Replacing placeholders in sheet names...")
        sheet_name_mappings = {}

        for sheet in wb.worksheets:
            old_title = sheet.title
            new_title = old_title

            # Replace all placeholder patterns in sheet name
            for placeholder, replacement in placeholder_patterns:
                if placeholder in new_title:
                    new_title = new_title.replace(placeholder, replacement)
                    print(f" Sheet name: '{old_title}' -> '{new_title}'")

            if old_title != new_title:
                # Store the mapping for formula updates
                sheet_name_mappings[old_title] = new_title
                # Also store with quotes for formula references
                sheet_name_mappings[f"'{old_title}'"] = f"'{new_title}'"

        # STAGE 3: Update all formulas and cell values BEFORE renaming sheets
        print("Updating formulas and cell values...")
        total_replacements = 0

        for sheet in wb.worksheets:
            sheet_name = sheet.title
            replacements_in_sheet = 0

            # Skip Variables sheet to avoid issues
            if 'Variables' in sheet_name:
                continue

            for row in sheet.iter_rows():
                for cell in row:
                    # Handle formulas
                    if cell.data_type == 'f' and cell.value:
                        original_formula = str(cell.value)
                        new_formula = original_formula

                        # First replace sheet references
                        for old_ref, new_ref in sheet_name_mappings.items():
                            if old_ref in new_formula:
                                new_formula = new_formula.replace(old_ref, new_ref)

                        # Then replace any remaining placeholders
                        for placeholder, replacement in placeholder_patterns:
                            if placeholder in new_formula:
                                new_formula = new_formula.replace(placeholder, replacement)

                        if new_formula != original_formula:
                            cell.value = new_formula
                            replacements_in_sheet += 1

                    # Handle text values
                    elif cell.value and isinstance(cell.value, str):
                        original_value = str(cell.value)
                        new_value = original_value

                        for placeholder, replacement in placeholder_patterns:
                            if placeholder in new_value:
                                new_value = new_value.replace(placeholder, replacement)

                        if new_value != original_value:
                            cell.value = new_value
                            replacements_in_sheet += 1

            if replacements_in_sheet > 0:
                print(f" {sheet_name}: {replacements_in_sheet} replacements")
                total_replacements += replacements_in_sheet

        print(f"Total replacements: {total_replacements}")

        # STAGE 4: Now rename the sheets (after formulas are updated)
        print("Renaming sheets...")
        for sheet in wb.worksheets:
            old_title = sheet.title
            new_title = old_title

            for placeholder, replacement in placeholder_patterns:
                if placeholder in new_title:
                    new_title = new_title.replace(placeholder, replacement)

            if old_title != new_title:
                sheet.title = new_title
                print(f" Renamed: '{old_title}' -> '{new_title}'")

            # Check if this is a forecast sheet and hide if needed
            if "Forecast" in new_title:
                try:
                    # Extract year from sheet name
                    sheet_year = int(new_title.split()[0])
                    if sheet_year not in calculated_years:
                        sheet.sheet_state = 'hidden'
                        print(f" Hidden sheet '{new_title}' (year {sheet_year} not in range)")
                except (ValueError, IndexError):
                    pass

        # STAGE 5: Update Variables sheet with config values
        print("Updating Variables sheet...")
        if 'Variables' in wb.sheetnames:
            update_variables_sheet(wb['Variables'], user_data)

        # STAGE 6: Save the fully processed workbook
        print(f"Saving to: {output_path}")
        wb.save(output_path)

        print(f"✓ Excel file created successfully: {output_filename}")
        return True

    except Exception as e:
        print(f"Error creating Excel file: {e}")
        import traceback
        traceback.print_exc()
        return False


def update_variables_sheet(sheet, user_data):
    """
    Update the Variables sheet with values from config.json
    """
    # Map config variables to Excel cells
    cell_mappings = {
        'B2': user_data.get('store_name', ''),
        'B31': user_data.get('starting_date', ''),
        'B32': user_data.get('duration', 36),
        'B37': user_data.get('open_days_per_month', 0),

        # Convenience store type
        'H37': user_data.get('convenience_store_type', {}).get('stores_number', 0),
        'C37': user_data.get('convenience_store_type', {}).get('monthly_transactions', 0),
        'I37': 1 if user_data.get('convenience_store_type', {}).get('has_digital_screens', False) else 0,
        'J37': user_data.get('convenience_store_type', {}).get('screen_count', 0),
        'K37': user_data.get('convenience_store_type', {}).get('screen_percentage', 0),
        'M37': 1 if user_data.get('convenience_store_type', {}).get('has_in_store_radio', False) else 0,
        'N37': user_data.get('convenience_store_type', {}).get('radio_percentage', 0),

        # Minimarket store type
        'H38': user_data.get('minimarket_store_type', {}).get('stores_number', 0),
        'C38': user_data.get('minimarket_store_type', {}).get('monthly_transactions', 0),
        'I38': 1 if user_data.get('minimarket_store_type', {}).get('has_digital_screens', False) else 0,
        'J38': user_data.get('minimarket_store_type', {}).get('screen_count', 0),
        'K38': user_data.get('minimarket_store_type', {}).get('screen_percentage', 0),
        'M38': 1 if user_data.get('minimarket_store_type', {}).get('has_in_store_radio', False) else 0,
        'N38': user_data.get('minimarket_store_type', {}).get('radio_percentage', 0),

        # Supermarket store type
        'H39': user_data.get('supermarket_store_type', {}).get('stores_number', 0),
        'C39': user_data.get('supermarket_store_type', {}).get('monthly_transactions', 0),
        'I39': 1 if user_data.get('supermarket_store_type', {}).get('has_digital_screens', False) else 0,
        'J39': user_data.get('supermarket_store_type', {}).get('screen_count', 0),
        'K39': user_data.get('supermarket_store_type', {}).get('screen_percentage', 0),
        'M39': 1 if user_data.get('supermarket_store_type', {}).get('has_in_store_radio', False) else 0,
        'N39': user_data.get('supermarket_store_type', {}).get('radio_percentage', 0),

        # Hypermarket store type
        'H40': user_data.get('hypermarket_store_type', {}).get('stores_number', 0),
        'C40': user_data.get('hypermarket_store_type', {}).get('monthly_transactions', 0),
        'I40': 1 if user_data.get('hypermarket_store_type', {}).get('has_digital_screens', False) else 0,
        'J40': user_data.get('hypermarket_store_type', {}).get('screen_count', 0),
        'K40': user_data.get('hypermarket_store_type', {}).get('screen_percentage', 0),
        'M40': 1 if user_data.get('hypermarket_store_type', {}).get('has_in_store_radio', False) else 0,
        'N40': user_data.get('hypermarket_store_type', {}).get('radio_percentage', 0),

        # On-site channels
        'B43': user_data.get('website_visitors', 0),
        'B44': user_data.get('app_users', 0),
        'B45': user_data.get('loyalty_users', 0),

        # Off-site channels
        'B49': user_data.get('facebook_followers', 0),
        'B50': user_data.get('instagram_followers', 0),
        'B51': user_data.get('google_views', 0),
        'B52': user_data.get('email_subscribers', 0),
        'B53': user_data.get('sms_users', 0),
        'B54': user_data.get('whatsapp_contacts', 0)
    }

    # Update the cells
    for cell_ref, value in cell_mappings.items():
        try:
            sheet[cell_ref].value = value
            print(f" Updated {cell_ref} = {value}")
        except Exception as e:
            print(f" Warning: Could not update {cell_ref}: {e}")


def calculate_years(starting_date, duration):
    """
    Calculate an array of years that appear in the period.
    """
    default_years = [datetime.datetime.now().year]

    if not starting_date:
        return default_years

    try:
        # Parse date - support multiple formats
        if '/' in str(starting_date):
            day, month, year = map(int, str(starting_date).split('/'))
        elif '.' in str(starting_date):
            day, month, year = map(int, str(starting_date).split('.'))
        elif '-' in str(starting_date):
            # ISO format (yyyy-mm-dd)
            date_parts = str(starting_date).split('-')
            if len(date_parts) == 3:
                year, month, day = map(int, date_parts)
            else:
                return default_years
        else:
            return default_years

        # Create datetime object
        start_date = datetime.datetime(year, month, day)

        # Calculate end date
        end_date = start_date + relativedelta(months=duration-1)

        # Create set of years
        years_set = set()
        years_set.add(start_date.year)
        years_set.add(end_date.year)

        # Add any years in between
        for y in range(start_date.year + 1, end_date.year):
            years_set.add(y)

        return sorted(list(years_set))

    except Exception as e:
        print(f"Error calculating years: {e}")
        return default_years


if __name__ == "__main__":
    create_excel_from_template()

@@ -1,138 +0,0 @@
#!/usr/bin/env python3
import os
import zipfile
import xml.etree.ElementTree as ET
import openpyxl
from openpyxl.xml.functions import fromstring, tostring
from pathlib import Path

def diagnose_excel_file(file_path):
    """Diagnose Excel file for corruption issues"""
    print(f"Diagnosing: {file_path}")
    print("=" * 50)

    # 1. Check if file exists
    if not os.path.exists(file_path):
        print(f"ERROR: File not found: {file_path}")
        return

    # 2. Try to open with openpyxl
    print("\n1. Testing openpyxl compatibility:")
    try:
        wb = openpyxl.load_workbook(file_path, read_only=False, keep_vba=True, data_only=False)
        print(f" ✓ Successfully loaded with openpyxl")
        print(f" - Sheets: {wb.sheetnames}")

        # Check for custom properties
        if hasattr(wb, 'custom_doc_props'):
            print(f" - Custom properties: {wb.custom_doc_props}")

        wb.close()
    except Exception as e:
        print(f" ✗ Failed to load with openpyxl: {e}")

    # 3. Analyze ZIP structure
    print("\n2. Analyzing ZIP/XML structure:")
    try:
        with zipfile.ZipFile(file_path, 'r') as zf:
            # Check for custom XML
            custom_xml_files = [f for f in zf.namelist() if 'customXml' in f or 'custom' in f.lower()]
            if custom_xml_files:
                print(f" ! Found custom XML files: {custom_xml_files}")

                for custom_file in custom_xml_files:
                    try:
                        content = zf.read(custom_file)
                        print(f"\n Content of {custom_file}:")
                        print(f" {content[:500].decode('utf-8', errors='ignore')}")
                    except Exception as e:
                        print(f" Error reading {custom_file}: {e}")

            # Check for tables
            table_files = [f for f in zf.namelist() if 'xl/tables/' in f]
            if table_files:
                print(f" - Found table files: {table_files}")
                for table_file in table_files:
                    content = zf.read(table_file)
                    # Check if XML declaration is present
                    if not content.startswith(b'<?xml'):
                        print(f" ! WARNING: {table_file} missing XML declaration")

            # Check workbook.xml for issues
            if 'xl/workbook.xml' in zf.namelist():
                workbook_content = zf.read('xl/workbook.xml')
                # Parse and check for issues
                try:
                    root = ET.fromstring(workbook_content)
                    # Check for external references
                    ext_refs = root.findall('.//{http://schemas.openxmlformats.org/spreadsheetml/2006/main}externalReference')
                    if ext_refs:
                        print(f" ! Found {len(ext_refs)} external references")
                except Exception as e:
                    print(f" ! Error parsing workbook.xml: {e}")

    except Exception as e:
        print(f" ✗ Failed to analyze ZIP structure: {e}")

    # 4. Check for SharePoint/OneDrive metadata
    print("\n3. Checking for SharePoint/OneDrive metadata:")
    try:
        with zipfile.ZipFile(file_path, 'r') as zf:
            if 'docProps/custom.xml' in zf.namelist():
                content = zf.read('docProps/custom.xml')
                if b'ContentTypeId' in content:
                    print(" ! Found SharePoint ContentTypeId in custom.xml")
                    print(" ! This file contains SharePoint metadata that may cause issues")
                if b'MediaService' in content:
                    print(" ! Found MediaService tags in custom.xml")
    except Exception as e:
        print(f" ✗ Error checking metadata: {e}")

    # 5. Compare with template
    print("\n4. Comparing with template:")
    template_path = Path(file_path).parent.parent / "template" / "Footprints AI for {store_name} - Retail Media Business Case Calculations.xlsx"
    if template_path.exists():
        try:
            with zipfile.ZipFile(template_path, 'r') as tf:
                with zipfile.ZipFile(file_path, 'r') as gf:
                    template_files = set(tf.namelist())
                    generated_files = set(gf.namelist())

                    # Files in generated but not in template
                    extra_files = generated_files - template_files
                    if extra_files:
                        print(f" ! Extra files in generated: {extra_files}")

                    # Files in template but not in generated
                    missing_files = template_files - generated_files
                    if missing_files:
                        print(f" ! Missing files in generated: {missing_files}")
        except Exception as e:
            print(f" ✗ Error comparing with template: {e}")
    else:
        print(f" - Template not found at {template_path}")

    print("\n" + "=" * 50)
    print("DIAGNOSIS SUMMARY:")
    print("The error 'This file has custom XML elements that are no longer supported'")
    print("is likely caused by SharePoint/OneDrive metadata in the custom.xml file.")
    print("\nThe ContentTypeId property suggests this file was previously stored in")
    print("SharePoint/OneDrive, which added custom metadata that Excel doesn't support")
    print("in certain contexts.")

# Test with the latest file
if __name__ == "__main__":
    output_dir = Path(__file__).parent / "output"
    test_file = output_dir / "Footprints AI for Test14 - Retail Media Business Case Calculations 2025-2028.xlsx"

    if test_file.exists():
        diagnose_excel_file(str(test_file))
    else:
        print(f"Test file not found: {test_file}")
        # Try to find any Excel file in output
        excel_files = list(output_dir.glob("*.xlsx"))
        if excel_files:
            print(f"\nFound {len(excel_files)} Excel files in output directory.")
            print("Diagnosing the most recent one...")
            latest_file = max(excel_files, key=os.path.getmtime)
            diagnose_excel_file(str(latest_file))

@@ -1,260 +0,0 @@
# Excel Table Repair - Solution Proposal

## Executive Summary

The Excel table repair errors are caused by **platform-specific differences in ZIP file assembly**, not XML content issues. Since the table XML is identical between working (macOS) and broken (Ubuntu) files, the solution requires addressing the underlying file generation process rather than XML formatting.

## Solution Strategy

### Option 1: Template-Based XML Injection (Recommended)
**Approach**: Modify the script to generate Excel tables using the exact XML format from the working template.

**Implementation**:
1. **Extract template table XML** as reference patterns
2. **Generate proper XML declarations** for all table files
3. **Add missing namespace declarations** and compatibility directives
4. **Implement UID generation** for tables and columns
5. **Fix table ID sequencing** to match Excel expectations

**Advantages**:
- ✅ Addresses root XML format issues
- ✅ Works across all platforms
- ✅ Future-proof against Excel updates
- ✅ No dependency on external libraries

**Implementation Timeline**: 2-3 days

### Option 2: Python Library Standardization
**Approach**: Replace custom Excel generation with established cross-platform libraries.

**Implementation Options**:
1. **openpyxl** - Most popular, excellent table support
2. **xlsxwriter** - Fast performance, good formatting
3. **pandas + openpyxl** - High-level data operations

**Advantages**:
- ✅ Proven cross-platform compatibility
- ✅ Handles XML complexities automatically
- ✅ Better maintenance and updates
- ✅ Extensive documentation and community

**Implementation Timeline**: 1-2 weeks (requires rewriting generation logic)

### Option 3: Platform Environment Isolation
**Approach**: Standardize the Python environment across platforms.

**Implementation**:
1. **Docker containerization** with fixed Python/library versions
2. **Virtual environment** with pinned dependencies (an environment-fingerprint sketch follows this section)
3. **CI/CD pipeline** generating files on a controlled environment

**Advantages**:
- ✅ Ensures identical execution environment
- ✅ Minimal code changes required
- ✅ Reproducible builds

**Implementation Timeline**: 3-5 days
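
Before isolating the environment, it helps to record exactly what each platform is running today. The snippet below is a minimal sketch (not part of the current scripts) that captures the interpreter, OS, openpyxl, and zlib versions so macOS and Ubuntu runs can be compared; it assumes openpyxl is installed.

```python
import platform
import sys
import zlib

import openpyxl


def environment_fingerprint():
    """Collect the version information that can influence generated .xlsx files."""
    return {
        "python": sys.version.split()[0],
        "platform": platform.platform(),
        "openpyxl": openpyxl.__version__,
        "zlib": zlib.ZLIB_RUNTIME_VERSION,  # compression backend used by zipfile
    }


if __name__ == "__main__":
    # Print the fingerprint alongside every generated file for later comparison.
    print(environment_fingerprint())
```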

## Recommended Implementation Plan

### Phase 1: Immediate Fix (Template-Based XML)

#### Step 1: XML Template Extraction
```python
def extract_template_xml_patterns():
    """Extract proper XML patterns from working template"""
    template_tables = {
        'table1': {
            'declaration': '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>',
            'namespaces': {
                'main': 'http://schemas.openxmlformats.org/spreadsheetml/2006/main',
                'mc': 'http://schemas.openxmlformats.org/markup-compatibility/2006',
                'xr': 'http://schemas.microsoft.com/office/spreadsheetml/2014/revision',
                'xr3': 'http://schemas.microsoft.com/office/spreadsheetml/2016/revision3'
            },
            'compatibility': 'mc:Ignorable="xr xr3"',
            'uid_pattern': '{00000000-000C-0000-FFFF-FFFF{:02d}000000}'
        }
    }
    return template_tables
```

#### Step 2: XML Generation Functions
```python
def generate_proper_table_xml(table_data, table_id):
    """Generate Excel-compliant table XML with proper format"""

    # XML Declaration
    xml_content = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'

    # Table element with all namespaces
    xml_content += f'<table xmlns="{MAIN_NS}" xmlns:mc="{MC_NS}" '
    xml_content += f'mc:Ignorable="xr xr3" xmlns:xr="{XR_NS}" '
    xml_content += f'xmlns:xr3="{XR3_NS}" '
    xml_content += f'id="{table_id + 1}" '  # Correct ID sequence
    xml_content += f'xr:uid="{generate_table_uid(table_id)}" '
    xml_content += f'name="{table_data.name}" '
    xml_content += f'displayName="{table_data.display_name}" '
    xml_content += f'ref="{table_data.ref}">\n'

    # Table columns with UIDs
    xml_content += generate_table_columns_xml(table_data.columns, table_id)

    # Table style info
    xml_content += generate_table_style_xml(table_data.style)

    xml_content += '</table>'

    return xml_content

def generate_table_uid(table_id):
    """Generate proper UIDs for tables"""
    return f"{{00000000-000C-0000-FFFF-FFFF{table_id:02d}000000}}"

def generate_column_uid(table_id, column_id):
    """Generate proper UIDs for table columns"""
    return f"{{00000000-0010-0000-{table_id:04d}-{column_id:06d}000000}}"
```

#### Step 3: File Assembly Improvements
```python
def create_excel_file_with_proper_compression():
    """Create Excel file with consistent ZIP compression"""

    # Use consistent compression settings
    with zipfile.ZipFile(output_path, 'w',
                         compression=zipfile.ZIP_DEFLATED,
                         compresslevel=6,  # Consistent compression level
                         allowZip64=False) as zipf:

        # Set consistent file timestamps
        fixed_time = (2023, 1, 1, 0, 0, 0)

        for file_path, content in excel_files.items():
            zinfo = zipfile.ZipInfo(file_path)
            zinfo.date_time = fixed_time
            zinfo.compress_type = zipfile.ZIP_DEFLATED

            zipf.writestr(zinfo, content)
```

### Phase 2: Testing and Validation

#### Cross-Platform Testing Matrix
| Platform | Python Version | Library Versions | Test Status |
|----------|---------------|-----------------|-------------|
| Ubuntu 22.04 | 3.10+ | openpyxl==3.x | ⏳ Pending |
| macOS | 3.10+ | openpyxl==3.x | ✅ Working |
| Windows | 3.10+ | openpyxl==3.x | ⏳ TBD |

#### Validation Script
```python
def validate_excel_file(file_path):
    """Validate generated Excel file for repair issues"""

    checks = {
        'table_xml_format': check_table_xml_declarations,
        'namespace_compliance': check_namespace_declarations,
        'uid_presence': check_unique_identifiers,
        'zip_metadata': check_zip_file_metadata,
        'excel_compatibility': test_excel_opening
    }

    results = {}
    for check_name, check_func in checks.items():
        results[check_name] = check_func(file_path)

    return results
```
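
The `check_*` helpers referenced above are not defined in this proposal. As an illustration only, one of them could look like the following sketch, which assumes nothing beyond Python's standard `zipfile` module:

```python
import zipfile


def check_table_xml_declarations(file_path):
    """Return True if every xl/tables/*.xml part begins with an XML declaration."""
    with zipfile.ZipFile(file_path) as zf:
        table_parts = [name for name in zf.namelist() if name.startswith("xl/tables/")]
        # An empty list (no tables) passes the check by definition.
        return all(zf.read(name).lstrip().startswith(b"<?xml") for name in table_parts)
```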

### Phase 3: Long-term Improvements

#### Migration to openpyxl
```python
# Example migration approach
from openpyxl import Workbook
from openpyxl.worksheet.table import Table, TableStyleInfo

def create_excel_with_openpyxl(business_case_data):
    """Generate Excel using openpyxl for cross-platform compatibility"""

    wb = Workbook()
    ws = wb.active

    # Add data
    for row in business_case_data:
        ws.append(row)

    # Create table with proper formatting
    table = Table(displayName="BusinessCaseTable", ref="A1:H47")
    style = TableStyleInfo(name="TableStyleMedium3",
                           showFirstColumn=False,
                           showLastColumn=False,
                           showRowStripes=True,
                           showColumnStripes=False)
    table.tableStyleInfo = style

    ws.add_table(table)

    # Save with consistent settings
    wb.save(output_path)
```

## Implementation Checklist

### Immediate Actions (Week 1)
- [ ] Extract XML patterns from working template
- [ ] Implement proper XML declaration generation
- [ ] Add namespace declarations and compatibility directives
- [ ] Implement UID generation algorithms
- [ ] Fix table ID sequencing logic
- [ ] Test on Ubuntu environment

### Validation Actions (Week 2)
- [ ] Create comprehensive test suite
- [ ] Validate across multiple platforms
- [ ] Performance testing with large datasets
- [ ] Excel compatibility testing (different versions)
- [ ] Automated repair detection

### Future Improvements (Month 2)
- [ ] Migration to openpyxl library
- [ ] Docker containerization for consistent environment
- [ ] CI/CD pipeline with cross-platform testing
- [ ] Comprehensive documentation updates

## Risk Assessment

### High Priority Risks
- **Platform dependency**: The current solution may not work on Windows
- **Excel version compatibility**: Different Excel versions may apply different validation
- **Performance impact**: Proper XML generation may be slower

### Mitigation Strategies
- **Comprehensive testing**: Test on all target platforms before deployment
- **Fallback mechanism**: Keep the current generation path as a backup
- **Performance optimization**: Profile and optimize the XML generation code

## Success Metrics

### Primary Goals
- ✅ Zero Excel repair dialogs on Ubuntu-generated files
- ✅ Identical behavior across macOS and Ubuntu
- ✅ No data loss or functionality regression

### Secondary Goals
- ✅ Improved file generation performance
- ✅ Better code maintainability
- ✅ Enhanced error handling and logging

## Conclusion

The recommended solution addresses the root cause by implementing proper Excel XML format generation while maintaining cross-platform compatibility. The template-based approach provides immediate relief, while the library migration offers long-term stability.

**Next Steps**: Begin with Phase 1 implementation focusing on proper XML generation, followed by comprehensive testing across platforms.

---

*Proposal created: 2025-09-19*
*Estimated implementation time: 2-3 weeks*
*Priority: High - affects production workflows*

@@ -1,117 +0,0 @@
# Excel Table Repair Error Analysis

## Issue Summary
When opening Ubuntu-generated Excel files, Excel displays repair errors specifically for tables:
- **Repaired Records: Table from /xl/tables/table1.xml part (Table)**
- **Repaired Records: Table from /xl/tables/table2.xml part (Table)**

**CRITICAL FINDING**: The same script generates working files on macOS but broken files on Ubuntu, indicating a **platform-specific issue** rather than a general Excel format problem.

## Investigation Findings

### Three-Way Table Structure Comparison

#### Template File (Original - Working)
- Contains proper XML declaration: `<?xml version="1.0" encoding="UTF-8" standalone="yes"?>`
- Includes comprehensive namespace declarations:
  - `xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006"`
  - `xmlns:xr="http://schemas.microsoft.com/office/spreadsheetml/2014/revision"`
  - `xmlns:xr3="http://schemas.microsoft.com/office/spreadsheetml/2016/revision3"`
- Has `mc:Ignorable="xr xr3"` compatibility directive
- Contains unique identifiers (`xr:uid`, `xr3:uid`) for tables and columns
- Proper table ID sequence (table1 has id="2", table2 has id="3")

#### macOS Generated File (Working - No Repair Errors)
- **Missing XML declaration** - no `<?xml version="1.0" encoding="UTF-8" standalone="yes"?>`
- **Missing namespace declarations** for revision extensions
- **No compatibility directives** (`mc:Ignorable`)
- **Missing unique identifiers** for tables and columns
- **Different table ID sequence** (table1 has id="1", table2 has id="2")
- **File sizes: 1,032 bytes (table1), 1,121 bytes (table2)**

#### Ubuntu Generated File (Broken - Requires Repair)
- **Missing XML declaration** - no `<?xml version="1.0" encoding="UTF-8" standalone="yes"?>`
- **Missing namespace declarations** for revision extensions
- **No compatibility directives** (`mc:Ignorable`)
- **Missing unique identifiers** for tables and columns
- **Same table ID sequence as macOS** (table1 has id="1", table2 has id="2")
- **Identical file sizes to macOS: 1,032 bytes (table1), 1,121 bytes (table2)**

### Key Discovery: XML Content is Identical

**SHOCKING REVELATION**: The table XML content of the macOS and Ubuntu generated files is **byte-for-byte identical**. Both have:

1. **Missing XML declarations**
2. **Missing namespace extensions**
3. **Missing unique identifiers**
4. **Same table ID sequence** (1, 2)
5. **Identical file sizes**

**macOS table1.xml vs Ubuntu table1.xml:**
```xml
<table id="1" name="Table8" displayName="Table8" ref="A43:H47" headerRowCount="1" totalsRowShown="0" headerRowDxfId="53" dataDxfId="52" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">...
```
*(Completely identical)*

### Root Cause Analysis - Platform Dependency

Since the table XML is identical but only Ubuntu files require repair, the issue is **NOT in the table XML content**. The problem must be:
|
||||
|
||||
1. **File encoding differences** during ZIP assembly
|
||||
2. **ZIP compression algorithm differences** between platforms
|
||||
3. **File timestamp/metadata differences** in the ZIP archive
|
||||
4. **Different Python library versions** handling ZIP creation differently
|
||||
5. **Excel's platform-specific validation logic** being more strict on certain systems
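
To narrow down possibilities 1–4, a small comparison of the ZIP container metadata is enough, since the XML payloads are already known to be identical. A minimal sketch, assuming both workbooks are available locally (the paths in the example are placeholders):

```python
import zipfile

def compare_zip_metadata(path_a, path_b):
    """Print per-member differences in compression, timestamps, and flags
    between two .xlsx archives whose XML payloads are byte-identical."""
    with zipfile.ZipFile(path_a) as za, zipfile.ZipFile(path_b) as zb:
        names = sorted(set(za.namelist()) | set(zb.namelist()))
        for name in names:
            try:
                a, b = za.getinfo(name), zb.getinfo(name)
            except KeyError:
                print(f"{name}: present in only one archive")
                continue
            fields = ("compress_type", "date_time", "external_attr",
                      "create_system", "flag_bits", "CRC")
            diffs = {f: (getattr(a, f), getattr(b, f))
                     for f in fields if getattr(a, f) != getattr(b, f)}
            if diffs:
                print(f"{name}: {diffs}")

# Example usage (placeholder paths):
# compare_zip_metadata("output/macos.xlsx", "output/ubuntu.xlsx")
```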

### Common Formula Issues
Both versions contain `#REF!` errors in calculated columns:
```xml
<calculatedColumnFormula>#REF!</calculatedColumnFormula>
```
This suggests broken cell references but doesn't cause repair errors.
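
These broken references can be located without opening Excel by scanning the table parts directly. A short sketch, assuming the standard `xl/tables/` part layout:

```python
import re
import zipfile

def find_ref_errors(xlsx_path):
    """Return the table parts whose calculated-column formulas contain #REF!."""
    hits = []
    with zipfile.ZipFile(xlsx_path) as zf:
        for name in zf.namelist():
            if name.startswith("xl/tables/") and name.endswith(".xml"):
                xml = zf.read(name).decode("utf-8", errors="replace")
                if re.search(r"<calculatedColumnFormula>[^<]*#REF!", xml):
                    hits.append(name)
    return hits
```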

### Impact Assessment
- **Functionality:** No data loss; tables work after repair
- **User Experience:** Excel shows a warning dialog requiring user action **only on Ubuntu-generated files**
- **Automation:** Breaks automated processing workflows **only for Ubuntu deployments**
- **Platform Consistency:** Same code produces different results across platforms

## Recommendations

### Platform-Specific Investigation Priorities
1. **Compare Python library versions** between macOS and Ubuntu environments
2. **Check ZIP file metadata** (timestamps, compression levels, file attributes)
3. **Examine file encoding** during Excel assembly process
4. **Test with different Python Excel libraries** (openpyxl vs xlsxwriter vs others)
5. **Analyze ZIP file internals** with hex editors for subtle differences

### Immediate Workarounds
1. **Document platform dependency** in deployment guides
2. **Test all generated files** on target Excel environment before distribution
3. **Consider generating files on macOS** for production use
4. **Implement automated repair detection** in the workflow

### Long-term Fixes
1. **Standardize to template format** with proper XML declarations and namespaces
2. **Use established Excel libraries** with proven cross-platform compatibility
3. **Implement comprehensive testing** across multiple platforms
4. **Add ZIP file validation** to detect platform-specific differences (a sketch follows this list)
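
What such a validation pass could look like is sketched below; it only checks for the template features identified earlier (declaration, `mc:Ignorable`, `xr:uid`), so treat the required-snippet list as an assumption to refine as more differences are confirmed:

```python
import zipfile

# Features the working template contains but the generated files are missing
# (assumed check list, based on the findings above).
REQUIRED_SNIPPETS = (
    b'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>',
    b'mc:Ignorable="xr xr3"',
    b'xr:uid=',
)

def validate_table_parts(xlsx_path):
    """Map each xl/tables/*.xml part to the template features it is missing."""
    problems = {}
    with zipfile.ZipFile(xlsx_path) as zf:
        for name in zf.namelist():
            if name.startswith("xl/tables/") and name.endswith(".xml"):
                data = zf.read(name)
                missing = [s.decode() for s in REQUIRED_SNIPPETS if s not in data]
                if missing:
                    problems[name] = missing
    return problems
```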

## Technical Details

### File Comparison Results
| File | Template | macOS Generated | Ubuntu Generated | Ubuntu vs macOS |
|------|----------|-----------------|------------------|-----------------|
| table1.xml | 1,755 bytes | 1,032 bytes | 1,032 bytes | **Identical** |
| table2.xml | 1,844 bytes | 1,121 bytes | 1,121 bytes | **Identical** |

### Platform Dependency Evidence
- **Identical table XML content** between macOS and Ubuntu
- **Same missing features** (declarations, namespaces, UIDs)
- **Different Excel behavior** (repair required only on Ubuntu)
- **Suggests ZIP-level or metadata differences**

---

*Analysis completed: 2025-09-19*
*Files examined: Template vs Test5 generated Excel workbooks*
@@ -1,207 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Fix Excel corruption issues caused by SharePoint/OneDrive metadata
|
||||
"""
|
||||
import os
|
||||
import shutil
|
||||
import zipfile
|
||||
import xml.etree.ElementTree as ET
|
||||
from pathlib import Path
|
||||
import tempfile
|
||||
import openpyxl
|
||||
|
||||
def remove_sharepoint_metadata(excel_path, output_path=None):
|
||||
"""
|
||||
Remove SharePoint/OneDrive metadata from Excel file that causes corruption warnings
|
||||
|
||||
Args:
|
||||
excel_path: Path to the Excel file to fix
|
||||
output_path: Optional path for the fixed file (if None, overwrites original)
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
if not output_path:
|
||||
output_path = excel_path
|
||||
|
||||
print(f"Processing: {excel_path}")
|
||||
|
||||
try:
|
||||
# Method 1: Use openpyxl to remove custom properties
|
||||
print("Method 1: Using openpyxl to clean custom properties...")
|
||||
wb = openpyxl.load_workbook(excel_path, keep_vba=True)
|
||||
|
||||
# Remove custom document properties
|
||||
if hasattr(wb, 'custom_doc_props'):
|
||||
# Clear all custom properties
|
||||
wb.custom_doc_props.props.clear()
|
||||
print(" ✓ Cleared custom document properties")
|
||||
|
||||
# Save to temporary file first
|
||||
temp_file = Path(output_path).with_suffix('.tmp.xlsx')
|
||||
wb.save(temp_file)
|
||||
wb.close()
|
||||
|
||||
# Method 2: Direct ZIP manipulation to ensure complete removal
|
||||
print("Method 2: Direct ZIP manipulation for complete cleanup...")
|
||||
with tempfile.NamedTemporaryFile(suffix='.xlsx', delete=False) as tmp:
|
||||
tmp_path = tmp.name
|
||||
|
||||
with zipfile.ZipFile(temp_file, 'r') as zin:
|
||||
with zipfile.ZipFile(tmp_path, 'w', compression=zipfile.ZIP_DEFLATED) as zout:
|
||||
# Copy all files except custom.xml or create a clean one
|
||||
for item in zin.infolist():
|
||||
if item.filename == 'docProps/custom.xml':
|
||||
# Create a clean custom.xml without SharePoint metadata
|
||||
clean_custom_xml = create_clean_custom_xml()
|
||||
zout.writestr(item, clean_custom_xml)
|
||||
print(" ✓ Replaced custom.xml with clean version")
|
||||
else:
|
||||
# Copy the file as-is
|
||||
zout.writestr(item, zin.read(item.filename))
|
||||
|
||||
# Replace original file with cleaned version
|
||||
shutil.move(tmp_path, output_path)
|
||||
|
||||
# Clean up temporary file
|
||||
if temp_file.exists():
|
||||
temp_file.unlink()
|
||||
|
||||
print(f" ✓ Successfully cleaned: {output_path}")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f" ✗ Error cleaning file: {e}")
|
||||
return False
|
||||
|
||||
def create_clean_custom_xml():
|
||||
"""
|
||||
Create a clean custom.xml without SharePoint metadata
|
||||
"""
|
||||
# Create a minimal valid custom.xml
|
||||
xml_content = '''<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<Properties xmlns="http://schemas.openxmlformats.org/officeDocument/2006/custom-properties">
|
||||
</Properties>'''
|
||||
return xml_content.encode('utf-8')
|
||||
|
||||
def clean_template_file():
|
||||
"""
|
||||
Clean the template file to prevent future corruption
|
||||
"""
|
||||
template_dir = Path(__file__).parent / "template"
|
||||
template_files = list(template_dir.glob("*.xlsx"))
|
||||
|
||||
if not template_files:
|
||||
print("No template files found")
|
||||
return False
|
||||
|
||||
for template_file in template_files:
|
||||
print(f"\nCleaning template: {template_file.name}")
|
||||
|
||||
# Create backup
|
||||
backup_path = template_file.with_suffix('.backup.xlsx')
|
||||
shutil.copy2(template_file, backup_path)
|
||||
print(f" ✓ Created backup: {backup_path.name}")
|
||||
|
||||
# Clean the template
|
||||
if remove_sharepoint_metadata(str(template_file)):
|
||||
print(f" ✓ Template cleaned successfully")
|
||||
else:
|
||||
print(f" ✗ Failed to clean template")
|
||||
# Restore from backup
|
||||
shutil.copy2(backup_path, template_file)
|
||||
print(f" ✓ Restored from backup")
|
||||
|
||||
return True
|
||||
|
||||
def clean_all_output_files():
|
||||
"""
|
||||
Clean all Excel files in the output directory
|
||||
"""
|
||||
output_dir = Path(__file__).parent / "output"
|
||||
excel_files = list(output_dir.glob("*.xlsx"))
|
||||
|
||||
if not excel_files:
|
||||
print("No Excel files found in output directory")
|
||||
return False
|
||||
|
||||
print(f"Found {len(excel_files)} Excel files to clean")
|
||||
|
||||
for excel_file in excel_files:
|
||||
print(f"\nCleaning: {excel_file.name}")
|
||||
if remove_sharepoint_metadata(str(excel_file)):
|
||||
print(f" ✓ Cleaned successfully")
|
||||
else:
|
||||
print(f" ✗ Failed to clean")
|
||||
|
||||
return True
|
||||
|
||||
def verify_file_is_clean(excel_path):
|
||||
"""
|
||||
Verify that an Excel file is free from SharePoint metadata
|
||||
"""
|
||||
print(f"\nVerifying: {excel_path}")
|
||||
|
||||
try:
|
||||
with zipfile.ZipFile(excel_path, 'r') as zf:
|
||||
if 'docProps/custom.xml' in zf.namelist():
|
||||
content = zf.read('docProps/custom.xml')
|
||||
|
||||
# Check for problematic metadata
|
||||
if b'ContentTypeId' in content:
|
||||
print(" ✗ Still contains SharePoint ContentTypeId")
|
||||
return False
|
||||
if b'MediaService' in content:
|
||||
print(" ✗ Still contains MediaService tags")
|
||||
return False
|
||||
|
||||
print(" ✓ File is clean - no SharePoint metadata found")
|
||||
return True
|
||||
else:
|
||||
print(" ✓ File is clean - no custom.xml present")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f" ✗ Error verifying file: {e}")
|
||||
return False
|
||||
|
||||
def main():
|
||||
"""Main function to clean Excel files"""
|
||||
print("=" * 60)
|
||||
print("Excel SharePoint Metadata Cleaner")
|
||||
print("=" * 60)
|
||||
|
||||
# Step 1: Clean the template
|
||||
print("\nStep 1: Cleaning template file...")
|
||||
print("-" * 40)
|
||||
clean_template_file()
|
||||
|
||||
# Step 2: Clean all output files
|
||||
print("\n\nStep 2: Cleaning output files...")
|
||||
print("-" * 40)
|
||||
clean_all_output_files()
|
||||
|
||||
# Step 3: Verify cleaning
|
||||
print("\n\nStep 3: Verifying cleaned files...")
|
||||
print("-" * 40)
|
||||
|
||||
# Verify template
|
||||
template_dir = Path(__file__).parent / "template"
|
||||
for template_file in template_dir.glob("*.xlsx"):
|
||||
if not template_file.name.endswith('.backup.xlsx'):
|
||||
verify_file_is_clean(str(template_file))
|
||||
|
||||
# Verify output files
|
||||
output_dir = Path(__file__).parent / "output"
|
||||
for excel_file in output_dir.glob("*.xlsx"):
|
||||
verify_file_is_clean(str(excel_file))
|
||||
|
||||
print("\n" + "=" * 60)
|
||||
print("Cleaning complete!")
|
||||
print("\nNOTE: The Excel files should now open without corruption warnings.")
|
||||
print("The SharePoint/OneDrive metadata has been removed.")
|
||||
print("\nFuture files generated from the cleaned template should not have this issue.")
|
||||
print("=" * 60)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
80
index.html
80
index.html
@@ -121,7 +121,7 @@
|
||||
<!-- Step labels -->
|
||||
<div class="flex justify-between">
|
||||
<span class="step-label active" data-step="1" style="width: 16.66%; text-align: center;">Contact</span>
|
||||
<span class="step-label" data-step="2" style="width: 16.66%; text-align: center;">Store Details</span>
|
||||
<span class="step-label" data-step="2" style="width: 16.66%; text-align: center;">Setup Details</span>
|
||||
<span class="step-label" data-step="3" style="width: 16.66%; text-align: center;">In-Store</span>
|
||||
<span class="step-label" data-step="4" style="width: 16.66%; text-align: center;">On-Site</span>
|
||||
<span class="step-label" data-step="5" style="width: 16.66%; text-align: center;">Off-Site</span>
|
||||
@@ -181,20 +181,41 @@
|
||||
|
||||
<!-- Step 2: Store Details -->
|
||||
<div id="step2" class="step-content hidden">
|
||||
<h2 class="text-xl font-bold text-gray-700 mb-5">Store Details</h2>
|
||||
<h2 class="text-xl font-bold text-gray-700 mb-5">Setup Details</h2>
|
||||
<div class="space-y-5">
|
||||
<!-- Store Name -->
|
||||
<div>
|
||||
<label for="storeName" class="block text-base font-medium text-gray-700 mb-1">Enter your retail store/s name</label>
|
||||
<label for="storeName" class="block text-base font-medium text-gray-700 mb-1">Retailer Name</label>
|
||||
<input type="text" id="storeName" name="storeName" required
|
||||
class="w-full p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
</div>
|
||||
|
||||
<!-- Starting Date -->
|
||||
<div>
|
||||
<label for="startingDate" class="block text-base font-medium text-gray-700 mb-1">Starting Date</label>
|
||||
<input type="date" id="startingDate" name="startingDate" required
|
||||
class="w-full p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
<label class="block text-base font-medium text-gray-700 mb-1">Business Case Start Date</label>
|
||||
<div class="flex space-x-2">
|
||||
<select id="startingMonth" name="startingMonthSelect" required
|
||||
class="flex-1 p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
<option value="">Select Month</option>
|
||||
<option value="01">January</option>
|
||||
<option value="02">February</option>
|
||||
<option value="03">March</option>
|
||||
<option value="04">April</option>
|
||||
<option value="05">May</option>
|
||||
<option value="06">June</option>
|
||||
<option value="07">July</option>
|
||||
<option value="08">August</option>
|
||||
<option value="09">September</option>
|
||||
<option value="10">October</option>
|
||||
<option value="11">November</option>
|
||||
<option value="12">December</option>
|
||||
</select>
|
||||
<select id="startingYear" name="startingYearSelect" required
|
||||
class="flex-1 p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
<option value="">Select Year</option>
|
||||
</select>
|
||||
</div>
|
||||
<input type="hidden" id="startingDate" name="startingDate">
|
||||
</div>
|
||||
|
||||
<!-- Duration -->
|
||||
@@ -203,6 +224,17 @@
|
||||
<input type="number" id="duration" name="duration" value="36" min="1" required
|
||||
class="w-full p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
</div>
|
||||
|
||||
<!-- Currency Symbol -->
|
||||
<div>
|
||||
<label for="currency" class="block text-base font-medium text-gray-700 mb-1">Currency Symbol</label>
|
||||
<select id="currency" name="currency" required
|
||||
class="w-full p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
<option value="">Select Currency</option>
|
||||
<option value="$">$ (Dollar)</option>
|
||||
<option value="€">€ (Euro)</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -210,13 +242,6 @@
|
||||
<div id="step3" class="step-content hidden">
|
||||
<h2 class="text-xl font-bold text-gray-700 mb-5">In-Store Information</h2>
|
||||
<div class="space-y-5">
|
||||
<!-- Store open days -->
|
||||
<div>
|
||||
<label for="openDays" class="block text-base font-medium text-gray-700 mb-1">Stores open days/month</label>
|
||||
<input type="number" id="openDays" name="openDays" required
|
||||
class="w-full p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
</div>
|
||||
|
||||
<!-- Store Types Dropdown -->
|
||||
<div>
|
||||
<label class="block text-base font-medium text-gray-700 mb-1">Store Types <span class="text-orange-500">*</span></label>
|
||||
@@ -504,6 +529,31 @@
|
||||
|
||||
// Initialize
|
||||
updateProgressBar();
|
||||
|
||||
// Populate year dropdown with current year and next 5 years
|
||||
const yearSelect = document.getElementById('startingYear');
|
||||
const currentYear = new Date().getFullYear();
|
||||
for (let year = currentYear; year <= currentYear + 5; year++) {
|
||||
const option = document.createElement('option');
|
||||
option.value = year;
|
||||
option.textContent = year;
|
||||
yearSelect.appendChild(option);
|
||||
}
|
||||
|
||||
// Handle month and year selection to set full date with day = 1
|
||||
function updateStartingDate() {
|
||||
const month = document.getElementById('startingMonth').value;
|
||||
const year = document.getElementById('startingYear').value;
|
||||
|
||||
if (month && year) {
|
||||
// Format: YYYY-MM-01
|
||||
document.getElementById('startingDate').value = `${year}-${month}-01`;
|
||||
console.log('Starting date set to:', document.getElementById('startingDate').value);
|
||||
}
|
||||
}
|
||||
|
||||
document.getElementById('startingMonth').addEventListener('change', updateStartingDate);
|
||||
document.getElementById('startingYear').addEventListener('change', updateStartingDate);
|
||||
|
||||
// Event listeners for navigation buttons
|
||||
prevBtn.addEventListener('click', goToPreviousStep);
|
||||
@@ -1521,7 +1571,7 @@
|
||||
</div>
|
||||
<div>
|
||||
<label for="${id}-screen-percentage" class="block text-base font-medium text-gray-700 mb-1">Number of stores with digital screens</label>
|
||||
<input type="number" id="${id}-screen-percentage" name="${id}_screen_percentage" min="0" max="100"
|
||||
<input type="number" id="${id}-screen-percentage" name="${id}_screen_percentage" min="0"
|
||||
class="w-full p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
</div>
|
||||
</div>
|
||||
@@ -1547,7 +1597,7 @@
|
||||
<div id="${id}-radio-questions" class="space-y-4 hidden">
|
||||
<div>
|
||||
<label for="${id}-radio-percentage" class="block text-base font-medium text-gray-700 mb-1">Number of stores with radio</label>
|
||||
<input type="number" id="${id}-radio-percentage" name="${id}_radio_percentage" min="0" max="100"
|
||||
<input type="number" id="${id}-radio-percentage" name="${id}_radio_percentage" min="0"
|
||||
class="w-full p-3 border border-gray-300 bg-white rounded-md focus:outline-none focus:ring-1 focus:ring-[#eb742e] focus:border-[#eb742e]">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
25
index.js
25
index.js
@@ -32,7 +32,7 @@ async function updateConfig(formData) {
|
||||
|
||||
// Store information
|
||||
store_types: getSelectedStoreTypes(formData),
|
||||
open_days_per_month: parseInt(formData.openDays) || 0,
|
||||
currency_symbol: formData.currency || "",
|
||||
|
||||
// Store type specific data
|
||||
convenience_store_type: {
|
||||
@@ -42,10 +42,19 @@ async function updateConfig(formData) {
|
||||
screen_count: isStoreTypeSelected(formData, 'Convenience') ? parseInt(formData.convenience_screen_count) || 0 : 0,
|
||||
screen_percentage: isStoreTypeSelected(formData, 'Convenience') ? parseInt(formData.convenience_screen_percentage) || 0 : 0,
|
||||
has_in_store_radio: isStoreTypeSelected(formData, 'Convenience') ? formData.convenience_radio === "Yes" : false,
|
||||
radio_percentage: isStoreTypeSelected(formData, 'Convenience') ? parseInt(formData.convenience_radio_percentage) || 0 : 0,
|
||||
open_days_per_month: parseInt(formData.openDays) || 0
|
||||
radio_percentage: isStoreTypeSelected(formData, 'Convenience') ? parseInt(formData.convenience_radio_percentage) || 0 : 0
|
||||
},
|
||||
|
||||
|
||||
minimarket_store_type: {
|
||||
stores_number: isStoreTypeSelected(formData, 'Minimarket') ? parseInt(formData.minimarket_stores) || 0 : 0,
|
||||
monthly_transactions: isStoreTypeSelected(formData, 'Minimarket') ? parseInt(formData.minimarket_transactions) || 0 : 0,
|
||||
has_digital_screens: isStoreTypeSelected(formData, 'Minimarket') ? formData.minimarket_screens === "Yes" : false,
|
||||
screen_count: isStoreTypeSelected(formData, 'Minimarket') ? parseInt(formData.minimarket_screen_count) || 0 : 0,
|
||||
screen_percentage: isStoreTypeSelected(formData, 'Minimarket') ? parseInt(formData.minimarket_screen_percentage) || 0 : 0,
|
||||
has_in_store_radio: isStoreTypeSelected(formData, 'Minimarket') ? formData.minimarket_radio === "Yes" : false,
|
||||
radio_percentage: isStoreTypeSelected(formData, 'Minimarket') ? parseInt(formData.minimarket_radio_percentage) || 0 : 0
|
||||
},
|
||||
|
||||
supermarket_store_type: {
|
||||
stores_number: isStoreTypeSelected(formData, 'Supermarket') ? parseInt(formData.supermarket_stores) || 0 : 0,
|
||||
monthly_transactions: isStoreTypeSelected(formData, 'Supermarket') ? parseInt(formData.supermarket_transactions) || 0 : 0,
|
||||
@@ -53,8 +62,7 @@ async function updateConfig(formData) {
|
||||
screen_count: isStoreTypeSelected(formData, 'Supermarket') ? parseInt(formData.supermarket_screen_count) || 0 : 0,
|
||||
screen_percentage: isStoreTypeSelected(formData, 'Supermarket') ? parseInt(formData.supermarket_screen_percentage) || 0 : 0,
|
||||
has_in_store_radio: isStoreTypeSelected(formData, 'Supermarket') ? formData.supermarket_radio === "Yes" : false,
|
||||
radio_percentage: isStoreTypeSelected(formData, 'Supermarket') ? parseInt(formData.supermarket_radio_percentage) || 0 : 0,
|
||||
open_days_per_month: parseInt(formData.openDays) || 0
|
||||
radio_percentage: isStoreTypeSelected(formData, 'Supermarket') ? parseInt(formData.supermarket_radio_percentage) || 0 : 0
|
||||
},
|
||||
|
||||
hypermarket_store_type: {
|
||||
@@ -64,8 +72,7 @@ async function updateConfig(formData) {
|
||||
screen_count: isStoreTypeSelected(formData, 'Hypermarket') ? parseInt(formData.hypermarket_screen_count) || 0 : 0,
|
||||
screen_percentage: isStoreTypeSelected(formData, 'Hypermarket') ? parseInt(formData.hypermarket_screen_percentage) || 0 : 0,
|
||||
has_in_store_radio: isStoreTypeSelected(formData, 'Hypermarket') ? formData.hypermarket_radio === "Yes" : false,
|
||||
radio_percentage: isStoreTypeSelected(formData, 'Hypermarket') ? parseInt(formData.hypermarket_radio_percentage) || 0 : 0,
|
||||
open_days_per_month: parseInt(formData.openDays) || 0
|
||||
radio_percentage: isStoreTypeSelected(formData, 'Hypermarket') ? parseInt(formData.hypermarket_radio_percentage) || 0 : 0
|
||||
},
|
||||
|
||||
// On-site channels
|
||||
@@ -184,4 +191,4 @@ if (typeof module !== 'undefined' && module.exports) {
|
||||
updateConfig,
|
||||
fetchConfig
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
19
requirements.txt
Normal file
19
requirements.txt
Normal file
@@ -0,0 +1,19 @@
# Business Case Form - Python Dependencies
# Generated on 2025-09-24

# Core Excel manipulation library
openpyxl==3.1.2

# Date utilities for date calculations
python-dateutil==2.8.2

# Standard libraries (included with Python 3.x):
# - json
# - os
# - shutil
# - datetime
# - re
# - pathlib
# - tempfile
# - zipfile
# - xml.etree.ElementTree
10
server.js
10
server.js
@@ -47,10 +47,14 @@ app.get('/download-excel', (req, res) => {
const latestFile = files[0].name;
const filePath = path.join(outputDir, latestFile);

// Set headers for file download
// Set optimized headers to avoid MOTW tagging and enable immediate formula calculation
res.setHeader('Content-Type', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet');
res.setHeader('Content-Disposition', `attachment; filename="${latestFile}"`);

res.setHeader('Content-Disposition', `inline; filename="${latestFile}"`); // 'inline' instead of 'attachment' to avoid MOTW
res.setHeader('Cache-Control', 'private, no-cache, no-store, must-revalidate');
res.setHeader('Pragma', 'no-cache');
res.setHeader('Expires', '0');
res.removeHeader('X-Powered-By'); // Remove identifying headers that might trigger security warnings

// Send the file
res.sendFile(filePath);
console.log(`Excel file sent for download: ${filePath}`);
Binary file not shown.
BIN
test_copy.xlsx
BIN
test_copy.xlsx
Binary file not shown.
227
update_excel.py
227
update_excel.py
@@ -1,227 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import openpyxl
|
||||
from openpyxl.utils import get_column_letter
|
||||
|
||||
def update_excel_variables(excel_path):
|
||||
"""
|
||||
Update the Variables sheet in the Excel file with values from config.json
|
||||
and hide forecast sheets that aren't in the calculated years array.
|
||||
|
||||
This version uses openpyxl exclusively to preserve all formatting, formulas,
|
||||
and Excel features that xlsxwriter cannot handle when modifying existing files.
|
||||
|
||||
Args:
|
||||
excel_path (str): Path to the Excel file to update
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
# Define paths
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
config_path = os.path.join(script_dir, 'config.json')
|
||||
|
||||
try:
|
||||
# Load config.json
|
||||
with open(config_path, 'r') as f:
|
||||
config = json.load(f)
|
||||
user_data = config.get('user_data', {})
|
||||
|
||||
# Load Excel workbook
|
||||
print(f"Opening Excel file: {excel_path}")
|
||||
wb = openpyxl.load_workbook(excel_path)
|
||||
|
||||
# Try to access the Variables sheet
|
||||
try:
|
||||
# First try by name
|
||||
sheet = wb['Variables']
|
||||
except KeyError:
|
||||
# If not found by name, try to access the last sheet
|
||||
sheet_names = wb.sheetnames
|
||||
if sheet_names:
|
||||
print(f"Variables sheet not found by name. Using last sheet: {sheet_names[-1]}")
|
||||
sheet = wb[sheet_names[-1]]
|
||||
else:
|
||||
print("No sheets found in the workbook")
|
||||
return False
|
||||
|
||||
# Map config variables to Excel cells based on the provided mapping
|
||||
cell_mappings = {
|
||||
'B2': user_data.get('store_name', ''),
|
||||
'B31': user_data.get('starting_date', ''),
|
||||
'B32': user_data.get('duration', 36),
|
||||
'B37': user_data.get('open_days_per_month', 0),
|
||||
|
||||
# Convenience store type
|
||||
'H37': user_data.get('convenience_store_type', {}).get('stores_number', 0),
|
||||
'C37': user_data.get('convenience_store_type', {}).get('monthly_transactions', 0),
|
||||
# Convert boolean to 1/0 for has_digital_screens
|
||||
'I37': 1 if user_data.get('convenience_store_type', {}).get('has_digital_screens', False) else 0,
|
||||
'J37': user_data.get('convenience_store_type', {}).get('screen_count', 0),
|
||||
'K37': user_data.get('convenience_store_type', {}).get('screen_percentage', 0),
|
||||
# Convert boolean to 1/0 for has_in_store_radio
|
||||
'M37': 1 if user_data.get('convenience_store_type', {}).get('has_in_store_radio', False) else 0,
|
||||
'N37': user_data.get('convenience_store_type', {}).get('radio_percentage', 0),
|
||||
|
||||
# Minimarket store type
|
||||
'H38': user_data.get('minimarket_store_type', {}).get('stores_number', 0),
|
||||
'C38': user_data.get('minimarket_store_type', {}).get('monthly_transactions', 0),
|
||||
# Convert boolean to 1/0 for has_digital_screens
|
||||
'I38': 1 if user_data.get('minimarket_store_type', {}).get('has_digital_screens', False) else 0,
|
||||
'J38': user_data.get('minimarket_store_type', {}).get('screen_count', 0),
|
||||
'K38': user_data.get('minimarket_store_type', {}).get('screen_percentage', 0),
|
||||
# Convert boolean to 1/0 for has_in_store_radio
|
||||
'M38': 1 if user_data.get('minimarket_store_type', {}).get('has_in_store_radio', False) else 0,
|
||||
'N38': user_data.get('minimarket_store_type', {}).get('radio_percentage', 0),
|
||||
|
||||
# Supermarket store type
|
||||
'H39': user_data.get('supermarket_store_type', {}).get('stores_number', 0),
|
||||
'C39': user_data.get('supermarket_store_type', {}).get('monthly_transactions', 0),
|
||||
# Convert boolean to 1/0 for has_digital_screens
|
||||
'I39': 1 if user_data.get('supermarket_store_type', {}).get('has_digital_screens', False) else 0,
|
||||
'J39': user_data.get('supermarket_store_type', {}).get('screen_count', 0),
|
||||
'K39': user_data.get('supermarket_store_type', {}).get('screen_percentage', 0),
|
||||
# Convert boolean to 1/0 for has_in_store_radio
|
||||
'M39': 1 if user_data.get('supermarket_store_type', {}).get('has_in_store_radio', False) else 0,
|
||||
'N39': user_data.get('supermarket_store_type', {}).get('radio_percentage', 0),
|
||||
|
||||
# Hypermarket store type
|
||||
'H40': user_data.get('hypermarket_store_type', {}).get('stores_number', 0),
|
||||
'C40': user_data.get('hypermarket_store_type', {}).get('monthly_transactions', 0),
|
||||
# Convert boolean to 1/0 for has_digital_screens
|
||||
'I40': 1 if user_data.get('hypermarket_store_type', {}).get('has_digital_screens', False) else 0,
|
||||
'J40': user_data.get('hypermarket_store_type', {}).get('screen_count', 0),
|
||||
'K40': user_data.get('hypermarket_store_type', {}).get('screen_percentage', 0),
|
||||
# Convert boolean to 1/0 for has_in_store_radio
|
||||
'M40': 1 if user_data.get('hypermarket_store_type', {}).get('has_in_store_radio', False) else 0,
|
||||
'N40': user_data.get('hypermarket_store_type', {}).get('radio_percentage', 0),
|
||||
|
||||
# On-site channels
|
||||
'B43': user_data.get('website_visitors', 0),
|
||||
'B44': user_data.get('app_users', 0),
|
||||
'B45': user_data.get('loyalty_users', 0),
|
||||
|
||||
# Off-site channels
|
||||
'B49': user_data.get('facebook_followers', 0),
|
||||
'B50': user_data.get('instagram_followers', 0),
|
||||
'B51': user_data.get('google_views', 0),
|
||||
'B52': user_data.get('email_subscribers', 0),
|
||||
'B53': user_data.get('sms_users', 0),
|
||||
'B54': user_data.get('whatsapp_contacts', 0)
|
||||
}
|
||||
|
||||
# Update the cells
|
||||
for cell_ref, value in cell_mappings.items():
|
||||
try:
|
||||
# Force the value to be set, even if the cell is protected or has data validation
|
||||
cell = sheet[cell_ref]
|
||||
cell.value = value
|
||||
print(f"Updated {cell_ref} with value: {value}")
|
||||
except Exception as e:
|
||||
print(f"Error updating cell {cell_ref}: {e}")
|
||||
|
||||
# Save the workbook with variables updated
|
||||
print("Saving workbook with updated variables...")
|
||||
wb.save(excel_path)
|
||||
|
||||
# Get the calculated years array from config
|
||||
starting_date = user_data.get('starting_date', '')
|
||||
duration = user_data.get('duration', 36)
|
||||
calculated_years = []
|
||||
|
||||
# Import datetime at the module level to avoid scope issues
|
||||
import datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
# Calculate years array based on starting_date and duration
|
||||
try:
|
||||
# Try to parse the date, supporting both dd/mm/yyyy and dd.mm.yyyy formats
|
||||
if starting_date:
|
||||
if '/' in str(starting_date):
|
||||
day, month, year = map(int, str(starting_date).split('/'))
|
||||
elif '.' in str(starting_date):
|
||||
day, month, year = map(int, str(starting_date).split('.'))
|
||||
elif '-' in str(starting_date):
|
||||
# Handle ISO format (yyyy-mm-dd)
|
||||
date_parts = str(starting_date).split('-')
|
||||
if len(date_parts) == 3:
|
||||
year, month, day = map(int, date_parts)
|
||||
else:
|
||||
# Default to current date if format is not recognized
|
||||
current_date = datetime.datetime.now()
|
||||
year, month, day = current_date.year, current_date.month, current_date.day
|
||||
elif isinstance(starting_date, datetime.datetime):
|
||||
day, month, year = starting_date.day, starting_date.month, starting_date.year
|
||||
else:
|
||||
# Default to current date if format is not recognized
|
||||
current_date = datetime.datetime.now()
|
||||
year, month, day = current_date.year, current_date.month, current_date.day
|
||||
|
||||
# Create datetime object for starting date
|
||||
start_date = datetime.datetime(year, month, day)
|
||||
|
||||
# Calculate end date (starting date + duration months - 1 day)
|
||||
end_date = start_date + relativedelta(months=duration-1)
|
||||
|
||||
# Create a set of years (to avoid duplicates)
|
||||
years_set = set()
|
||||
|
||||
# Add starting year
|
||||
years_set.add(start_date.year)
|
||||
|
||||
# Add ending year
|
||||
years_set.add(end_date.year)
|
||||
|
||||
# If there are years in between, add those too
|
||||
for y in range(start_date.year + 1, end_date.year):
|
||||
years_set.add(y)
|
||||
|
||||
# Convert set to sorted list
|
||||
calculated_years = sorted(list(years_set))
|
||||
print(f"Calculated years for sheet visibility: {calculated_years}")
|
||||
else:
|
||||
# Default to current year if no starting date
|
||||
calculated_years = [datetime.datetime.now().year]
|
||||
except Exception as e:
|
||||
print(f"Error calculating years for sheet visibility: {e}")
|
||||
calculated_years = [datetime.datetime.now().year]
|
||||
|
||||
# Hide forecast sheets that aren't in the calculated years array
|
||||
# No sheet renaming - just check existing sheet names
|
||||
for sheet_name in wb.sheetnames:
|
||||
# Check if this is a forecast sheet
|
||||
# Forecast sheets have names like "2025 – Forecast"
|
||||
if "Forecast" in sheet_name:
|
||||
# Extract the year from the sheet name
|
||||
try:
|
||||
sheet_year = int(sheet_name.split()[0])
|
||||
# Hide the sheet if its year is not in the calculated years
|
||||
if sheet_year not in calculated_years:
|
||||
sheet = wb[sheet_name]
|
||||
sheet.sheet_state = 'hidden'
|
||||
print(f"Hiding sheet '{sheet_name}' as year {sheet_year} is not in calculated years {calculated_years}")
|
||||
except Exception as e:
|
||||
print(f"Error extracting year from sheet name '{sheet_name}': {e}")
|
||||
|
||||
# Save the workbook with updated variables and hidden sheets
|
||||
print("Saving workbook with all updates...")
|
||||
wb.save(excel_path)
|
||||
|
||||
print(f"Excel file updated successfully: {excel_path}")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error updating Excel file: {e}")
|
||||
return False
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# For testing purposes
|
||||
import sys
|
||||
if len(sys.argv) > 1:
|
||||
excel_path = sys.argv[1]
|
||||
update_excel_variables(excel_path)
|
||||
else:
|
||||
print("Please provide the path to the Excel file as an argument")
|
||||
@@ -1,225 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import openpyxl
|
||||
from openpyxl.utils import get_column_letter
|
||||
# Removed zipfile import - no longer using direct XML manipulation
|
||||
|
||||
def update_excel_variables(excel_path):
|
||||
"""
|
||||
Update the Variables sheet in the Excel file with values from config.json
|
||||
and hide forecast sheets that aren't in the calculated years array
|
||||
|
||||
Args:
|
||||
excel_path (str): Path to the Excel file to update
|
||||
|
||||
Returns:
|
||||
bool: True if successful, False otherwise
|
||||
"""
|
||||
# Define paths
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
config_path = os.path.join(script_dir, 'config.json')
|
||||
|
||||
try:
|
||||
# Load config.json
|
||||
with open(config_path, 'r') as f:
|
||||
config = json.load(f)
|
||||
user_data = config.get('user_data', {})
|
||||
|
||||
# Load Excel workbook
|
||||
print(f"Opening Excel file: {excel_path}")
|
||||
wb = openpyxl.load_workbook(excel_path)
|
||||
|
||||
# Try to access the Variables sheet
|
||||
try:
|
||||
# First try by name
|
||||
sheet = wb['Variables']
|
||||
except KeyError:
|
||||
# If not found by name, try to access the last sheet
|
||||
sheet_names = wb.sheetnames
|
||||
if sheet_names:
|
||||
print(f"Variables sheet not found by name. Using last sheet: {sheet_names[-1]}")
|
||||
sheet = wb[sheet_names[-1]]
|
||||
else:
|
||||
print("No sheets found in the workbook")
|
||||
return False
|
||||
|
||||
# Map config variables to Excel cells based on the provided mapping
|
||||
cell_mappings = {
|
||||
'B2': user_data.get('store_name', ''),
|
||||
'B31': user_data.get('starting_date', ''),
|
||||
'B32': user_data.get('duration', 36),
|
||||
'B37': user_data.get('open_days_per_month', 0),
|
||||
|
||||
# Convenience store type
|
||||
'H37': user_data.get('convenience_store_type', {}).get('stores_number', 0),
|
||||
'C37': user_data.get('convenience_store_type', {}).get('monthly_transactions', 0),
|
||||
# Convert boolean to 1/0 for has_digital_screens
|
||||
'I37': 1 if user_data.get('convenience_store_type', {}).get('has_digital_screens', False) else 0,
|
||||
'J37': user_data.get('convenience_store_type', {}).get('screen_count', 0),
|
||||
'K37': user_data.get('convenience_store_type', {}).get('screen_percentage', 0),
|
||||
# Convert boolean to 1/0 for has_in_store_radio
|
||||
'M37': 1 if user_data.get('convenience_store_type', {}).get('has_in_store_radio', False) else 0,
|
||||
'N37': user_data.get('convenience_store_type', {}).get('radio_percentage', 0),
|
||||
|
||||
# Minimarket store type
|
||||
'H38': user_data.get('minimarket_store_type', {}).get('stores_number', 0),
|
||||
'C38': user_data.get('minimarket_store_type', {}).get('monthly_transactions', 0),
|
||||
# Convert boolean to 1/0 for has_digital_screens
|
||||
'I38': 1 if user_data.get('minimarket_store_type', {}).get('has_digital_screens', False) else 0,
|
||||
'J38': user_data.get('minimarket_store_type', {}).get('screen_count', 0),
|
||||
'K38': user_data.get('minimarket_store_type', {}).get('screen_percentage', 0),
|
||||
# Convert boolean to 1/0 for has_in_store_radio
|
||||
'M38': 1 if user_data.get('minimarket_store_type', {}).get('has_in_store_radio', False) else 0,
|
||||
'N38': user_data.get('minimarket_store_type', {}).get('radio_percentage', 0),
|
||||
|
||||
# Supermarket store type
|
||||
'H39': user_data.get('supermarket_store_type', {}).get('stores_number', 0),
|
||||
'C39': user_data.get('supermarket_store_type', {}).get('monthly_transactions', 0),
|
||||
# Convert boolean to 1/0 for has_digital_screens
|
||||
'I39': 1 if user_data.get('supermarket_store_type', {}).get('has_digital_screens', False) else 0,
|
||||
'J39': user_data.get('supermarket_store_type', {}).get('screen_count', 0),
|
||||
'K39': user_data.get('supermarket_store_type', {}).get('screen_percentage', 0),
|
||||
# Convert boolean to 1/0 for has_in_store_radio
|
||||
'M39': 1 if user_data.get('supermarket_store_type', {}).get('has_in_store_radio', False) else 0,
|
||||
'N39': user_data.get('supermarket_store_type', {}).get('radio_percentage', 0),
|
||||
|
||||
# Hypermarket store type
|
||||
'H40': user_data.get('hypermarket_store_type', {}).get('stores_number', 0),
|
||||
'C40': user_data.get('hypermarket_store_type', {}).get('monthly_transactions', 0),
|
||||
# Convert boolean to 1/0 for has_digital_screens
|
||||
'I40': 1 if user_data.get('hypermarket_store_type', {}).get('has_digital_screens', False) else 0,
|
||||
'J40': user_data.get('hypermarket_store_type', {}).get('screen_count', 0),
|
||||
'K40': user_data.get('hypermarket_store_type', {}).get('screen_percentage', 0),
|
||||
# Convert boolean to 1/0 for has_in_store_radio
|
||||
'M40': 1 if user_data.get('hypermarket_store_type', {}).get('has_in_store_radio', False) else 0,
|
||||
'N40': user_data.get('hypermarket_store_type', {}).get('radio_percentage', 0),
|
||||
|
||||
# On-site channels
|
||||
'B43': user_data.get('website_visitors', 0),
|
||||
'B44': user_data.get('app_users', 0),
|
||||
'B45': user_data.get('loyalty_users', 0),
|
||||
|
||||
# Off-site channels
|
||||
'B49': user_data.get('facebook_followers', 0),
|
||||
'B50': user_data.get('instagram_followers', 0),
|
||||
'B51': user_data.get('google_views', 0),
|
||||
'B52': user_data.get('email_subscribers', 0),
|
||||
'B53': user_data.get('sms_users', 0),
|
||||
'B54': user_data.get('whatsapp_contacts', 0)
|
||||
}
|
||||
|
||||
# Update the cells
|
||||
for cell_ref, value in cell_mappings.items():
|
||||
try:
|
||||
# Force the value to be set, even if the cell is protected or has data validation
|
||||
cell = sheet[cell_ref]
|
||||
cell.value = value
|
||||
print(f"Updated {cell_ref} with value: {value}")
|
||||
except Exception as e:
|
||||
print(f"Error updating cell {cell_ref}: {e}")
|
||||
|
||||
# Save the workbook with variables updated
|
||||
print("Saving workbook with updated variables...")
|
||||
wb.save(excel_path)
|
||||
|
||||
# Get the calculated years array from config
|
||||
starting_date = user_data.get('starting_date', '')
|
||||
duration = user_data.get('duration', 36)
|
||||
calculated_years = []
|
||||
|
||||
# Import datetime at the module level to avoid scope issues
|
||||
import datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
# Calculate years array based on starting_date and duration
|
||||
try:
|
||||
# Try to parse the date, supporting both dd/mm/yyyy and dd.mm.yyyy formats
|
||||
if starting_date:
|
||||
if '/' in str(starting_date):
|
||||
day, month, year = map(int, str(starting_date).split('/'))
|
||||
elif '.' in str(starting_date):
|
||||
day, month, year = map(int, str(starting_date).split('.'))
|
||||
elif '-' in str(starting_date):
|
||||
# Handle ISO format (yyyy-mm-dd)
|
||||
date_parts = str(starting_date).split('-')
|
||||
if len(date_parts) == 3:
|
||||
year, month, day = map(int, date_parts)
|
||||
else:
|
||||
# Default to current date if format is not recognized
|
||||
current_date = datetime.datetime.now()
|
||||
year, month, day = current_date.year, current_date.month, current_date.day
|
||||
elif isinstance(starting_date, datetime.datetime):
|
||||
day, month, year = starting_date.day, starting_date.month, starting_date.year
|
||||
else:
|
||||
# Default to current date if format is not recognized
|
||||
current_date = datetime.datetime.now()
|
||||
year, month, day = current_date.year, current_date.month, current_date.day
|
||||
|
||||
# Create datetime object for starting date
|
||||
start_date = datetime.datetime(year, month, day)
|
||||
|
||||
# Calculate end date (starting date + duration months - 1 day)
|
||||
end_date = start_date + relativedelta(months=duration-1)
|
||||
|
||||
# Create a set of years (to avoid duplicates)
|
||||
years_set = set()
|
||||
|
||||
# Add starting year
|
||||
years_set.add(start_date.year)
|
||||
|
||||
# Add ending year
|
||||
years_set.add(end_date.year)
|
||||
|
||||
# If there are years in between, add those too
|
||||
for y in range(start_date.year + 1, end_date.year):
|
||||
years_set.add(y)
|
||||
|
||||
# Convert set to sorted list
|
||||
calculated_years = sorted(list(years_set))
|
||||
print(f"Calculated years for sheet visibility: {calculated_years}")
|
||||
else:
|
||||
# Default to current year if no starting date
|
||||
calculated_years = [datetime.datetime.now().year]
|
||||
except Exception as e:
|
||||
print(f"Error calculating years for sheet visibility: {e}")
|
||||
calculated_years = [datetime.datetime.now().year]
|
||||
|
||||
# Hide forecast sheets that aren't in the calculated years array
|
||||
# No sheet renaming - just check existing sheet names
|
||||
for sheet_name in wb.sheetnames:
|
||||
# Check if this is a forecast sheet
|
||||
# Forecast sheets have names like "2025 – Forecast"
|
||||
if "Forecast" in sheet_name:
|
||||
# Extract the year from the sheet name
|
||||
try:
|
||||
sheet_year = int(sheet_name.split()[0])
|
||||
# Hide the sheet if its year is not in the calculated years
|
||||
if sheet_year not in calculated_years:
|
||||
sheet = wb[sheet_name]
|
||||
sheet.sheet_state = 'hidden'
|
||||
print(f"Hiding sheet '{sheet_name}' as year {sheet_year} is not in calculated years {calculated_years}")
|
||||
except Exception as e:
|
||||
print(f"Error extracting year from sheet name '{sheet_name}': {e}")
|
||||
|
||||
# Save the workbook with updated variables and hidden sheets
|
||||
print("Saving workbook with all updates...")
|
||||
wb.save(excel_path)
|
||||
|
||||
print(f"Excel file updated successfully: {excel_path}")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error updating Excel file: {e}")
|
||||
return False
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# For testing purposes
|
||||
import sys
|
||||
if len(sys.argv) > 1:
|
||||
excel_path = sys.argv[1]
|
||||
update_excel_variables(excel_path)
|
||||
else:
|
||||
print("Please provide the path to the Excel file as an argument")
|
||||
@@ -3,7 +3,59 @@ import json
|
||||
import os
|
||||
import re
|
||||
import openpyxl
|
||||
from openpyxl.utils import get_column_letter
|
||||
|
||||
CURRENCY_LOCALE_MAP = {
|
||||
'$': '-409', # English (US)
|
||||
'€': '-2', # English (Euro)
|
||||
}
|
||||
|
||||
CURRENCY_BRACKET_PATTERN = re.compile(r'\[\$([^\]-]*)(-[^\]]*)?\]')
|
||||
|
||||
|
||||
def apply_currency_symbol(workbook, symbol):
|
||||
"""Propagate the selected currency symbol across formats, labels, and charts."""
|
||||
target_symbol = (symbol or '$').strip() or '$'
|
||||
|
||||
def replace_currency_token(fmt):
|
||||
if not isinstance(fmt, str) or ('$' not in fmt and '€' not in fmt):
|
||||
return fmt
|
||||
|
||||
def _replace_match(match):
|
||||
locale_hint = match.group(2) or ''
|
||||
if target_symbol in CURRENCY_LOCALE_MAP:
|
||||
locale_hint = f"-{CURRENCY_LOCALE_MAP[target_symbol].lstrip('-')}" if match.group(2) or locale_hint else ''
|
||||
return f'[${target_symbol}{locale_hint}]'
|
||||
|
||||
updated = CURRENCY_BRACKET_PATTERN.sub(_replace_match, fmt)
|
||||
updated = updated.replace('"$"', f'"{target_symbol}"')
|
||||
return updated
|
||||
|
||||
for worksheet in workbook.worksheets:
|
||||
for row in worksheet.iter_rows():
|
||||
for cell in row:
|
||||
fmt = cell.number_format
|
||||
updated_format = replace_currency_token(fmt)
|
||||
if updated_format != fmt:
|
||||
cell.number_format = updated_format
|
||||
|
||||
value = cell.value
|
||||
if isinstance(value, str) and not value.startswith('=') and ('€' in value or '$' in value):
|
||||
new_value = value.replace('€', target_symbol).replace('$', target_symbol)
|
||||
if new_value != value:
|
||||
cell.value = new_value
|
||||
|
||||
for chart in getattr(worksheet, '_charts', []):
|
||||
axes = [getattr(chart, 'y_axis', None), getattr(chart, 'secondary_y_axis', None)]
|
||||
for axis in axes:
|
||||
if not axis or not getattr(axis, 'number_format', None):
|
||||
continue
|
||||
fmt_obj = axis.number_format
|
||||
format_code = getattr(fmt_obj, 'formatCode', None)
|
||||
if not isinstance(format_code, str):
|
||||
continue
|
||||
updated_code = replace_currency_token(format_code)
|
||||
if updated_code != format_code:
|
||||
fmt_obj.formatCode = updated_code
|
||||
|
||||
def update_excel_variables(excel_path):
|
||||
"""
|
||||
@@ -35,6 +87,35 @@ def update_excel_variables(excel_path):
|
||||
print(f"Opening Excel file: {excel_path}")
|
||||
wb = openpyxl.load_workbook(excel_path)
|
||||
|
||||
|
||||
# Break any external links to prevent unsafe external sources error
|
||||
print("Breaking any external links...")
|
||||
try:
|
||||
# Clear external links if they exist
|
||||
if hasattr(wb, '_external_links'):
|
||||
wb._external_links.clear()
|
||||
if hasattr(wb, 'external_links'):
|
||||
wb.external_links.clear()
|
||||
|
||||
# Remove any defined names that might contain external references
|
||||
names_to_remove = []
|
||||
for name in wb.defined_names:
|
||||
if name.value and ('[' in str(name.value) or 'store_name' in str(name.value)):
|
||||
names_to_remove.append(name.name)
|
||||
print(f"Removing potentially problematic defined name: {name.name}")
|
||||
|
||||
for name_to_remove in names_to_remove:
|
||||
del wb.defined_names[name_to_remove]
|
||||
|
||||
# Set calculation mode to manual to prevent external link issues
|
||||
if hasattr(wb, 'calculation') and hasattr(wb.calculation, 'calcMode'):
|
||||
wb.calculation.calcMode = 'manual'
|
||||
print("Set calculation mode to manual")
|
||||
|
||||
print("External links handling completed")
|
||||
except Exception as e:
|
||||
print(f"Warning during external links handling: {e}")
|
||||
|
||||
# Try to access the Variables sheet
|
||||
try:
|
||||
# First try by name
|
||||
@@ -54,7 +135,7 @@ def update_excel_variables(excel_path):
|
||||
'B2': user_data.get('store_name', ''),
|
||||
'B31': user_data.get('starting_date', ''),
|
||||
'B32': user_data.get('duration', 36),
|
||||
'B37': user_data.get('open_days_per_month', 0),
|
||||
'B37': user_data.get('currency_symbol', ''),
|
||||
|
||||
# Convenience store type
|
||||
'H37': user_data.get('convenience_store_type', {}).get('stores_number', 0),
|
||||
@@ -124,10 +205,181 @@ def update_excel_variables(excel_path):
|
||||
except Exception as e:
|
||||
print(f"Error updating cell {cell_ref}: {e}")
|
||||
|
||||
apply_currency_symbol(wb, user_data.get('currency_symbol', ''))
|
||||
|
||||
# Force formula recalculation before saving
|
||||
print("Forcing formula recalculation...")
|
||||
wb.calculation.calcMode = 'auto'
|
||||
wb.calculation.fullCalcOnLoad = True
|
||||
wb.calculation.fullPrecision = True
|
||||
|
||||
# Save the workbook with variables updated
|
||||
print("Saving workbook with updated variables...")
|
||||
wb.save(excel_path)
|
||||
|
||||
# Rename sheets containing store_name placeholder with actual store name
|
||||
store_name = user_data.get('store_name', 'Your Store')
|
||||
if store_name and store_name.strip():
|
||||
print(f"Renaming sheets with store_name placeholder to: {store_name}")
|
||||
|
||||
# Create a list of sheets to rename to avoid modifying during iteration
|
||||
sheets_to_rename = []
|
||||
for sheet_name in wb.sheetnames:
|
||||
if 'store_name' in sheet_name:
|
||||
new_sheet_name = sheet_name.replace('store_name', store_name)
|
||||
sheets_to_rename.append((sheet_name, new_sheet_name))
|
||||
|
||||
# Update all formulas and references before renaming sheets
|
||||
if sheets_to_rename:
|
||||
print("Updating formulas to prevent external link errors...")
|
||||
|
||||
# Go through all sheets and update any formulas that reference sheets with store_name
|
||||
for ws in wb.worksheets:
|
||||
for row in ws.iter_rows():
|
||||
for cell in row:
|
||||
if cell.value and isinstance(cell.value, str) and cell.value.startswith('='):
|
||||
original_formula = cell.value
|
||||
updated_formula = original_formula
|
||||
|
||||
# Replace sheet references in formulas
|
||||
for old_name, new_name in sheets_to_rename:
|
||||
# Handle different formula reference patterns
|
||||
patterns_to_replace = [
|
||||
f"'{old_name}'!", # 'Sheet Name'!
|
||||
f"{old_name}!", # SheetName! (if no spaces)
|
||||
f"'{old_name}'.", # 'Sheet Name'. (alternative reference)
|
||||
]
|
||||
|
||||
for pattern in patterns_to_replace:
|
||||
if pattern in updated_formula:
|
||||
new_pattern = pattern.replace(old_name, new_name)
|
||||
updated_formula = updated_formula.replace(pattern, new_pattern)
|
||||
|
||||
# Also replace ALL store_name placeholder variations within formula content
|
||||
store_name_variations = ['store_name', '{store_name}', 'store_name}', '{store_name']
|
||||
for variation in store_name_variations:
|
||||
if variation in updated_formula:
|
||||
updated_formula = updated_formula.replace(variation, store_name)
|
||||
|
||||
# Update the cell if formula changed
|
||||
if updated_formula != original_formula:
|
||||
try:
|
||||
cell.value = updated_formula
|
||||
print(f"Updated formula in {ws.title}!{cell.coordinate}")
|
||||
except Exception as e:
|
||||
print(f"Warning: Could not update formula in {ws.title}!{cell.coordinate}: {e}")
|
||||
|
||||
# Also check for store_name in regular cell values (non-formula)
|
||||
elif cell.value and isinstance(cell.value, str) and 'store_name' in cell.value:
|
||||
try:
|
||||
original_value = cell.value
|
||||
updated_value = original_value.replace('store_name', store_name)
|
||||
cell.value = updated_value
|
||||
print(f"Updated cell value in {ws.title}!{cell.coordinate}: '{original_value}' -> '{updated_value}'")
|
||||
except Exception as e:
|
||||
print(f"Warning: Could not update cell value in {ws.title}!{cell.coordinate}: {e}")
|
||||
|
||||
# Now safely rename each sheet
|
||||
for old_name, new_name in sheets_to_rename:
|
||||
try:
|
||||
sheet_obj = wb[old_name]
|
||||
sheet_obj.title = new_name
|
||||
print(f"Renamed sheet '{old_name}' to '{new_name}'")
|
||||
except Exception as e:
|
||||
print(f"Error renaming sheet '{old_name}' to '{new_name}': {e}")
|
||||
|
||||
# COMPREHENSIVE pass: Replace store_name in ALL cells throughout the workbook
|
||||
print("=== COMPREHENSIVE SCAN: Checking all sheets for store_name placeholders ===")
|
||||
total_formulas_updated = 0
|
||||
total_text_updated = 0
|
||||
|
||||
for ws in wb.worksheets:
|
||||
print(f"Scanning sheet: {ws.title}")
|
||||
sheet_formulas_updated = 0
|
||||
sheet_text_updated = 0
|
||||
|
||||
for row in ws.iter_rows():
|
||||
for cell in row:
|
||||
if cell.value:
|
||||
# Handle ArrayFormula objects specially
|
||||
if hasattr(cell.value, 'text'): # ArrayFormula
|
||||
formula_text = cell.value.text
|
||||
store_name_variations = ['store_name', '{store_name}', 'store_name}', '{store_name']
|
||||
has_store_name = any(variation in formula_text for variation in store_name_variations)
|
||||
|
||||
if has_store_name:
|
||||
original_formula = formula_text
|
||||
updated_formula = original_formula
|
||||
|
||||
# Replace all variations
|
||||
for variation in store_name_variations:
|
||||
if variation in updated_formula:
|
||||
updated_formula = updated_formula.replace(variation, store_name)
|
||||
|
||||
if updated_formula != original_formula:
|
||||
try:
|
||||
cell.value.text = updated_formula
|
||||
sheet_formulas_updated += 1
|
||||
total_formulas_updated += 1
|
||||
print(f" ✓ Updated store_name in ArrayFormula {cell.coordinate}: {original_formula}")
|
||||
except Exception as e:
|
||||
print(f" ✗ Could not update ArrayFormula {cell.coordinate}: {e}")
|
||||
|
||||
elif isinstance(cell.value, str):
|
||||
# Handle regular string cells
|
||||
store_name_variations = ['store_name', '{store_name}', 'store_name}', '{store_name']
|
||||
has_store_name = any(variation in cell.value for variation in store_name_variations)
|
||||
|
||||
if has_store_name:
|
||||
if cell.value.startswith('='):
|
||||
# Formula with store_name variations
|
||||
original_formula = cell.value
|
||||
updated_formula = original_formula
|
||||
|
||||
# Replace all variations
|
||||
for variation in store_name_variations:
|
||||
if variation in updated_formula:
|
||||
updated_formula = updated_formula.replace(variation, store_name)
|
||||
|
||||
if updated_formula != original_formula:
|
||||
try:
|
||||
cell.value = updated_formula
|
||||
sheet_formulas_updated += 1
|
||||
total_formulas_updated += 1
|
||||
print(f" ✓ Updated store_name in formula {cell.coordinate}: {original_formula[:50]}...")
|
||||
except Exception as e:
|
||||
print(f" ✗ Could not update formula {cell.coordinate}: {e}")
|
||||
else:
|
||||
# Regular text with store_name variations
|
||||
original_value = cell.value
|
||||
updated_value = original_value
|
||||
|
||||
# Replace all variations
|
||||
for variation in store_name_variations:
|
||||
if variation in updated_value:
|
||||
updated_value = updated_value.replace(variation, store_name)
|
||||
|
||||
if updated_value != original_value:
|
||||
try:
|
||||
cell.value = updated_value
|
||||
sheet_text_updated += 1
|
||||
total_text_updated += 1
|
||||
print(f" ✓ Updated store_name in text {cell.coordinate}: '{original_value}' -> '{updated_value}'")
|
||||
except Exception as e:
|
||||
print(f" ✗ Could not update text {cell.coordinate}: {e}")
|
||||
|
||||
if sheet_formulas_updated > 0 or sheet_text_updated > 0:
|
||||
print(f" → Sheet {ws.title}: {sheet_formulas_updated} formulas, {sheet_text_updated} text cells updated")
|
||||
else:
|
||||
print(f" → Sheet {ws.title}: No store_name placeholders found")
|
||||
|
||||
print(f"=== TOTAL UPDATES: {total_formulas_updated} formulas, {total_text_updated} text cells ===")
|
||||
|
||||
# Save after sheet renaming and formula updates
|
||||
if sheets_to_rename:
|
||||
print("Saving workbook after sheet renaming and formula updates...")
|
||||
wb.save(excel_path)
|
||||
|
||||
# Get the calculated years array from config
|
||||
starting_date = user_data.get('starting_date', '')
|
||||
duration = user_data.get('duration', 36)
|
||||
@@ -207,6 +459,12 @@ def update_excel_variables(excel_path):
|
||||
except Exception as e:
|
||||
print(f"Error extracting year from sheet name '{sheet_name}': {e}")
|
||||
|
||||
# Ensure formulas are marked for recalculation before final save
|
||||
print("Ensuring formulas are marked for recalculation...")
|
||||
wb.calculation.calcMode = 'auto'
|
||||
wb.calculation.fullCalcOnLoad = True
|
||||
wb.calculation.fullPrecision = True
|
||||
|
||||
# Save the workbook with updated variables and hidden sheets
|
||||
print("Saving workbook with all updates...")
|
||||
wb.save(excel_path)
|
||||
@@ -226,4 +484,4 @@ if __name__ == "__main__":
|
||||
excel_path = sys.argv[1]
|
||||
update_excel_variables(excel_path)
|
||||
else:
|
||||
print("Please provide the path to the Excel file as an argument")
print("Please provide the path to the Excel file as an argument")