From 3f0aff923d518e77b7dce5465b76f3bff013e9c0 Mon Sep 17 00:00:00 2001 From: brentperteet Date: Sun, 22 Feb 2026 08:16:48 -0600 Subject: [PATCH] initial commit --- .claude/settings.local.json | 14 + add_libraries.py | 335 ++++++++++++++ app.py | 763 ++++++++++++++++++++++++++++++++ apply_variant.py | 269 ++++++++++++ config.json | 3 + export_bom.py | 89 ++++ extract_symbols.py | 517 ++++++++++++++++++++++ gen_passives_db.py | 288 ++++++++++++ init_user.py | 299 +++++++++++++ requirements.txt | 6 + static/logo_banner.png | Bin 0 -> 41755 bytes sync_variant.py | 278 ++++++++++++ templates/index.html | 846 ++++++++++++++++++++++++++++++++++++ templates/variants.html | 446 +++++++++++++++++++ variant_manager.py | 211 +++++++++ 15 files changed, 4364 insertions(+) create mode 100644 .claude/settings.local.json create mode 100644 add_libraries.py create mode 100644 app.py create mode 100644 apply_variant.py create mode 100644 config.json create mode 100644 export_bom.py create mode 100644 extract_symbols.py create mode 100644 gen_passives_db.py create mode 100644 init_user.py create mode 100644 requirements.txt create mode 100644 static/logo_banner.png create mode 100644 sync_variant.py create mode 100644 templates/index.html create mode 100644 templates/variants.html create mode 100644 variant_manager.py diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 0000000..913a2de --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,14 @@ +{ + "permissions": { + "allow": [ + "Read(//d/kicad/**)", + "Read(//d/tx/25w-kicad/25w/**)", + "Bash(for uuid in \"12ef4843-0c6b-44b3-b52b-21b354565dc0\" \"17a476c2-1017-41e7-9d81-f4153fe179f7\" \"25a5bbfc-04ad-4755-9a82-80d42d2cd8ce\")", + "Bash(do echo \"=== UUID: $uuid ===\")", + "Bash(grep -A 15 \"$uuid\" 25w.kicad_sch frequency.kicad_sch)", + "Bash(done)" + ], + "deny": [], + "ask": [] + } +} \ No newline at end of file diff --git a/add_libraries.py b/add_libraries.py new file mode 
100644 index 0000000..3d082bb --- /dev/null +++ b/add_libraries.py @@ -0,0 +1,335 @@ +#!/usr/bin/env python3 +""" +Adds all *.kicad_sym libraries under UM_KICAD/lib/symbols +to the KiCad v9 global sym-lib-table. + +Also adds all *.pretty footprint libraries under UM_KICAD/lib/footprints +to the KiCad v9 global fp-lib-table. + +Safe behavior: +- Does NOT remove existing entries +- Skips libraries already present (by URI) +- Creates sym-lib-table.bak and fp-lib-table.bak backups +""" + +from __future__ import annotations + +import os +import sys +import re +from pathlib import Path +from typing import Dict, List + + +# ---------------------------- +# KiCad v9 config location +# ---------------------------- +def kicad9_config_dir() -> Path: + home = Path.home() + + if sys.platform.startswith("win"): + appdata = os.environ.get("APPDATA") + if not appdata: + raise RuntimeError("APPDATA not set") + return Path(appdata) / "kicad" / "9.0" + + if sys.platform == "darwin": + return home / "Library" / "Preferences" / "kicad" / "9.0" + + xdg = os.environ.get("XDG_CONFIG_HOME") + if xdg: + return Path(xdg) / "kicad" / "9.0" + + return home / ".config" / "kicad" / "9.0" + + +def global_sym_lib_table_path() -> Path: + return kicad9_config_dir() / "sym-lib-table" + + +def global_fp_lib_table_path() -> Path: + return kicad9_config_dir() / "fp-lib-table" + + +# ---------------------------- +# Basic sym-lib-table parsing +# ---------------------------- +LIB_BLOCK_RE = re.compile(r"\(lib\s+(?:.|\n)*?\)\s*\)", re.MULTILINE) +NAME_RE = re.compile(r"\(name\s+([^)]+)\)") +URI_RE = re.compile(r"\(uri\s+([^)]+)\)") + + +def strip_atom(atom: str) -> str: + atom = atom.strip() + if atom.startswith('"') and atom.endswith('"'): + return atom[1:-1] + return atom + + +def parse_existing_libs(text: str) -> Dict[str, str]: + libs = {} + for m in LIB_BLOCK_RE.finditer(text): + block = m.group(0) + name_m = NAME_RE.search(block) + uri_m = URI_RE.search(block) + if not name_m or not uri_m: + continue 
+ name = strip_atom(name_m.group(1)) + uri = strip_atom(uri_m.group(1)) + libs[name] = uri + return libs + + +# ---------------------------- +# Helpers +# ---------------------------- +def make_unique_name(name: str, used: set[str]) -> str: + if name not in used: + return name + i = 2 + while f"{name}_{i}" in used: + i += 1 + return f"{name}_{i}" + + +def create_lib_block(name: str, uri: str, descr: str) -> str: + return ( + "(lib\n" + f" (name \"{name}\")\n" + " (type \"KiCad\")\n" + f" (uri \"{uri}\")\n" + " (options \"\")\n" + f" (descr \"{descr}\")\n" + ")" + ) + + +def create_fp_lib_block(name: str, uri: str, descr: str) -> str: + return ( + "(lib\n" + f" (name \"{name}\")\n" + " (type \"KiCad\")\n" + f" (uri \"{uri}\")\n" + " (options \"\")\n" + f" (descr \"{descr}\")\n" + ")" + ) + + +# ---------------------------- +# Main logic +# ---------------------------- +def sync_symbol_libraries(um_root: Path) -> int: + """Synchronize symbol libraries""" + symbols_dir = um_root / "lib" / "symbols" + + if not symbols_dir.exists(): + print(f"ERROR: {symbols_dir} does not exist") + return 1 + + sym_table_path = global_sym_lib_table_path() + sym_table_path.parent.mkdir(parents=True, exist_ok=True) + + existing_text = "" + if sym_table_path.exists(): + existing_text = sym_table_path.read_text(encoding="utf-8") + + existing_libs = parse_existing_libs(existing_text) + + # Backup + if sym_table_path.exists(): + backup_path = sym_table_path.with_suffix(".bak") + backup_path.write_text(existing_text, encoding="utf-8") + print(f"Backup written: {backup_path}") + + # Filter out all existing UM_KICAD entries + existing_blocks = [] + removed = 0 + for m in LIB_BLOCK_RE.finditer(existing_text): + block = m.group(0).strip() + uri_m = URI_RE.search(block) + if uri_m: + uri = strip_atom(uri_m.group(1)) + if "${UM_KICAD}" in uri: + removed += 1 + continue + existing_blocks.append(block) + + # Scan UM_KICAD/lib/symbols + sym_files = sorted(symbols_dir.glob("*.kicad_sym")) + + if not 
sym_files: + print("No .kicad_sym files found.") + # Still rebuild table without UM_KICAD entries + output = ["(sym_lib_table"] + for block in existing_blocks: + output.append(" " + block.replace("\n", "\n ")) + output.append(")\n") + sym_table_path.write_text("\n".join(output), encoding="utf-8") + print(f"Removed {removed} UM_KICAD entries") + return 0 + + # Build new UM_KICAD blocks + new_blocks: List[str] = [] + used_names = set() + + # Collect names from non-UM_KICAD entries to avoid conflicts + for name, uri in existing_libs.items(): + if "${UM_KICAD}" not in uri: + used_names.add(name) + + for sym_file in sym_files: + rel_path = sym_file.relative_to(um_root).as_posix() + uri = "${UM_KICAD}/" + rel_path + + base_name = sym_file.stem + name = make_unique_name(base_name, used_names) + + block = create_lib_block( + name=name, + uri=uri, + descr=f"Auto-added from {rel_path}" + ) + + new_blocks.append(block) + used_names.add(name) + + # Rebuild table + all_blocks = existing_blocks + new_blocks + + output = ["(sym_lib_table"] + for block in all_blocks: + output.append(" " + block.replace("\n", "\n ")) + output.append(")\n") + + sym_table_path.write_text("\n".join(output), encoding="utf-8") + + print(f"Updated {sym_table_path}") + print(f"Found: {len(sym_files)} libraries") + print(f"Removed: {removed} old UM_KICAD entries") + print(f"Added: {len(new_blocks)} new UM_KICAD entries") + return 0 + + +def sync_footprint_libraries(um_root: Path) -> int: + """Synchronize footprint libraries""" + footprints_dir = um_root / "lib" / "footprints" + + if not footprints_dir.exists(): + print(f"Warning: {footprints_dir} does not exist, skipping footprint sync") + return 0 + + fp_table_path = global_fp_lib_table_path() + fp_table_path.parent.mkdir(parents=True, exist_ok=True) + + existing_text = "" + if fp_table_path.exists(): + existing_text = fp_table_path.read_text(encoding="utf-8") + + existing_libs = parse_existing_libs(existing_text) + + # Backup + if fp_table_path.exists(): 
+ backup_path = fp_table_path.with_suffix(".bak") + backup_path.write_text(existing_text, encoding="utf-8") + print(f"Backup written: {backup_path}") + + # Filter out all existing UM_KICAD entries + existing_blocks = [] + removed = 0 + for m in LIB_BLOCK_RE.finditer(existing_text): + block = m.group(0).strip() + uri_m = URI_RE.search(block) + if uri_m: + uri = strip_atom(uri_m.group(1)) + if "${UM_KICAD}" in uri: + removed += 1 + continue + existing_blocks.append(block) + + # Scan UM_KICAD/lib/footprints for *.pretty directories + pretty_dirs = sorted([d for d in footprints_dir.iterdir() if d.is_dir() and d.name.endswith('.pretty')]) + + if not pretty_dirs: + print("No .pretty footprint libraries found.") + # Still rebuild table without UM_KICAD entries + output = ["(fp_lib_table"] + for block in existing_blocks: + output.append(" " + block.replace("\n", "\n ")) + output.append(")\n") + fp_table_path.write_text("\n".join(output), encoding="utf-8") + print(f"Removed {removed} UM_KICAD footprint entries") + return 0 + + # Build new UM_KICAD blocks + new_blocks: List[str] = [] + used_names = set() + + # Collect names from non-UM_KICAD entries to avoid conflicts + for name, uri in existing_libs.items(): + if "${UM_KICAD}" not in uri: + used_names.add(name) + + for pretty_dir in pretty_dirs: + rel_path = pretty_dir.relative_to(um_root).as_posix() + uri = "${UM_KICAD}/" + rel_path + + base_name = pretty_dir.stem # Removes .pretty extension + name = make_unique_name(base_name, used_names) + + block = create_fp_lib_block( + name=name, + uri=uri, + descr=f"Auto-added from {rel_path}" + ) + + new_blocks.append(block) + used_names.add(name) + + # Rebuild table + all_blocks = existing_blocks + new_blocks + + output = ["(fp_lib_table"] + for block in all_blocks: + output.append(" " + block.replace("\n", "\n ")) + output.append(")\n") + + fp_table_path.write_text("\n".join(output), encoding="utf-8") + + print(f"Updated {fp_table_path}") + print(f"Found: {len(pretty_dirs)} 
footprint libraries") + print(f"Removed: {removed} old UM_KICAD entries") + print(f"Added: {len(new_blocks)} new UM_KICAD entries") + return 0 + + +def main() -> int: + um_root_env = os.environ.get("UM_KICAD") + if not um_root_env: + print("ERROR: UM_KICAD not set") + return 1 + + um_root = Path(um_root_env).resolve() + + print("=" * 60) + print("Syncing Symbol Libraries...") + print("=" * 60) + result = sync_symbol_libraries(um_root) + if result != 0: + return result + + print("\n" + "=" * 60) + print("Syncing Footprint Libraries...") + print("=" * 60) + result = sync_footprint_libraries(um_root) + if result != 0: + return result + + print("\n" + "=" * 60) + print("Restart KiCad to refresh libraries.") + print("=" * 60) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) \ No newline at end of file diff --git a/app.py b/app.py new file mode 100644 index 0000000..0530c4d --- /dev/null +++ b/app.py @@ -0,0 +1,763 @@ +import sys +import webbrowser +import threading +import subprocess +import os +import zipfile +import tempfile +import shutil +import json +from pathlib import Path +from flask import Flask, render_template, request, send_file, jsonify +from flask_socketio import SocketIO, emit +import time +from PyPDF2 import PdfMerger +from variant_manager import VariantManager + +app = Flask(__name__) +app.config['SECRET_KEY'] = 'secret!' 
+socketio = SocketIO(app, cors_allowed_origins="*") + +# Store arguments +app_args = {} +connected_clients = set() +heartbeat_timeout = 5 # seconds + +# Configuration +config_file = 'config.json' +app_config = {} + +def load_config(): + """Load configuration from file""" + global app_config + if os.path.exists(config_file): + with open(config_file, 'r') as f: + app_config = json.load(f) + else: + app_config = { + 'parts_spreadsheet_path': '' + } + return app_config + +def save_config(): + """Save configuration to file""" + with open(config_file, 'w') as f: + json.dump(app_config, f, indent=2) + +@app.route('/') +def index(): + # Reconstruct the command line that invoked this app + cmd_parts = [sys.argv[0]] + for i in range(1, len(sys.argv)): + arg = sys.argv[i] + # Quote arguments with spaces + if ' ' in arg: + cmd_parts.append(f'"{arg}"') + else: + cmd_parts.append(arg) + invocation_cmd = ' '.join(cmd_parts) + + return render_template('index.html', args=app_args, invocation_cmd=invocation_cmd) + +@socketio.on('connect') +def handle_connect(): + connected_clients.add(request.sid) + print(f"Client connected: {request.sid}") + +@socketio.on('disconnect') +def handle_disconnect(): + connected_clients.discard(request.sid) + print(f"Client disconnected: {request.sid}") + + # Shutdown if no clients connected + if not connected_clients: + print("No clients connected. 
Shutting down...") + threading.Timer(1.0, shutdown_server).start() + +@socketio.on('heartbeat') +def handle_heartbeat(): + emit('heartbeat_ack') + +@socketio.on('generate_pdf') +def handle_generate_pdf(): + try: + kicad_cli = app_args.get('Kicad Cli', '') + schematic_file = app_args.get('Schematic File', '') + board_file = app_args.get('Board File', '') + project_dir = app_args.get('Project Dir', '') + project_name = app_args.get('Project Name', 'project') + + if not kicad_cli: + emit('pdf_error', {'error': 'Missing kicad-cli argument'}) + return + + # Create temporary directory for PDFs + temp_dir = tempfile.mkdtemp() + schematics_dir = os.path.join(temp_dir, 'schematics') + board_dir = os.path.join(temp_dir, 'board') + os.makedirs(schematics_dir, exist_ok=True) + os.makedirs(board_dir, exist_ok=True) + + # Generate schematic PDF + if schematic_file: + emit('pdf_status', {'status': 'Generating schematic PDF...'}) + sch_pdf_path = os.path.join(schematics_dir, f'{project_name}_schematic.pdf') + cmd = [kicad_cli, 'sch', 'export', 'pdf', schematic_file, '-o', sch_pdf_path] + + result = subprocess.run(cmd, capture_output=True, text=True) + if result.returncode != 0: + shutil.rmtree(temp_dir) + emit('pdf_error', {'error': f'Schematic PDF failed: {result.stderr}'}) + return + + # Generate board layer PDFs - one per layer, then merge + if board_file: + emit('pdf_status', {'status': 'Generating board layer PDFs...'}) + + # All layers to export + layers = [ + ('F.Cu', 'Top_Copper'), + ('B.Cu', 'Bottom_Copper'), + ('F.Silkscreen', 'Top_Silkscreen'), + ('B.Silkscreen', 'Bottom_Silkscreen'), + ('F.Mask', 'Top_Soldermask'), + ('B.Mask', 'Bottom_Soldermask'), + ('F.Paste', 'Top_Paste'), + ('B.Paste', 'Bottom_Paste'), + ('Edge.Cuts', 'Board_Outline'), + ('F.Fab', 'Top_Fabrication'), + ('B.Fab', 'Bottom_Fabrication'), + ] + + temp_pdf_dir = os.path.join(temp_dir, 'temp_pdfs') + os.makedirs(temp_pdf_dir, exist_ok=True) + pdf_files = [] + + for layer_name, file_suffix in layers: + 
pdf_path = os.path.join(temp_pdf_dir, f'{file_suffix}.pdf') + + # Include Edge.Cuts on every layer except the Edge.Cuts layer itself + if layer_name == 'Edge.Cuts': + layers_to_export = layer_name + else: + layers_to_export = f"{layer_name},Edge.Cuts" + + cmd = [ + kicad_cli, 'pcb', 'export', 'pdf', + board_file, + '-l', layers_to_export, + '--include-border-title', + '-o', pdf_path + ] + + result = subprocess.run(cmd, capture_output=True, text=True) + if result.returncode == 0: + pdf_files.append(pdf_path) + else: + print(f"Warning: Failed to generate {layer_name}: {result.stderr}") + + # Merge all PDFs into one + if pdf_files: + emit('pdf_status', {'status': 'Merging board layer PDFs...'}) + merged_pdf_path = os.path.join(board_dir, f'{project_name}.pdf') + merger = PdfMerger() + + for pdf in pdf_files: + merger.append(pdf) + + merger.write(merged_pdf_path) + merger.close() + + # Delete temp PDF directory + shutil.rmtree(temp_pdf_dir) + + # Create ZIP file + emit('pdf_status', {'status': 'Creating ZIP archive...'}) + zip_filename = f'{project_name}_PDFs.zip' + zip_path = os.path.join(project_dir if project_dir else temp_dir, zip_filename) + + with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf: + for root, dirs, files in os.walk(temp_dir): + for file in files: + file_path = os.path.join(root, file) + arcname = os.path.relpath(file_path, temp_dir) + zipf.write(file_path, arcname) + + # Clean up temp directory + shutil.rmtree(temp_dir) + + emit('pdf_complete', {'path': zip_path, 'filename': zip_filename}) + + except Exception as e: + emit('pdf_error', {'error': str(e)}) + +@socketio.on('generate_gerbers') +def handle_generate_gerbers(): + try: + kicad_cli = app_args.get('Kicad Cli', '') + board_file = app_args.get('Board File', '') + project_dir = app_args.get('Project Dir', '') + project_name = app_args.get('Project Name', 'project') + + if not kicad_cli or not board_file: + emit('gerber_error', {'error': 'Missing kicad-cli or board-file arguments'}) 
+ return + + # Create temporary directory for gerbers + temp_dir = tempfile.mkdtemp() + gerber_dir = os.path.join(temp_dir, 'gerbers') + os.makedirs(gerber_dir, exist_ok=True) + + # Generate gerbers + emit('gerber_status', {'status': 'Generating gerber files...'}) + cmd = [kicad_cli, 'pcb', 'export', 'gerbers', board_file, '-o', gerber_dir] + result = subprocess.run(cmd, capture_output=True, text=True) + + if result.returncode != 0: + shutil.rmtree(temp_dir) + emit('gerber_error', {'error': f'Gerber generation failed: {result.stderr}'}) + return + + # Generate drill files + emit('gerber_status', {'status': 'Generating drill files...'}) + cmd = [kicad_cli, 'pcb', 'export', 'drill', board_file, '-o', gerber_dir] + result = subprocess.run(cmd, capture_output=True, text=True) + + if result.returncode != 0: + print(f"Warning: Drill file generation failed: {result.stderr}") + + # Generate ODB++ files + emit('gerber_status', {'status': 'Generating ODB++ files...'}) + odb_dir = os.path.join(temp_dir, 'odb') + os.makedirs(odb_dir, exist_ok=True) + odb_file = os.path.join(odb_dir, f'{project_name}.zip') + cmd = [kicad_cli, 'pcb', 'export', 'odb', board_file, '-o', odb_file] + result = subprocess.run(cmd, capture_output=True, text=True) + + if result.returncode != 0: + print(f"Warning: ODB++ generation failed: {result.stderr}") + + # Create ZIP file + emit('gerber_status', {'status': 'Creating ZIP archive...'}) + zip_filename = f'{project_name}_fab.zip' + zip_path = os.path.join(project_dir if project_dir else temp_dir, zip_filename) + + with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf: + # Add gerbers folder + for root, dirs, files in os.walk(gerber_dir): + for file in files: + file_path = os.path.join(root, file) + arcname = os.path.join('gerbers', os.path.basename(file_path)) + zipf.write(file_path, arcname) + + # Add odb folder + for root, dirs, files in os.walk(odb_dir): + for file in files: + file_path = os.path.join(root, file) + arcname = 
os.path.join('odb', os.path.relpath(file_path, odb_dir)) + zipf.write(file_path, arcname) + + # Clean up temp directory + shutil.rmtree(temp_dir) + + emit('gerber_complete', {'path': zip_path, 'filename': zip_filename}) + + except Exception as e: + emit('gerber_error', {'error': str(e)}) + +@socketio.on('sync_libraries') +def handle_sync_libraries(): + try: + emit('sync_status', {'status': 'Starting library synchronization...'}) + + # Check if UM_KICAD is set + um_kicad = os.environ.get('UM_KICAD') + if not um_kicad: + emit('sync_error', {'error': 'UM_KICAD environment variable is not set in the Flask app environment'}) + return + + emit('sync_status', {'status': f'UM_KICAD is set to: {um_kicad}'}) + + # Run the add_libraries.py script + script_path = os.path.join(os.path.dirname(__file__), 'add_libraries.py') + + if not os.path.exists(script_path): + emit('sync_error', {'error': 'add_libraries.py script not found'}) + return + + result = subprocess.run( + [sys.executable, script_path], + capture_output=True, + text=True, + env=os.environ.copy() + ) + + output = result.stdout + result.stderr + + if result.returncode == 0: + emit('sync_complete', {'output': output}) + else: + emit('sync_error', {'error': f'Sync failed:\n{output}'}) + + except Exception as e: + emit('sync_error', {'error': str(e)}) + +@socketio.on('sync_database') +def handle_sync_database(): + try: + emit('db_sync_status', {'status': 'Starting database synchronization...'}) + + # Get the parts spreadsheet path from config + parts_spreadsheet = app_config.get('parts_spreadsheet_path', '') + if not parts_spreadsheet: + emit('db_sync_error', {'error': 'Parts spreadsheet path not configured. 
Please set it in Settings.'}) + return + + if not os.path.exists(parts_spreadsheet): + emit('db_sync_error', {'error': f'Parts spreadsheet not found at: {parts_spreadsheet}'}) + return + + emit('db_sync_status', {'status': f'Using parts spreadsheet: {parts_spreadsheet}'}) + + # Run the gen_passives_db.py script + script_path = os.path.join(os.path.dirname(__file__), 'gen_passives_db.py') + + if not os.path.exists(script_path): + emit('db_sync_error', {'error': 'gen_passives_db.py script not found'}) + return + + result = subprocess.run( + [sys.executable, script_path, parts_spreadsheet], + capture_output=True, + text=True, + env=os.environ.copy() + ) + + output = result.stdout + result.stderr + + if result.returncode == 0: + emit('db_sync_complete', {'output': output}) + else: + emit('db_sync_error', {'error': f'Database sync failed:\n{output}'}) + + except Exception as e: + emit('db_sync_error', {'error': str(e)}) + +@socketio.on('init_user') +def handle_init_user(): + try: + emit('init_status', {'status': 'Starting user environment initialization...'}) + + # Check if UM_KICAD is set + um_kicad = os.environ.get('UM_KICAD') + if not um_kicad: + emit('init_error', {'error': 'UM_KICAD environment variable is not set'}) + return + + emit('init_status', {'status': f'UM_KICAD: {um_kicad}'}) + + # Run the init_user.py script + script_path = os.path.join(os.path.dirname(__file__), 'init_user.py') + + if not os.path.exists(script_path): + emit('init_error', {'error': 'init_user.py script not found'}) + return + + result = subprocess.run( + [sys.executable, script_path], + capture_output=True, + text=True, + env=os.environ.copy() + ) + + output = result.stdout + result.stderr + + if result.returncode == 0: + emit('init_complete', {'output': output}) + else: + emit('init_error', {'error': f'Initialization failed:\n{output}'}) + + except Exception as e: + emit('init_error', {'error': str(e)}) + +@app.route('/download/') +def download_file(filename): + project_dir = 
app_args.get('Project Dir', '') + file_path = os.path.join(project_dir, filename) + if os.path.exists(file_path): + return send_file(file_path, as_attachment=True) + return "File not found", 404 + +@app.route('/config', methods=['GET', 'POST']) +def config(): + if request.method == 'POST': + data = request.get_json() + app_config['parts_spreadsheet_path'] = data.get('parts_spreadsheet_path', '') + save_config() + return jsonify({'status': 'success', 'config': app_config}) + else: + return jsonify(app_config) + +@app.route('/variants') +def variants_page(): + return render_template('variants.html') + +# --------------------------------------------------------------------------- +# Variant Management Socket Handlers +# --------------------------------------------------------------------------- + +def get_variant_manager(): + """Get VariantManager instance for current project""" + schematic_file = app_args.get('Schematic File', '') + if not schematic_file or not os.path.exists(schematic_file): + return None + return VariantManager(schematic_file) + +def get_all_schematic_files(root_schematic): + """Get all schematic files in a hierarchical design""" + from pathlib import Path + + root_path = Path(root_schematic) + if not root_path.exists(): + return [root_schematic] + + schematic_files = [str(root_path)] + schematic_dir = root_path.parent + + try: + with open(root_path, 'r', encoding='utf-8') as f: + content = f.read() + + for line in content.split('\n'): + if '(property "Sheetfile"' in line: + parts = line.split('"') + if len(parts) >= 4: + sheet_file = parts[3] + sheet_path = schematic_dir / sheet_file + if sheet_path.exists(): + sub_sheets = get_all_schematic_files(str(sheet_path)) + for sub in sub_sheets: + if sub not in schematic_files: + schematic_files.append(sub) + except: + pass + + return schematic_files + + +def get_all_parts_from_schematic(): + """Get all component references, values, and UUIDs from all schematics (including hierarchical sheets)""" + 
schematic_file = app_args.get('Schematic File', '') + if not schematic_file or not os.path.exists(schematic_file): + return [] + + # Get all schematic files + all_schematics = get_all_schematic_files(schematic_file) + all_parts = {} # uuid -> {reference, value} + + for sch_file in all_schematics: + try: + with open(sch_file, 'r', encoding='utf-8') as f: + content = f.read() + + lines = content.split('\n') + in_symbol = False + current_uuid = None + current_ref = None + current_value = None + current_lib_id = None + + for line in lines: + stripped = line.strip() + + # Detect start of symbol + if stripped.startswith('(symbol'): + in_symbol = True + current_uuid = None + current_ref = None + current_value = None + current_lib_id = None + + # Detect end of symbol + elif in_symbol and stripped == ')': + # Save the part if we have all the info, excluding power symbols + is_power = current_lib_id and 'power:' in current_lib_id + is_power = is_power or (current_ref and current_ref.startswith('#')) + + if current_uuid and current_ref and not is_power and len(current_ref) > 1: + all_parts[current_uuid] = { + 'reference': current_ref, + 'value': current_value or '' + } + in_symbol = False + + # Extract lib_id to check for power symbols + elif in_symbol and '(lib_id' in stripped: + lib_parts = line.split('"') + if len(lib_parts) >= 2: + current_lib_id = lib_parts[1] + + # Extract UUID + elif in_symbol and '(uuid' in stripped: + uuid_parts = line.split('"') + if len(uuid_parts) >= 2: + current_uuid = uuid_parts[1] + + # Extract reference - format: (property "Reference" "U1" ... + elif in_symbol and '(property "Reference"' in line: + try: + start = line.find('"Reference"') + len('"Reference"') + remainder = line[start:] + quote_start = remainder.find('"') + if quote_start != -1: + quote_end = remainder.find('"', quote_start + 1) + if quote_end != -1: + current_ref = remainder[quote_start + 1:quote_end] + except: + pass + + # Extract value - format: (property "Value" "LM358" ... 
+ elif in_symbol and '(property "Value"' in line: + try: + start = line.find('"Value"') + len('"Value"') + remainder = line[start:] + quote_start = remainder.find('"') + if quote_start != -1: + quote_end = remainder.find('"', quote_start + 1) + if quote_end != -1: + current_value = remainder[quote_start + 1:quote_end] + except: + pass + + except Exception as e: + print(f"Error reading schematic {sch_file}: {e}") + + return [{'uuid': uuid, 'reference': data['reference'], 'value': data['value']} + for uuid, data in sorted(all_parts.items(), key=lambda x: x[1]['reference'])] + +@socketio.on('get_variants') +def handle_get_variants(): + try: + manager = get_variant_manager() + if not manager: + emit('variant_error', {'error': 'No project loaded'}) + return + + all_parts = get_all_parts_from_schematic() + + emit('variants_data', { + 'project_name': manager.project_name, + 'variants': manager.get_variants(), + 'active_variant': manager.get_active_variant(), + 'all_parts': all_parts # Now includes uuid, reference, and value + }) + except Exception as e: + emit('variant_error', {'error': str(e)}) + +@socketio.on('create_variant') +def handle_create_variant(data): + try: + manager = get_variant_manager() + if not manager: + emit('variant_error', {'error': 'No project loaded'}) + return + + name = data.get('name', '') + description = data.get('description', '') + based_on = data.get('based_on', None) + + if not name: + emit('variant_error', {'error': 'Variant name required'}) + return + + success = manager.create_variant(name, description, based_on) + if success: + emit('variant_updated', {'message': f'Variant "{name}" created'}) + else: + emit('variant_error', {'error': f'Variant "{name}" already exists'}) + except Exception as e: + emit('variant_error', {'error': str(e)}) + +@socketio.on('delete_variant') +def handle_delete_variant(data): + try: + manager = get_variant_manager() + if not manager: + emit('variant_error', {'error': 'No project loaded'}) + return + + name = 
data.get('name', '') + success = manager.delete_variant(name) + + if success: + emit('variant_updated', {'message': f'Variant "{name}" deleted'}) + else: + emit('variant_error', {'error': f'Cannot delete variant "{name}"'}) + except Exception as e: + emit('variant_error', {'error': str(e)}) + +@socketio.on('activate_variant') +def handle_activate_variant(data): + try: + manager = get_variant_manager() + if not manager: + emit('variant_error', {'error': 'No project loaded'}) + return + + name = data.get('name', '') + schematic_file = app_args.get('Schematic File', '') + kicad_cli = app_args.get('Kicad Cli', 'kicad-cli') + + # First, sync the current variant from schematic to capture any manual changes + current_variant = manager.get_active_variant() + + print(f"Syncing current variant '{current_variant}' before switching...") + # Import and call sync function directly instead of subprocess + from sync_variant import sync_variant_from_schematic + try: + sync_success = sync_variant_from_schematic(schematic_file, current_variant) + if sync_success: + print(f"Successfully synced variant '{current_variant}'") + # Reload the manager to get the updated data + manager = get_variant_manager() + else: + print(f"Warning: Sync of variant '{current_variant}' failed") + except Exception as e: + print(f"Error during sync: {e}") + import traceback + traceback.print_exc() + + # Now activate the new variant + success = manager.set_active_variant(name) + + if success: + # Apply new variant to schematic + apply_script_path = os.path.join(os.path.dirname(__file__), 'apply_variant.py') + + result = subprocess.run( + [sys.executable, apply_script_path, schematic_file, name, kicad_cli], + capture_output=True, + text=True + ) + + if result.returncode == 0: + emit('variant_updated', {'message': f'Synced "{current_variant}", then activated and applied "{name}"'}) + else: + error_msg = result.stderr if result.stderr else result.stdout + emit('variant_error', {'error': f'Failed to apply 
variant: {error_msg}'}) + else: + emit('variant_error', {'error': f'Variant "{name}" not found'}) + except Exception as e: + emit('variant_error', {'error': str(e)}) + +@socketio.on('get_variant_parts') +def handle_get_variant_parts(data): + try: + manager = get_variant_manager() + if not manager: + emit('variant_error', {'error': 'No project loaded'}) + return + + variant_name = data.get('variant', '') + all_parts = get_all_parts_from_schematic() + dnp_uuids = manager.get_dnp_parts(variant_name) + + parts_data = [] + for part in all_parts: + parts_data.append({ + 'uuid': part['uuid'], + 'reference': part['reference'], + 'value': part['value'], + 'is_dnp': part['uuid'] in dnp_uuids + }) + + emit('variant_parts_data', {'parts': parts_data}) + except Exception as e: + emit('variant_error', {'error': str(e)}) + +@socketio.on('set_part_dnp') +def handle_set_part_dnp(data): + try: + manager = get_variant_manager() + if not manager: + emit('variant_error', {'error': 'No project loaded'}) + return + + variant = data.get('variant', '') + uuid = data.get('uuid', '') + is_dnp = data.get('is_dnp', False) + + success = manager.set_part_dnp(variant, uuid, is_dnp) + if success: + # Re-send updated parts list + handle_get_variant_parts({'variant': variant}) + else: + emit('variant_error', {'error': 'Failed to update part'}) + except Exception as e: + emit('variant_error', {'error': str(e)}) + +@socketio.on('sync_from_schematic') +def handle_sync_from_schematic(): + try: + manager = get_variant_manager() + if not manager: + emit('variant_error', {'error': 'No project loaded'}) + return + + # Read DNP state from schematic and update active variant + schematic_file = app_args.get('Schematic File', '') + script_path = os.path.join(os.path.dirname(__file__), 'sync_variant.py') + + result = subprocess.run( + [sys.executable, script_path, schematic_file], + capture_output=True, + text=True + ) + + if result.returncode == 0: + emit('sync_complete', {'message': f'Synced from 
schematic:\n{result.stdout}'}) + else: + emit('variant_error', {'error': f'Failed to sync: {result.stderr}'}) + except Exception as e: + emit('variant_error', {'error': str(e)}) + +def shutdown_server(): + print("Server stopped") + os._exit(0) + +def parse_args(args): + """Parse command line arguments into a dictionary""" + parsed = {'executable': args[0] if args else ''} + + i = 1 + while i < len(args): + if args[i].startswith('--'): + key = args[i][2:].replace('-', ' ').title() + if i + 1 < len(args) and not args[i + 1].startswith('--'): + parsed[key] = args[i + 1] + i += 2 + else: + parsed[key] = 'true' + i += 1 + else: + i += 1 + + return parsed + +if __name__ == '__main__': + # Load configuration + load_config() + + # Parse arguments + app_args = parse_args(sys.argv) + + # Open browser after short delay + def open_browser(): + time.sleep(1.5) + webbrowser.open('http://127.0.0.1:5000') + + threading.Thread(target=open_browser, daemon=True).start() + + # Run the app + print("Starting Flask app...") + socketio.run(app, debug=False, host='127.0.0.1', port=5000) diff --git a/apply_variant.py b/apply_variant.py new file mode 100644 index 0000000..698dbf7 --- /dev/null +++ b/apply_variant.py @@ -0,0 +1,269 @@ +#!/usr/bin/env python3 +""" +apply_variant.py +================ +Apply a variant to a KiCad schematic by setting DNP (Do Not Place) flags on components. + +Uses KiCad CLI to modify the schematic. +""" + +import sys +import json +import subprocess +from pathlib import Path +from variant_manager import VariantManager + + +def get_all_schematic_files(root_schematic: str) -> list: + """ + Get all schematic files in a hierarchical design. 
+ + Args: + root_schematic: Path to root .kicad_sch file + + Returns: + List of all schematic file paths (including root) + """ + root_path = Path(root_schematic) + if not root_path.exists(): + return [root_schematic] + + schematic_files = [str(root_path)] + schematic_dir = root_path.parent + + # Read root schematic to find sheet files + try: + with open(root_path, 'r', encoding='utf-8') as f: + content = f.read() + + # Find all sheet file references + for line in content.split('\n'): + if '(property "Sheetfile"' in line: + parts = line.split('"') + if len(parts) >= 4: + sheet_file = parts[3] + sheet_path = schematic_dir / sheet_file + if sheet_path.exists(): + # Recursively get sheets from this sheet + sub_sheets = get_all_schematic_files(str(sheet_path)) + for sub in sub_sheets: + if sub not in schematic_files: + schematic_files.append(sub) + except Exception as e: + print(f"Warning: Error reading sheet files: {e}") + + return schematic_files + + +def apply_variant_to_schematic(schematic_file: str, variant_name: str, kicad_cli: str = "kicad-cli") -> bool: + """ + Apply a variant to a schematic by setting DNP flags. + Handles hierarchical schematics by processing all sheets. 
+ + Args: + schematic_file: Path to root .kicad_sch file + variant_name: Name of variant to apply + kicad_cli: Path to kicad-cli executable + + Returns: + True if successful, False otherwise + """ + manager = VariantManager(schematic_file) + + if variant_name not in manager.get_variants(): + print(f"Error: Variant '{variant_name}' not found") + return False + + dnp_parts = manager.get_dnp_parts(variant_name) + + print(f"Applying variant '{variant_name}' to {Path(schematic_file).name}") + print(f"DNP parts ({len(dnp_parts)}): {dnp_parts}") + + # Get all schematic files (root + hierarchical sheets) + all_schematics = get_all_schematic_files(schematic_file) + print(f"Processing {len(all_schematics)} schematic file(s)") + + overall_success = True + + # Process each schematic file + for idx, sch_file in enumerate(all_schematics): + is_root = (idx == 0) # First file is the root schematic + if not process_single_schematic(sch_file, dnp_parts, variant_name, is_root): + overall_success = False + + if overall_success: + print(f"\nVariant '{variant_name}' applied successfully") + print(f"Please reload the schematic in KiCad to see changes") + + return overall_success + + +def process_single_schematic(schematic_file: str, dnp_uuids: list, variant_name: str = None, is_root: bool = False) -> bool: + """ + Process a single schematic file to apply DNP flags. 
+ + Args: + schematic_file: Path to .kicad_sch file + dnp_uuids: List of UUIDs that should be DNP + variant_name: Name of variant being applied (for title block) + is_root: True if this is the root schematic (not a sub-sheet) + + Returns: + True if successful, False otherwise + """ + sch_path = Path(schematic_file) + if not sch_path.exists(): + print(f"Error: Schematic file not found: {schematic_file}") + return False + + print(f"\n Processing: {sch_path.name}") + + try: + with open(sch_path, 'r', encoding='utf-8') as f: + content = f.read() + + # Parse schematic and set DNP flags + # KiCad 9 schematic format uses S-expressions + # Component structure: + # (symbol + # (lib_id ...) + # (at ...) + # (uuid "...") <- UUID appears here + # (dnp no) <- DNP flag appears here + # (property "Reference" "U1" ...) + # ... + # ) + + lines = content.split('\n') + modified = False + + # Update title block comment if this is the root schematic + if is_root and variant_name: + in_title_block = False + for i, line in enumerate(lines): + stripped = line.strip() + + if stripped.startswith('(title_block'): + in_title_block = True + elif in_title_block and stripped == ')': + in_title_block = False + elif in_title_block and '(comment 1' in stripped: + # Update comment 1 with variant name + indent = line[:len(line) - len(line.lstrip())] + new_line = indent + f'(comment 1 "{variant_name}")' + if lines[i] != new_line: + lines[i] = new_line + modified = True + print(f" Updated title block: Variant = {variant_name}") + break + + for i, line in enumerate(lines): + stripped = line.strip() + + # Look for DNP lines + if '(dnp' in stripped and (stripped.startswith('(dnp') or '\t(dnp' in line or ' (dnp' in line): + # Find the UUID for this symbol by looking forward (UUID comes after DNP) + current_uuid = None + current_ref = None + is_power_symbol = False + + # Look backward to check for power symbols + for j in range(i - 1, max(0, i - 10), -1): + if '(lib_id' in lines[j] and 'power:' in lines[j]: 
+ is_power_symbol = True + break + if lines[j].strip().startswith('(symbol'): + break + + # Skip power symbols + if is_power_symbol: + continue + + # Look forward for UUID (it comes right after DNP in the structure) + for j in range(i + 1, min(len(lines), i + 10)): + if '(uuid' in lines[j]: + # Extract UUID from line like: (uuid "681abb84-6eb2-4c95-9a2f-a9fc19a34beb") + # Make sure it's at symbol level (minimal indentation) + if '\t(uuid' in lines[j] or ' (uuid' in lines[j]: + uuid_parts = lines[j].split('"') + if len(uuid_parts) >= 2: + current_uuid = uuid_parts[1] + break + # Stop if we hit properties or other structures + if '(property "Reference"' in lines[j]: + break + + # Look forward for reference (for logging purposes) + for j in range(i + 1, min(len(lines), i + 20)): + if '(property "Reference"' in lines[j]: + ref_parts = lines[j].split('"') + if len(ref_parts) >= 4: + current_ref = ref_parts[3] + # Also skip if reference starts with # + if current_ref.startswith('#'): + is_power_symbol = True + break + if lines[j].strip().startswith('(symbol') or (lines[j].strip() == ')' and len(lines[j]) - len(lines[j].lstrip()) <= len(line) - len(line.lstrip())): + break + + if current_uuid and not is_power_symbol: + # Get indentation + indent = line[:len(line) - len(line.lstrip())] + + # Check if this part should be DNP + should_be_dnp = current_uuid in dnp_uuids + + # Determine what the DNP line should say + if should_be_dnp: + target_dnp = '(dnp yes)' + else: + target_dnp = '(dnp no)' + + # Update DNP flag if it's different from target + if stripped != target_dnp.strip(): + lines[i] = indent + target_dnp + modified = True + if should_be_dnp: + print(f" Set DNP: {current_ref if current_ref else current_uuid}") + else: + print(f" Cleared DNP: {current_ref if current_ref else current_uuid}") + + if modified: + # Backup original file + backup_path = sch_path.with_suffix('.kicad_sch.bak') + + # Remove old backup if it exists + if backup_path.exists(): + 
backup_path.unlink() + + # Create new backup + import shutil + shutil.copy2(sch_path, backup_path) + print(f" Backup created: {backup_path.name}") + + # Write modified schematic + with open(sch_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + + print(f" Updated successfully") + else: + print(f" No changes needed") + + return True + + except Exception as e: + print(f" Error: {e}") + return False + + +if __name__ == "__main__": + if len(sys.argv) < 3: + print("Usage: python apply_variant.py [kicad-cli]") + sys.exit(1) + + schematic = sys.argv[1] + variant = sys.argv[2] + kicad_cli = sys.argv[3] if len(sys.argv) > 3 else "kicad-cli" + + success = apply_variant_to_schematic(schematic, variant, kicad_cli) + sys.exit(0 if success else 1) diff --git a/config.json b/config.json new file mode 100644 index 0000000..b646e01 --- /dev/null +++ b/config.json @@ -0,0 +1,3 @@ +{ + "parts_spreadsheet_path": "D:\\svn\\pcb\\PN\\parts_list_pcb.xlsx" +} \ No newline at end of file diff --git a/export_bom.py b/export_bom.py new file mode 100644 index 0000000..abdf6a2 --- /dev/null +++ b/export_bom.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python3 +""" +BOM CSV Exporter +================ +Reads extract_symbols.json and writes bom.csv with columns: + Reference, MPN, MFG + +Power symbols (#PWR, #FLG) are excluded. + +Usage: + python3 export_bom.py [project_dir] +""" + +import csv +import json +import sys +from pathlib import Path + + +def main(project_dir: Path): + json_path = project_dir / 'extract_symbols.json' + if not json_path.exists(): + print(f"Error: {json_path} not found. 
Run extract_symbols.py first.", file=sys.stderr) + sys.exit(1) + + data = json.loads(json_path.read_text(encoding='utf-8')) + symbols = data['symbols'] + + # Filter out power/flag symbols, DNP parts, and parts excluded from BOM + parts = [ + s for s in symbols + if not (s.get('reference') or '').startswith(('#', '~')) + and not (s.get('lib_id') or '').startswith('power:') + and s.get('in_bom') is not False + and not s.get('dnp') + ] + + # Collapse multi-unit / duplicate records to one row per reference. + # If multiple records exist for the same ref, pick the one with the + # most complete MPN/MFG data (longest non-placeholder string). + def data_score(s): + props = s.get('properties', {}) + mpn = props.get('MPN', '') + mfg = props.get('MFG') or props.get('MANUFACTURER', '') + placeholder = mpn.strip().lower() in ('', 'x', 'tbd', 'n/a', 'na') + return (0 if placeholder else len(mpn) + len(mfg)) + + from collections import defaultdict + by_ref: dict[str, list] = defaultdict(list) + for s in parts: + by_ref[s.get('reference', '')].append(s) + + best: list[dict] = [] + for ref, recs in by_ref.items(): + best.append(max(recs, key=data_score)) + + # Sort by reference + def ref_sort_key(r): + ref = r.get('reference') or '' + letters = ''.join(c for c in ref if c.isalpha()) + digits = ''.join(c for c in ref if c.isdigit()) + return (letters, int(digits) if digits else 0) + + best.sort(key=ref_sort_key) + + out_path = project_dir / 'bom.csv' + with out_path.open('w', newline='', encoding='utf-8') as f: + writer = csv.writer(f) + writer.writerow(['Reference', 'MPN', 'MFG']) + for s in best: + props = s.get('properties', {}) + writer.writerow([ + s.get('reference', ''), + props.get('MPN', ''), + props.get('MFG') or props.get('MANUFACTURER', ''), + ]) + + excluded_bom = sum(1 for s in symbols if s.get('in_bom') is False + and not (s.get('reference') or '').startswith(('#', '~'))) + excluded_dnp = sum(1 for s in symbols if s.get('dnp') + and not (s.get('reference') or 
'').startswith(('#', '~')) + and s.get('in_bom') is not False) + print(f"Excluded: {excluded_bom} 'exclude from BOM', {excluded_dnp} DNP") + print(f"Wrote {len(best)} unique references to {out_path} (collapsed from {len(parts)} records)") + + +if __name__ == '__main__': + project_dir = Path(sys.argv[1]).resolve() if len(sys.argv) > 1 else Path(__file__).parent.resolve() + main(project_dir) diff --git a/extract_symbols.py b/extract_symbols.py new file mode 100644 index 0000000..bd19664 --- /dev/null +++ b/extract_symbols.py @@ -0,0 +1,517 @@ +#!/usr/bin/env python3 +""" +KiCad 9 Symbol Metadata Extractor +================================== +Walks every .kicad_sch file in the project directory and extracts +metadata for every placed symbol (component instance), correctly +expanding hierarchical sheet instances so that each unique reference +in the final design becomes its own record. + +KiCad stores multi-instance sheets by embedding an `(instances ...)` +block in each symbol. That block contains one `(path ...)` entry per +sheet instantiation, each with the authoritative reference for that +copy. This script reads those paths so a sheet used N times produces +N distinct records per symbol. + +Output: extract_symbols.json (same directory as this script) + +Usage: + python3 extract_symbols.py [project_dir] + +If project_dir is omitted, the directory containing this script is used. +""" + +import json +import sys +from pathlib import Path + + +# --------------------------------------------------------------------------- +# S-expression parser +# --------------------------------------------------------------------------- + +def _tokenize(text: str) -> list: + """ + Convert raw KiCad S-expression text into a flat list of tokens. 
+ Token forms: + ('OPEN',) – opening paren + ('CLOSE',) – closing paren + ('ATOM', value) – unquoted word / number / bool + ('STR', value) – double-quoted string (escapes resolved) + """ + tokens = [] + i, n = 0, len(text) + while i < n: + c = text[i] + if c in ' \t\r\n': + i += 1 + elif c == '(': + tokens.append(('OPEN',)) + i += 1 + elif c == ')': + tokens.append(('CLOSE',)) + i += 1 + elif c == '"': + j = i + 1 + buf = [] + while j < n: + if text[j] == '\\' and j + 1 < n: + buf.append(text[j + 1]) + j += 2 + elif text[j] == '"': + j += 1 + break + else: + buf.append(text[j]) + j += 1 + tokens.append(('STR', ''.join(buf))) + i = j + else: + j = i + while j < n and text[j] not in ' \t\r\n()': + j += 1 + tokens.append(('ATOM', text[i:j])) + i = j + return tokens + + +def _parse(tokens: list, pos: int) -> tuple: + """ + Recursively parse one S-expression value starting at *pos*. + Returns (parsed_value, next_pos). + A list/node becomes a Python list; atoms and strings become strings. + """ + tok = tokens[pos] + kind = tok[0] + if kind == 'OPEN': + pos += 1 + items = [] + while tokens[pos][0] != 'CLOSE': + item, pos = _parse(tokens, pos) + items.append(item) + return items, pos + 1 # consume CLOSE + elif kind in ('ATOM', 'STR'): + return tok[1], pos + 1 + else: + raise ValueError(f"Unexpected token at pos {pos}: {tok}") + + +def parse_sexp(text: str): + """Parse a complete KiCad S-expression file. 
Returns the root list.""" + tokens = _tokenize(text) + root, _ = _parse(tokens, 0) + return root + + +# --------------------------------------------------------------------------- +# Helpers to navigate parsed S-expressions +# --------------------------------------------------------------------------- + +def tag(node) -> str: + if isinstance(node, list) and node and isinstance(node[0], str): + return node[0] + return '' + + +def children(node: list) -> list: + return node[1:] if isinstance(node, list) else [] + + +def first_child_with_tag(node: list, name: str): + for child in children(node): + if isinstance(child, list) and tag(child) == name: + return child + return None + + +def all_children_with_tag(node: list, name: str) -> list: + return [c for c in children(node) if isinstance(c, list) and tag(c) == name] + + +def scalar(node, index: int = 1, default=None): + if isinstance(node, list) and len(node) > index: + return node[index] + return default + + +# --------------------------------------------------------------------------- +# Instance path extraction +# --------------------------------------------------------------------------- + +def extract_instances(sym_node: list) -> list[dict]: + """ + Parse the (instances ...) block of a symbol and return one dict per + hierarchical path. Each dict has: + path – the full UUID path string + reference – the reference designator for that instance + unit – the unit number for that instance + project – the project name + + If there is no instances block (unusual), returns an empty list. 
+ """ + instances_node = first_child_with_tag(sym_node, 'instances') + if instances_node is None: + return [] + + results = [] + for project_node in all_children_with_tag(instances_node, 'project'): + project_name = scalar(project_node, 1, '') + for path_node in all_children_with_tag(project_node, 'path'): + path_str = scalar(path_node, 1, '') + ref_node = first_child_with_tag(path_node, 'reference') + unit_node = first_child_with_tag(path_node, 'unit') + results.append({ + 'path': path_str, + 'reference': scalar(ref_node, 1) if ref_node else None, + 'unit': scalar(unit_node, 1) if unit_node else None, + 'project': project_name, + }) + return results + + +# --------------------------------------------------------------------------- +# Symbol extraction +# --------------------------------------------------------------------------- + +def extract_symbol_records(sym_node: list, sheet_file: str) -> list[dict]: + """ + Extract metadata from a placed-symbol node and return one record per + hierarchical instance (i.e. one record per path in the instances block). + + For a sheet used only once, this produces a single record. + For a sheet instantiated N times, this produces N records — each with + its own unique reference designator from the instances block. 
+ """ + # --- Shared fields (same for all instances of this symbol placement) --- + shared = { + 'sheet_file': sheet_file, + 'lib_id': None, + 'at': None, + 'exclude_from_sim': None, + 'in_bom': None, + 'on_board': None, + 'dnp': None, + 'uuid': None, + 'properties': {}, + } + + for child in children(sym_node): + if not isinstance(child, list): + continue + t = tag(child) + if t == 'lib_id': + shared['lib_id'] = scalar(child, 1) + elif t == 'at': + shared['at'] = { + 'x': scalar(child, 1), + 'y': scalar(child, 2), + 'angle': scalar(child, 3, 0), + } + elif t == 'exclude_from_sim': + shared['exclude_from_sim'] = scalar(child, 1) == 'yes' + elif t == 'in_bom': + shared['in_bom'] = scalar(child, 1) == 'yes' + elif t == 'on_board': + shared['on_board'] = scalar(child, 1) == 'yes' + elif t == 'dnp': + shared['dnp'] = scalar(child, 1) == 'yes' + elif t == 'uuid': + shared['uuid'] = scalar(child, 1) + elif t == 'property': + prop_name = scalar(child, 1) + prop_val = scalar(child, 2) + if prop_name is not None: + shared['properties'][prop_name] = prop_val + + # Promote standard properties for convenient access + props = shared['properties'] + shared['value'] = props.get('Value') + shared['footprint'] = props.get('Footprint') + shared['datasheet'] = props.get('Datasheet') + shared['description'] = props.get('Description') + + # --- Per-instance fields (one record per path in instances block) --- + instances = extract_instances(sym_node) + + if not instances: + # Fallback: no instances block — use top-level Reference property + record = dict(shared) + record['reference'] = props.get('Reference') + record['instance_path'] = None + record['instance_unit'] = shared.get('unit') + record['instance_project']= None + return [record] + + records = [] + for inst in instances: + record = dict(shared) + record['properties'] = dict(shared['properties']) # copy so each is independent + record['reference'] = inst['reference'] + record['instance_path'] = inst['path'] + 
record['instance_unit'] = inst['unit'] + record['instance_project'] = inst['project'] + records.append(record) + + return records + + +# --------------------------------------------------------------------------- +# Hierarchy walker +# --------------------------------------------------------------------------- + +def find_reachable_sheets(root_sch: Path) -> list[Path]: + """ + Walk the sheet hierarchy starting from *root_sch* and return an ordered + list of every .kicad_sch file that is actually reachable (i.e. referenced + directly or transitively as a sub-sheet). Handles repeated sub-sheet + references (same file used N times) by visiting the file only once. + """ + reachable: list[Path] = [] + visited_names: set[str] = set() + queue: list[Path] = [root_sch] + + while queue: + sch = queue.pop(0) + if sch.name in visited_names: + continue + visited_names.add(sch.name) + reachable.append(sch) + + try: + text = sch.read_text(encoding='utf-8') + except OSError: + continue + + root_node = parse_sexp(text) + for child in children(root_node): + if tag(child) != 'sheet': + continue + for prop in all_children_with_tag(child, 'property'): + if scalar(prop, 1) == 'Sheetfile': + child_filename = scalar(prop, 2) + if child_filename: + child_path = sch.parent / child_filename + if child_path.exists() and child_path.name not in visited_names: + queue.append(child_path) + + return reachable + + +# --------------------------------------------------------------------------- +# Per-file parsing +# --------------------------------------------------------------------------- + +def extract_from_schematic(sch_path: Path) -> list[dict]: + """ + Parse one .kicad_sch file and return a list of symbol records. + lib_symbols definitions are skipped; only placed instances are returned. 
+ """ + text = sch_path.read_text(encoding='utf-8') + root = parse_sexp(text) + + results = [] + for child in children(root): + if not isinstance(child, list): + continue + t = tag(child) + if t == 'lib_symbols': + continue # skip library definitions + if t == 'symbol' and first_child_with_tag(child, 'lib_id') is not None: + records = extract_symbol_records(child, sch_path.name) + results.extend(records) + + return results + + +# --------------------------------------------------------------------------- +# Main +# --------------------------------------------------------------------------- + +def get_root_uuid(project_dir: Path) -> str | None: + """ + Find the UUID of the root schematic by reading the .kicad_pro file + (which names the root sheet) or by scanning for the top-level sheet. + Returns the UUID string, or None if it cannot be determined. + """ + # The .kicad_pro file tells us the root schematic filename + pro_files = list(project_dir.glob('*.kicad_pro')) + root_sch: Path | None = None + + if pro_files: + import json as _json + try: + pro = _json.loads(pro_files[0].read_text(encoding='utf-8')) + root_name = pro.get('sheets', [{}])[0] if pro.get('sheets') else None + # Fall back: just find a .kicad_sch with the same stem as the .pro + root_sch = project_dir / (pro_files[0].stem + '.kicad_sch') + except Exception: + pass + + if root_sch is None or not root_sch.exists(): + # Guess: the .kicad_sch whose stem matches the .kicad_pro + if pro_files: + candidate = project_dir / (pro_files[0].stem + '.kicad_sch') + if candidate.exists(): + root_sch = candidate + + if root_sch is None or not root_sch.exists(): + return None + + # Extract the first (uuid ...) 
at the root level of the file + import re + text = root_sch.read_text(encoding='utf-8') + m = re.search(r'\(uuid\s+"([^"]+)"', text) + return m.group(1) if m else None + + +def main(project_dir: Path): + # Determine root schematic and walk the real hierarchy + root_uuid = get_root_uuid(project_dir) + + pro_files = list(project_dir.glob('*.kicad_pro')) + root_sch = project_dir / (pro_files[0].stem + '.kicad_sch') if pro_files else None + + if root_sch and root_sch.exists(): + sch_files = find_reachable_sheets(root_sch) + print(f"Root sheet: {root_sch.name}") + print(f"Found {len(sch_files)} reachable schematic file(s) in hierarchy:") + else: + # Fallback: glob everything + sch_files = sorted( + p for p in project_dir.rglob('*.kicad_sch') + if not p.name.startswith('_autosave') + and not p.suffix.endswith('.bak') + ) + print(f"Warning: could not find root schematic; scanning all {len(sch_files)} files.\n") + + if not sch_files: + print(f"No .kicad_sch files found in {project_dir}", file=sys.stderr) + sys.exit(1) + + for f in sch_files: + print(f" {f.relative_to(project_dir)}") + + all_records: list[dict] = [] + + for sch_path in sch_files: + print(f"\nParsing {sch_path.name} ...", end=' ', flush=True) + records = extract_from_schematic(sch_path) + print(f"{len(records)} instance record(s)") + all_records.extend(records) + + # All records come from reachable sheets, so no orphan filtering needed. + # Optionally still filter by root UUID to catch stale instance paths. + if root_uuid: + active_prefix = f'/{root_uuid}/' + active = [r for r in all_records + if (r.get('instance_path') or '').startswith(active_prefix)] + stale = len(all_records) - len(active) + print(f"\nTotal records : {len(all_records)}") + if stale: + print(f"Stale paths dropped: {stale}") + else: + active = all_records + print(f"\nTotal records: {len(all_records)}") + + # ---- Stage 1: dedup by (instance_path, uuid) ---- + # Collapses records that were seen from multiple sheet scans into one. 
+ seen: set = set() + stage1: list[dict] = [] + for r in active: + key = (r.get('instance_path'), r.get('uuid')) + if key not in seen: + seen.add(key) + stage1.append(r) + + # ---- Stage 2: dedup by uuid across different sheet files ---- + # If the SAME uuid appears in two *different* .kicad_sch files, that is a + # UUID collision in the design (copy-paste without UUID regeneration). + # The same uuid appearing in the same sheet file with different instance + # paths is *correct* — it is how multi-instance sheets work, so those are + # left alone. + uuid_sheets: dict = {} # uuid -> set of sheet_files seen + uuid_collisions: dict = {} # uuid -> list of colliding records + unique: list[dict] = [] + for r in stage1: + u = r.get('uuid') + sf = r.get('sheet_file', '') + sheets_so_far = uuid_sheets.setdefault(u, set()) + if not sheets_so_far or sf in sheets_so_far: + # First time seeing this uuid, OR it's from the same sheet file + # (legitimate multi-instance expansion) — keep it. + sheets_so_far.add(sf) + unique.append(r) + else: + # Same uuid, but from a DIFFERENT sheet file → UUID collision. + uuid_collisions.setdefault(u, []).append(r) + # Don't append to unique — drop the duplicate. + + if uuid_collisions: + print(f"\nNote: {len(uuid_collisions)} UUID collision(s) detected " + f"(same symbol UUID in multiple sheet files — likely copy-paste artifacts).") + print(" Only the first occurrence is kept in the output.") + for u, recs in list(uuid_collisions.items())[:10]: + refs = [r.get('reference') for r in recs] + files = [r.get('sheet_file') for r in recs] + print(f" uuid={u[:8]}... 
refs={refs} sheets={files}") + + print(f"\nUnique instances after dedup: {len(unique)}") + + # Separate power symbols from real parts + real = [r for r in unique if not (r.get('lib_id') or '').startswith('power:')] + power = [r for r in unique if (r.get('lib_id') or '').startswith('power:')] + print(f" Non-power parts : {len(real)}") + print(f" Power symbols : {len(power)}") + + # Check for true reference duplicates (same ref, different uuid = multi-unit) + from collections import defaultdict, Counter + by_ref: dict[str, list] = defaultdict(list) + for r in unique: + by_ref[r.get('reference', '')].append(r) + + multi_unit = {ref: recs for ref, recs in by_ref.items() + if len(recs) > 1 and len({r['uuid'] for r in recs}) > 1} + if multi_unit: + refs = [r for r in multi_unit if not r.startswith('#')] + if refs: + print(f"\nMulti-unit components ({len(refs)} references, expected for split-unit symbols):") + for ref in sorted(refs): + units = [r['instance_unit'] for r in multi_unit[ref]] + print(f" {ref}: units {units}") + + output = { + "project_dir": str(project_dir), + "root_uuid": root_uuid, + "schematic_files": [str(f.relative_to(project_dir)) for f in sch_files], + "total_instances": len(unique), + "non_power_count": len(real), + "symbols": unique, + } + + out_path = project_dir / 'extract_symbols.json' + out_path.write_text(json.dumps(output, indent=2, ensure_ascii=False), encoding='utf-8') + print(f"\nOutput written to: {out_path}") + + # Print a summary table + print("\n--- Summary (non-power parts, sorted by reference) ---") + for r in sorted(real, key=lambda x: x.get('reference') or ''): + ref = r.get('reference', '') + value = r.get('value', '') + lib = r.get('lib_id', '') + mpn = r['properties'].get('MPN', '') + sheet = r.get('sheet_file', '') + unit = r.get('instance_unit', '') + print(f" {ref:<12} u{unit:<2} {value:<30} {lib:<40} MPN={mpn:<25} [{sheet}]") + + +if __name__ == '__main__': + if len(sys.argv) > 1: + project_dir = Path(sys.argv[1]).resolve() + 
else: + project_dir = Path(__file__).parent.resolve() + + if not project_dir.is_dir(): + print(f"Error: {project_dir} is not a directory", file=sys.stderr) + sys.exit(1) + + main(project_dir) diff --git a/gen_passives_db.py b/gen_passives_db.py new file mode 100644 index 0000000..6a201ac --- /dev/null +++ b/gen_passives_db.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python3 +""" +gen_resistors_db.py +=================== +Reads the approved parts list spreadsheet and adds surface mount resistor +and capacitor records to the KiCad SQLite database. + +Processes all SMD resistors (0402, 0603, 0805, etc.) and capacitors from +the spreadsheet. + +Each part becomes a database record with: + ipn ← GLE P/N (or generated ID if missing) + description ← Description column + value ← Value1 (e.g. "10k", "4.7k", "100") + footprint ← Standard KiCad footprint based on size (e.g., "Resistor_SMD:R_0402_1005Metric") + fp_display ← Footprint column from spreadsheet (for display purposes) + symbol ← "UM_template:R" for resistors, "UM_template:C" for capacitors + mpn ← Mfg.1 P/N + manufacturer ← Mfg.1 + datasheet ← (empty for now) + class ← Class column + +Where multiple approved vendors share the same value+tolerance+footprint, +only the first row is used (duplicates are reported and skipped). + +Usage: + python3 gen_resistors_db.py + +The script reads the database path from ../database/parts.sqlite relative +to this script. +""" + +import re +import sys +import sqlite3 +import pandas as pd +from pathlib import Path + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def get_footprint(description: str, part_type: str) -> str: + """ + Extract footprint size from description and return standard KiCad footprint. 
+ + Args: + description: Part description containing size (e.g., "0402", "0603") + part_type: "resistor" or "capacitor" + + Returns: + Standard KiCad footprint string + """ + # Footprint size mapping to KiCad standard footprints + resistor_footprints = { + '0201': 'Resistor_SMD:R_0201_0603Metric', + '0402': 'Resistor_SMD:R_0402_1005Metric', + '0603': 'Resistor_SMD:R_0603_1608Metric', + '0805': 'Resistor_SMD:R_0805_2012Metric', + '1206': 'Resistor_SMD:R_1206_3216Metric', + '1210': 'Resistor_SMD:R_1210_3225Metric', + '2010': 'Resistor_SMD:R_2010_5025Metric', + '2512': 'Resistor_SMD:R_2512_6332Metric', + } + + capacitor_footprints = { + '0201': 'Capacitor_SMD:C_0201_0603Metric', + '0402': 'Capacitor_SMD:C_0402_1005Metric', + '0603': 'Capacitor_SMD:C_0603_1608Metric', + '0805': 'Capacitor_SMD:C_0805_2012Metric', + '1206': 'Capacitor_SMD:C_1206_3216Metric', + '1210': 'Capacitor_SMD:C_1210_3225Metric', + '2010': 'Capacitor_SMD:C_2010_5025Metric', + '2512': 'Capacitor_SMD:C_2512_6332Metric', + } + + # Extract size from description + size_match = re.search(r'\b(0201|0402|0603|0805|1206|1210|2010|2512)\b', description) + if not size_match: + return "" + + size = size_match.group(1) + + if part_type == "resistor": + return resistor_footprints.get(size, "") + elif part_type == "capacitor": + return capacitor_footprints.get(size, "") + + return "" + + +def process_parts(parts_df: pd.DataFrame, part_type: str, symbol: str, + cursor, existing_ipns: set) -> tuple[int, int, list]: + """ + Process a dataframe of parts (resistors or capacitors) and insert/update in database. 
+ + Args: + parts_df: DataFrame containing the parts to process + part_type: "resistor" or "capacitor" (for reporting) + symbol: KiCad symbol reference (e.g., "UM_template:R") + cursor: Database cursor + existing_ipns: Set of existing IPNs in database + + Returns: + Tuple of (added_count, updated_count, skipped_list) + """ + added = 0 + updated = 0 + skipped = [] + seen_parts: dict[str, str] = {} # value+tol+footprint → GLE P/N of first occurrence + + for _, row in parts_df.iterrows(): + gle_pn = str(row['GLE P/N']).strip() + value = str(row['Value1']).strip() + description = str(row['Description']).strip() + mfg = str(row['Mfg.1']).strip() + mpn = str(row['Mfg.1 P/N']).strip() + part_class = str(row.get('Class', '')).strip() + fp_display = str(row.get('Footprint', '')).strip() # From spreadsheet for display + + if not gle_pn: + skipped.append((value, '(no GLE P/N)')) + continue + + # Get standard KiCad footprint based on size in description + footprint = get_footprint(description, part_type) + if not footprint: + skipped.append((value, f'could not determine footprint size from: {description}')) + continue + + # Create unique key from value+tolerance+footprint to detect duplicates + # Extract tolerance from description + tol_match = re.search(r'(\d+(?:\.\d+)?%)', description) + tolerance = tol_match.group(1) if tol_match else 'X' + part_key = f"{value}_{tolerance}_{footprint}" + + # Skip duplicate value+tolerance+footprint combinations (alternate approved vendors) + if part_key in seen_parts: + skipped.append((value, f'dup value/tol/fp, first: {seen_parts[part_key]}, this: {gle_pn}')) + continue + seen_parts[part_key] = gle_pn + + # Prepare database record + ipn = gle_pn + datasheet = "" # Could be populated from spreadsheet if available + + # Insert or update record + if ipn in existing_ipns: + cursor.execute(""" + UPDATE parts + SET description = ?, value = ?, footprint = ?, symbol = ?, + mpn = ?, manufacturer = ?, datasheet = ?, class = ?, fp_display = ? 
+ WHERE ipn = ? + """, (description, value, footprint, symbol, mpn, mfg, datasheet, part_class, fp_display, ipn)) + updated += 1 + else: + cursor.execute(""" + INSERT INTO parts (ipn, description, value, footprint, symbol, mpn, manufacturer, datasheet, class, fp_display) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + """, (ipn, description, value, footprint, symbol, mpn, mfg, datasheet, part_class, fp_display)) + added += 1 + existing_ipns.add(ipn) + + return added, updated, skipped + + +# --------------------------------------------------------------------------- +# Main +# --------------------------------------------------------------------------- + +def main(xlsx_path: Path, db_path: Path): + # ---- Load spreadsheet ---- + df = pd.read_excel(xlsx_path, sheet_name='PCB', dtype=str) + df = df.fillna('') + + # Filter to SMD resistors and capacitors + # Match common SMD footprints: 0402, 0603, 0805, 1206, etc. + smd_pattern = r'0(201|402|603|805)|1206|1210|2010|2512' + + resistor_mask = ( + df['Footprint'].str.contains(smd_pattern, na=False, regex=True) & + df['Description'].str.contains('[Rr]es', na=False, regex=True) + ) + resistors = df[resistor_mask].copy() + + capacitor_mask = ( + df['Footprint'].str.contains(smd_pattern, na=False, regex=True) & + df['Description'].str.contains('[Cc]ap', na=False, regex=True) + ) + capacitors = df[capacitor_mask].copy() + + print(f"Found {len(resistors)} SMD resistors in parts list") + print(f"Found {len(capacitors)} SMD capacitors in parts list") + + # ---- Connect to database ---- + conn = sqlite3.connect(db_path) + cursor = conn.cursor() + + # Get existing IPNs to check for duplicates + cursor.execute("SELECT ipn FROM parts") + existing_ipns = set(row[0] for row in cursor.fetchall()) + + # ---- Process resistors ---- + print("\nProcessing resistors...") + r_added, r_updated, r_skipped = process_parts( + resistors, "resistor", "UM_template:R", cursor, existing_ipns + ) + + # ---- Process capacitors ---- + print("Processing 
capacitors...") + c_added, c_updated, c_skipped = process_parts( + capacitors, "capacitor", "UM_template:C", cursor, existing_ipns + ) + + # Commit changes + conn.commit() + conn.close() + + # Report results + print(f"\n{'='*60}") + print("Database updated:") + print(f"{'='*60}") + print(f"\nResistors:") + print(f" Added: {r_added}") + print(f" Updated: {r_updated}") + print(f" Skipped: {len(r_skipped)} (duplicates or missing data)") + + print(f"\nCapacitors:") + print(f" Added: {c_added}") + print(f" Updated: {c_updated}") + print(f" Skipped: {len(c_skipped)} (duplicates or missing data)") + + print(f"\nTotals:") + print(f" Added: {r_added + c_added}") + print(f" Updated: {r_updated + c_updated}") + print(f" Skipped: {len(r_skipped) + len(c_skipped)}") + + # Show some skipped items if any + all_skipped = r_skipped + c_skipped + if all_skipped: + print(f"\n Sample skipped items:") + for val, reason in all_skipped[:10]: # Show first 10 + print(f" {val}: {reason}") + if len(all_skipped) > 10: + print(f" ... 
and {len(all_skipped) - 10} more") + + +if __name__ == '__main__': + # Get paths + script_dir = Path(__file__).parent + + # Database path + db_path = script_dir.parent / 'database' / 'parts.sqlite' + if not db_path.exists(): + print(f"Error: database not found at {db_path}", file=sys.stderr) + sys.exit(1) + + # Spreadsheet path - try command line arg, then config file + if len(sys.argv) >= 2: + xlsx_path = Path(sys.argv[1]) + else: + # Try to read from config.json + import json + config_file = script_dir / 'config.json' + if config_file.exists(): + with open(config_file, 'r') as f: + config = json.load(f) + xlsx_str = config.get('parts_spreadsheet_path', '') + if xlsx_str: + xlsx_path = Path(xlsx_str) + else: + print("Error: no parts_spreadsheet_path in config.json", file=sys.stderr) + sys.exit(1) + else: + print("Error: no spreadsheet path provided and config.json not found", file=sys.stderr) + print("Usage: python3 gen_resistors_db.py ", file=sys.stderr) + sys.exit(1) + + if not xlsx_path.exists(): + print(f"Error: spreadsheet not found at {xlsx_path}", file=sys.stderr) + sys.exit(1) + + print(f"Reading parts from: {xlsx_path}") + print(f"Database: {db_path}") + print() + + main(xlsx_path, db_path) diff --git a/init_user.py b/init_user.py new file mode 100644 index 0000000..0d7013f --- /dev/null +++ b/init_user.py @@ -0,0 +1,299 @@ +#!/usr/bin/env python3 +""" +init_user.py +============ +Initialize a new user's KiCad environment with UM customizations. + +This script: +1. Creates ODBC System DSN for SQLite database (Windows only) +2. Copies theme files from UM_KICAD/lib/themes to the user's KiCad config directory +3. Updates KiCad preferences to use the UM theme +4. 
Sets other UM-specific preferences + +Usage: + python3 init_user.py +""" + +import os +import sys +import json +import shutil +from pathlib import Path + + +# --------------------------------------------------------------------------- +# KiCad v9 config location +# --------------------------------------------------------------------------- +def kicad9_config_dir() -> Path: + home = Path.home() + + if sys.platform.startswith("win"): + appdata = os.environ.get("APPDATA") + if not appdata: + raise RuntimeError("APPDATA not set") + return Path(appdata) / "kicad" / "9.0" + + if sys.platform == "darwin": + return home / "Library" / "Preferences" / "kicad" / "9.0" + + xdg = os.environ.get("XDG_CONFIG_HOME") + if xdg: + return Path(xdg) / "kicad" / "9.0" + + return home / ".config" / "kicad" / "9.0" + + +def get_themes_dir() -> Path: + """Get the user's KiCad themes directory""" + return kicad9_config_dir() / "colors" + + +def get_kicad_common_json() -> Path: + """Get the path to kicad_common.json (main preferences file)""" + return kicad9_config_dir() / "kicad_common.json" + + +# --------------------------------------------------------------------------- +# ODBC DSN Creation (Windows only) +# --------------------------------------------------------------------------- +def create_odbc_dsn(um_root: Path) -> bool: + """ + Create a System DSN for SQLite ODBC connection on Windows. 
+ + Args: + um_root: Path to UM_KICAD root directory + + Returns: + True if DSN was created or already exists, False on error + """ + if not sys.platform.startswith("win"): + print(" ODBC DSN creation is only supported on Windows") + return True # Not an error, just not applicable + + try: + import winreg + except ImportError: + print(" Warning: winreg module not available, skipping ODBC DSN creation") + return False + + # Database path + db_path = um_root / "database" / "parts.sqlite" + if not db_path.exists(): + print(f" Warning: Database not found at {db_path}") + return False + + dsn_name = "UM_2KiCad_Parts2" + driver_name = "SQLite3 ODBC Driver" + + try: + # Check if DSN already exists + try: + key = winreg.OpenKey( + winreg.HKEY_LOCAL_MACHINE, + r"SOFTWARE\ODBC\ODBC.INI\UM_KiCad_Parts", + 0, + winreg.KEY_READ + ) + winreg.CloseKey(key) + print(f" ODBC DSN '{dsn_name}' already exists") + return True + except FileNotFoundError: + pass # DSN doesn't exist, we'll create it + + # Create the DSN + # First, add to ODBC Data Sources list + key = winreg.OpenKey( + winreg.HKEY_LOCAL_MACHINE, + r"SOFTWARE\ODBC\ODBC.INI\ODBC Data Sources", + 0, + winreg.KEY_WRITE + ) + winreg.SetValueEx(key, dsn_name, 0, winreg.REG_SZ, driver_name) + winreg.CloseKey(key) + + # Create the DSN configuration key + key = winreg.CreateKey( + winreg.HKEY_LOCAL_MACHINE, + rf"SOFTWARE\ODBC\ODBC.INI\{dsn_name}" + ) + + # Set DSN configuration values + winreg.SetValueEx(key, "Driver", 0, winreg.REG_SZ, driver_name) + winreg.SetValueEx(key, "Database", 0, winreg.REG_SZ, str(db_path)) + winreg.SetValueEx(key, "Description", 0, winreg.REG_SZ, "UM KiCad Parts Database") + + winreg.CloseKey(key) + + print(f" Created ODBC System DSN '{dsn_name}'") + print(f" Database: {db_path}") + return True + + except PermissionError: + print(" ERROR: Administrator privileges required to create System DSN") + print(" Please run this script as Administrator, or create the DSN manually:") + print(f" DSN Name: 
{dsn_name}") + print(f" Driver: {driver_name}") + print(f" Database: {db_path}") + return False + except Exception as e: + print(f" Warning: Failed to create ODBC DSN: {e}") + print(f" You may need to create it manually:") + print(f" DSN Name: {dsn_name}") + print(f" Driver: {driver_name}") + print(f" Database: {db_path}") + return False + + +# --------------------------------------------------------------------------- +# Theme installation +# --------------------------------------------------------------------------- +def install_themes(um_root: Path) -> list[str]: + """ + Copy all theme files from UM_KICAD/lib/themes to user's KiCad config. + + Returns: + List of installed theme names + """ + source_themes_dir = um_root / "lib" / "themes" + if not source_themes_dir.exists(): + print(f"Warning: Themes directory not found at {source_themes_dir}") + return [] + + dest_themes_dir = get_themes_dir() + dest_themes_dir.mkdir(parents=True, exist_ok=True) + + installed = [] + theme_files = list(source_themes_dir.glob("*.json")) + + if not theme_files: + print("Warning: No theme files found") + return [] + + for theme_file in theme_files: + dest_file = dest_themes_dir / theme_file.name + shutil.copy2(theme_file, dest_file) + theme_name = theme_file.stem + installed.append(theme_name) + print(f" Installed theme: {theme_name}") + + return installed + + +# --------------------------------------------------------------------------- +# Preferences configuration +# --------------------------------------------------------------------------- +def set_theme_preference(theme_name: str): + """ + Update kicad_common.json and user.json to use the specified theme. + + Args: + theme_name: Name of the theme (without .json extension) + """ + # 1. 
Copy the theme to user.json (this is the active theme file) + themes_dir = get_themes_dir() + source_theme = themes_dir / f"{theme_name}.json" + user_theme = themes_dir / "user.json" + + if source_theme.exists(): + shutil.copy2(source_theme, user_theme) + print(f" Activated theme '{theme_name}' by copying to user.json") + else: + print(f" Warning: Theme file not found: {source_theme}") + + # 2. Update kicad_common.json to reference the theme + config_file = get_kicad_common_json() + + # Load existing config or create new one + if config_file.exists(): + with open(config_file, 'r', encoding='utf-8') as f: + config = json.load(f) + print(f" Loaded existing preferences from {config_file}") + else: + config = {} + print(f" Creating new preferences file at {config_file}") + + # Ensure the appearance section exists + if "appearance" not in config: + config["appearance"] = {} + + # Set the theme for all applications + config["appearance"]["color_theme"] = theme_name + + # Also set it for specific editors + for editor in ["pcb_editor", "schematic_editor", "gerbview", "3d_viewer"]: + if editor not in config: + config[editor] = {} + if "appearance" not in config[editor]: + config[editor]["appearance"] = {} + config[editor]["appearance"]["color_theme"] = theme_name + + # Backup existing config + if config_file.exists(): + backup_file = config_file.with_suffix('.json.bak') + shutil.copy2(config_file, backup_file) + print(f" Backed up existing config to {backup_file.name}") + + # Write updated config + config_file.parent.mkdir(parents=True, exist_ok=True) + with open(config_file, 'w', encoding='utf-8') as f: + json.dump(config, f, indent=2) + + print(f" Set theme to '{theme_name}' in KiCad preferences") + + +# --------------------------------------------------------------------------- +# Main +# --------------------------------------------------------------------------- +def main() -> int: + um_root_env = os.environ.get("UM_KICAD") + if not um_root_env: + print("ERROR: UM_KICAD 
environment variable not set") + print("Please set UM_KICAD to the root of your UM KiCad repository") + return 1 + + um_root = Path(um_root_env).resolve() + if not um_root.exists(): + print(f"ERROR: UM_KICAD path does not exist: {um_root}") + return 1 + + print("=" * 60) + print("UM KiCad User Initialization") + print("=" * 60) + print(f"\nUM_KICAD: {um_root}") + print(f"KiCad config: {kicad9_config_dir()}") + print() + + # Create ODBC DSN (Windows only) + if sys.platform.startswith("win"): + print("Creating ODBC System DSN...") + create_odbc_dsn(um_root) + print() + + # Install themes + print("Installing themes...") + installed_themes = install_themes(um_root) + + if not installed_themes: + print("\nNo themes were installed") + return 1 + + print(f"\nInstalled {len(installed_themes)} theme(s)") + + # Set the first theme as default (typically "UM") + default_theme = installed_themes[0] + print(f"\nSetting default theme...") + set_theme_preference(default_theme) + + print("\n" + "=" * 60) + print("Initialization complete!") + print("=" * 60) + print("\nNext steps:") + print("1. Restart KiCad to see the new theme") + print("2. You can change themes in Preferences > Colors") + print(f"3. 
Available themes: {', '.join(installed_themes)}") + + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..034a6c5 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,6 @@ +Flask==3.0.0 +flask-socketio==5.3.6 +python-socketio==5.11.0 +PyPDF2==3.0.1 +pandas +openpyxl diff --git a/static/logo_banner.png b/static/logo_banner.png new file mode 100644 index 0000000000000000000000000000000000000000..c902f7353557af8e58c40dff8a58d70fadd142b7 GIT binary patch literal 41755 zcmeFZcRZE-|2TdfBeG9~vO{=tQe;a)gCu*MvKm%7Mh?#5 z)H$*a*^cph$^E|X`~Cg=_xt;Odc--`d0ns9bMMPtV?$j=y5n>Z1TkK^sAUR4tXU95 z!ApAxT)B1SogxImV%;@0jW20xp78beadE$S1A>I_N8E4KZ&Ty$us16fI0`?TAbh>l zFwg|vW!fQNCF~Vz!E;$`huYxy12zub_tiLDYR8K^iB;uH<4ZB<5hsB>Pj$*==I)Bqp*;L7uS&~= zcJSl%dR5*02Xe=7tapnp%ZW}gPJK7fCpH;zJDs}3uW~w1dOEs2)i%Q}5^sCU3ipIx z@L|?zCU{~8(_^K~M+}dG`udFz{ZPjF({M(9JEiG=Qq`c) z7@Fvt&h|&)&h|smOu7E-qCb;dhz=#FKOY|+&ot5^=>4T?Cf93@$6bA z;L9N=eO)bRZ~tFGbIC(+<*@HXYXk%_JMRC7m8zDwgNrn`E*WUk%upPK^GIGcdjNQQ z0=lH7ZXP^3H~!uaZ^0h-b++6P67juL>q3CEbwzGkZ{k-dCd zB>1au$k>3{xd+TA{rn86hN46Q>uDplZ(!E#DI*sGUwKPdkG2Er#8gN;}RF1 zOwM3mV+Ed8QK?pO#fe)0SpNI<7>Cfh*^Q!^GcfKLC2?PN`WZz3-z&hU-L+3t|K}nE z)zkLC{_o|Jg3>hq_i9>FKllH=swSxcX!US;e`xYQQv8P!(EmvBKT`ZnfxqAU zPb&ULioYrF_nZGo#s5h0HwFHF^FOKhA1VH(z~68FCl&wyNYU-e3PGr*^l!58O2dsJ z9_IMqb#PJX zR_@N^Vx%M`Si!m*)ks{~{5mTzjvd;%3`5$$vnV0trnPSSaE$6OLhFKp51swPUE5aq zbI_%|@vy`Mal^xZwjTTL17E73QjI6lFDfl>ToatKloFkY5+G*mPJGDNSbMdfs9f$q zY;9lcY<<1@gaVRV`tO`ZRLfEilj&FwK;)M^NMJ-`{yS&U|Mmp6bCB^oR&!(ebMV0r zU#SKDcTObQ z(@B+h@~@mq^xppi(!ir{vMyFBrlVK1UY$FDP4w>nvj+VH)qL=kB@u&H6!u4vH!tppBewW9LfO1Qhz?HS^Pf`4|Y;5F#P!{?NL8BDq{YC z{|7hw%Grr{Imb11k^>(fTzP!ajmcZz{NRp*OT3A+PskL1-aq>f?g!tX9(Y~GwH&-Z zcKE*%&^Oor>>S?N`1j6wVCPvkik2!l^`o9ZT>pMqvIrt9|9nE;iXm(W!%GywuZjPX ziJ;o^w;Kzd1?Ytq&GhNfs*9y?%Ijqxb8-Q)OoL^ls$?+MB|& z?r@DVwoTB5uV?)i_k$U@SnE#z0Y*(o1K}HXnpNV+lb0pu4{rSPg>P|r`cT!U!%0DN 
zqJObDx1XD@T2@^q46M#t=B54{H|PSppJdPh7#B6&Qc`~>+pgwZMgcPavj$gr^D-{w z;IlPt&-?kiiAx+`-_zAZh{qi0)SvIr-yVGn)d}hBBnSM(EtV`i1E$5^rjX9>{H}na zRs6cpKf4wP-IrWH5R;4QZYhWjopQxBA(ZF8Fu^$j21Xv-ats`tJRdjNLzh%RU0AQE zCGpQfWLAUe)jpBF{@~i)BL~Z|v^{<))rVieIomu@VgF+1VqN>^F~&cO9M)n8^6@_S z7eu${j{g$(U|T?Du0M-VV~2a*N6_|6${cL`=jUSGA3a?@Kk>5afOse@*sJR^)nL;T zhov>4QWviSW*;0x@$|;R0nsHqH4r&%a(-9davdsEsDM@cg9U`_syTll3 z8x1>=*TlzsBfesGJ!V*uvxDAkqbEL6`@-oG^D!n=Jt(}q7r`u3M5;kv-u3}&6PKvzTr zCPXVaEATyKt~dS#EbdTy_t*jH9Rd$_9&FBXIIl%L`(GUQc66o)Iv{-}SD`|0&3u?+ zNka_b@#K%8L2P1-_&`n| z9Axoc*LqX-52cX$tkPW~@)5K9r`yf75dzC~eC7nEWTWJAu-$*ziv0tTXU@^@?F=e5 zwzR&TZRq)Cd2oVlR)K<{xUqyfTO{?%CPKvq{CR4S5&DNN!s={d8xkrOSz$fpx;8jG}FdT?#;L5576&0 z!QqVwRzy|iYQC2V3FVj(2smNXsdIu;VY1zTM?BYmT>O6Rd*9I~`-d>Kx5q z>WBdr>7er2t8L8(j^FoaG(rBTI>PeA&4s6da6j?u3ol5V5M+3w*oAFpv|Uf@*0{^h zGZJrtzRIt0&EEc^8f9iVIbIWSc%Z+eu`~csNQ`n~@Xk8`?NwE-c}l)u-qX6OGEJD0 zu6)5_7Y_1bz1Qvl;%h7U&11hiVY5W+^2s&F``TwCQf&EJGHNtTzAkWh{@{vr1Uo1e zyKw#NMf8|@=yaYx?L+h^J!Q#d-mC0d-rQ8vwj3Lj5oDRqCRgGWS(VZ4Be@dT6ZY%g zeA;Z3f6-|9uj(!bnIU7YVGmNNPtJ{Nw_8-s0^}Y6S?)4QjQx~I|HTn8o3L?&?q=@| z4tPEg%xZnVkmVYlc!MekI?dDibBR*~s)*Tpv|`Q3&3#8ScTr$)SjQ`hpn`y;CHDx(Y2sy=NLvZ_(KQG14Y+^bKz zqt=Jd2&f!gQ^IPs=T;b!QCAk?O~M(SvKg;)hc|s9i`mC}X^9FtSzonsE%rDj7fzSG z6h$a}u(Y>lxk2(tnjr0t`tj{K?2Ox!R>r5TvHrh5H{IT(Dp)&DsJxM`+$WPr8=sxO z7W&wGI3T;Y;w>j(boSUD4`D8~D>6E4n!K|evC~(WFRilWxf;ARP7X za-B}k-9)TPU#L9j!J=%qQ^$cm z1W9|_AFtvV=I#ES8i*ry!u?p`a+`VfJmbqlIM{R;cD_0iCELQ75#*%k^Glbvld8*c z71~wnR;df@om=r)jcua~pX*XtQ)p7#0?{y7 z#!Y+*RySVrDbstzsDjF93s0?=M-QyDELZ8B7e(Xrta9jxkq4H~N60P3EUOz6xc$yb zIJ)%#cfuO6$%`Z~_5SE)8o8uH!*}XY4`xkbKS679&f&$z0A=&3(`G3@H!@kq3`RV5^Gw6+xn_eU4;VCi4MV?_vJc&?vC(Z*DiF4 z+PXL}rfVupVOta-pES$|CJy*UW7}L3x#mB78rNz!(CblLkG9et$P8D%#^#=T!#HZo&h)Tr?-;r+9cXw1@9uMrAJVQ5WwHFyoTZZWJZRu`ZRLdWY zCl;+uYLk5*)l3S#x^nHq$+f}@%u6!J;T&w%nsn_AW)5}&nVztcTqw$IFnB@x9W=Ge_b}iTpyV}abT*z+*zm1-uU-^^Qrrw&L zFY(P>zLI*_H}Z?{I%?HKujkdQtiC;x|&!=S?V}WTIZy&zX4J)V#?2Tm7tGC=M<6tE3$qTB3JlH^MtdU{oJ|$YE 
z2NTE+XD*c3b>Jc+6{u%+BK9NcjG*w}FFqhOem<>feDmpW`O-EI1+S|kFH>-%Wo_3vxjOCsGqP<~)*lSj{e?p>abt zT&>ijm7GLDfXYi}WoKhfeaF)(=%_SJo1`Q`ikGT%%&%_7)W-5UIgoa| zQin+`c|Hdvrq^J<_e9rUrtaBZ9aWC=TZf4nS+lhf5TE^iXSEF{$IkMfy9Ts=&AoC% z^P_F+PYEsh^AOZuR2ex(Pr3VEUaN?ftFCs~(m_SO{;MO1@2eNe9 zu_7|88B^n=>ANaNj0>%K^F8?{uqsJr9x9%E_xv)*TK8t4(jEhEJg+}oVP{4v#ngjW zV2U)xx5NXpHToojEef|f>SOsOz-H@=KNnII69(iWDmG1_Tn!XNoYdbz3cd?wTH*KJa!f|KWT;~HfF(1cAT$B8Bbkrs zwgcmuk$ya}TRBaxi{9w=?1iYRq7Pi!?@{$h8Y84I^2NSN`PLBg3L=r`sRBHEvAW ziJp^;V*h*{0kY$CnYH|4^jP4y%P)Uh9VU2iN`C$fQ_k*pfYHQ<4@|!20G(&cG4fVF zSJziW*YC3>L7M)w+2m&t8;)d&&+~T-#si+{wA&M&L8blj>!fP}Q!TTy9yqaKpK#g$ zPbi`$$$z*@S42h%KP>m;$}KDfclM9-SWxgdzP#!r>c$AeUG}mY_L@7_YKVE>dlYDI zK(x8YuRI>@c1?M;+&mVbz=yNW0;Q--PUvcj%B!81UVBLYaJuhg+nKe~=+}Ad^rs`Q z0M)6^Q3DuhO5RK8qr-`{>71_njF9`>m+Q(rt=T%Ouh+x3WxE$@WP_wh`eL@MI^Whh zn&7S#&cy$$8cY6?P#3UAY5XKRA@hgvmD?YPBq^1cwg_m;`w4cqI4iEdvi4fV6zRKt z_m#|wXvfCHgzWSz)kbe;arA2E9VSUid1spp;&`{uwQ@VwJUSH1sA8C+@KVdpk)fx* z;J|tgxJ}f^fh;Uh-=u~6$66mnXOq|IdUYL_I@B#9B@;^-7QM+HHk=TRM_EQV>7lQ_~V4!p6?jFWPcwX(auy znt>2}$39Ml}nPVk!erZdcvwD?kiu9^=unO50Vju~s@?z{sX z&H7QZFtJVej*(q8gWP5dqSjwWAXgp~DD--{9_5zs!QfH#>v=+fR#xLG z=h$s5!;HQ6vwY#vLSfv)H}wOT%IXu}8o9=s<~PS1o71ff0Mk@HE1OCgse5*pW}_C6 z?8>m$V@guj+5Y3t{r2H)24RMfT`k-YF{*+mzBxu+ApIq}i(HhR^}V0y6*=(gDfuWa zXsj5WBsf(wE31@&pTnz9Qb8QN^c|6rh?|Tg(fO<`5$Ih&e_vS85xBj76lbV5Fg~)m zweu_PmA)P;YCjz6H05(hxmZ~rjQhwUNd9Hyb82R&8dxuug)kM40lR4^L2VozpPmKG z#-Sjq#C2(ukuvd3y}Gv!E4#s*+zmtLR*?aIS_u6^s8}ebC-+%jX(?ZU?H6G(vc{d3 zk32qWzTJFAlrFf)S__!hZqK}+3;VJix&ZP-f}GK4`C%jKg0-67_a^C~q!fSEo>1xL z6}}9l{$+|fDr67{eHcDZQYlXIk5so6ywSKKdaMDaqh%xauZV2jEKW z!r!q)d{Lht;<`I#wBdA(4oNruLxq_{lA36b+_f}ORlEKB{n=CTr0(#<)|)SNAK8{X z@?j08SajcqbuQBPeexd7`jPA2z3NkmROaI|=G2&eIdsrmKPOrz|-kiVvZUqav zw^j#;--0Tn=7oUCI=56{gE`0#@i1Y`+aHx4ddbI;`PubGw{LUBhBlg|QZokx6Yjyb z-ft6VFx5@6U6*&O0B1Av#Z@ZJd3LCjkj7t$)qRX2_`07%y@cFsCQZAD3yi2|*543%@lW z>lR+RQy|ODo#HBxE!y^e`6)vpieb?NKiY}g`kjM(qX50|#DFlzHZE#k$)Fd50oh(! 
zujqGa2`#M;yMD>oW&m}O@2V3QNFqm8C@Z|;&)lOH?m}-9#4$2ZY4vMXNnOVEC24Lk zO-B$bxXXd+Od|sdSaY}^UJo#%AW2~AyUT}gQ;@$Y3B*LCz8*LCvlbRbUDxAh#U>l@RaHnE^ z)Xs{FreODcPO~{$EkAjRHmmK+kvjH7dC*q4{4k-8YfJl$1chbD0+ksg^U{gcVuHc@ zArq>Omdh4@%_-4BG^;f{-HJER8p$<{r)5e6n-5B#?-Y`u6zS{@?vhs1HH^eL8u zQuW8_Vgi3RfJg_pDtM*Xh9Iz7d+#QT^vb(t824AJ^tW=xx7c(!hciEDIP-42ZzT)Y+M`(;YFDmMa7dhA z+jJ2XzRpx2#;nO?9*`*sQ96nrt*)bAhO_B`^Ca_XUw`uAEC|7<#5@B&+o5&R-Pid} zQ%K0nY}&@jYw%Jo>sXg|(1zg9(E~`}m>D z$QKj8d~zv;l}Gf))s%-F#{;r+D~{BqHC|wH;*%s^h0xA#;xp-us2xtsk0UZ!i+PzI z6j}je{k$;*bA00i@`vlS`zj!rktj`84HMRTE=HxXt+y!A!6|Uz_s;e!op}}% zd^F>?=uIZ19>Yu;A5Ai3PBcJ_+0}m6X>Y*wDQzUX)t{@FR1{TiLB%RF>`?q@yEaG( zL%aHdv!77zpKbRsqacsGDTJ0Wl~7064mhw998tUR)B1^zm4Fs zpot86!c@=r$#ZH=kOJ+V5Y|ijz|%W{C^-&sDGAzDq~ zs!%aaIAcUgQ(Rx`LPO6`5v}ce(5S$9eEsbzn@Vq7``vg{9u#I6t~0F9=^1}jRzAGE z#Ri;G#EIMNaqs`2oRvK)ci|_?QFXF!y=!muNr~&)Y*){uQ`OZ8+{?~!&}Y8<`vMb` zUOt1HXexi!bLfOWYPbQHDlyWJZhCIhMzjw-a{KJDE{S0qo;5n-RucxCgXClx#1I3t zyc5>IW&}=?$gO?dqh(yVoup3$fkNc7(YqG>XMB$aRf?BVb3UEmxGoG1yf@*~ph%Zg zqn`9KRm9B0MT`=Pec61DRTM&1KDg7OL&m;p!-cl!t|qDuR%u=Pu^!A2!Kl1^^ zQ!ufLnCgv2-{^D7GZoL!rH-(I>0SHRg)$0OYOQdjb1_W>V}SEj#{4iK&aZV@>Ykb~ zTpws@y9PVtnuFb`uJM#J*>mKF7_7?cLwrw_&b8!e9Sg`@G>axo({oi6UXx`6;bp7o zY+OmB#9ams!7=y=flw8WY*PdWIK8}ZSbKw_=$Cu`=Zz!CZ&x^E(-PN#IBn=KZC&Nq zh98o1%^MEz+!)h0I^i%ER2kS7eh#&W4o!>0@j!7n#K*^Kdu*Yk|0_f2YL6j zsTvQ#D~zK}*&X<^;B0Gi^BB)6R@>M&VZwc&g0awtW=|DxdVr;mSM55F-29xLdn`==_)a=nU8t>Mm;Gq>pA*D&dXLhh{NMcsDkbyjBpg%xPN1g z7_KL+SN0@e+~s27je&a$jX@DaUTa)TC#9P%?+3>UNb}Z1Qv^03+wMlbK1fxs=$C~g z_!B)QUD27Q)arXJ2idPMPvqiHCTL4avbB; zxc3;_Eag&ZKbNbW8ykjCn8qt6?!xy zs%~WF_xETXTJG|y24$F5DOzrvf^1Qu0x#q@=P$xxOrgh*@^r6f=hxnv zW$vzik_|JQe?Am=l5Z^(fA$dPD{HMLb9DI={4=)v(3QsUGF)i6;0IU`gNBOlO7EkB zZ(*1wu`4Xny4WNq`-{7uijh;wji8Q;vO)VdywHH^=;$Aye7@`?-_RN(1d{ow#&Q~? 
zPO`8~hhg3%R#4B>ZeAbE{yZ8#L4E(Y>fSfmwf70LBRref_iJBn5 zjv{^7jjY;v&5J*Qu1rTgUhZR26DYrhFq z;wF7Lghn}h?)_c3@l~>KzUi`+HMzfQsY+uj%7zda4uX{VW2B`vD~FJk+{?=&FKW^{ zQh{WR-5Mu!6hj4S)iEvDU-=Nj2VLyy%VOjc)PZIF4CBpN=>G4tln-Qf3vwVB1X{?Y1*~I^z6CCmR~(zAlgW2 zA(dzC437V>bg8oAHN<0YdY;$olYJZ9lDmdt@y$0>mn^wO_Fr_pM@8p|15*vypL z%(6oH>4_buo0cyEEyC~nuQ0!9KQm*~_7V}Pem)#tP z&k(%QUr(^fo{6=AaXGsr(kNA9=_6?+*5cIK9S61fwutoROPd99jv)%7TFCFaJ9{Ht z0~>q?xL-xl*3}1LC}e8&?P@p8KguJ1>#4f>uMFdh_|}G>|LD(hwV%x9 z@j+kMUM)}G#HVM)zX_yk@)2#+FSG7caVycPN~q&OS<1Y`Nr@`^`Bn$*%se_jCB4_} zghOP?P%Ux$S@l$OMa0Vxu3U0fkuNeO$k}JthI>UKjxaT=@M@;-rf`Ke2D()+1(ZiO z!p=NgN7LSk%EzxZee`mnU7gA0e`UAeq`KaGX7hwnqycbpvMCYw`}4)9+3<+Wb}9%N zwOtTQ9Q-t4d%-t+bjDcD81&35`>}QjpDTDc=wBVJTY^(Ei7SfYnu6nSKO$yHb~7BB z*2b({<=XNn1jR_{)a9;9`=Db^`QGfYaiErJgc~8PiZiRPQOvm6ii$G^$qQ%WK~o{24(u9@|vo%C5f-zZ?h2r4RN_Qisz0Pwl zE{n&vy|{K$3%0RYCzEQe?}c>*)LO1$t%lYB?`la{k`1nfP zN%UmL*Nr|~>BR`HpiFF2-?D4a`6PvRm$-R^O^q96?@gkQfqaGY1dZw5m&rG!#hPU= zO+6#VB-i2zFMRkiLGhn$YPrel)OQVL;K;e*`F3E!L_QznW)0rU-vY8#o$RSBmrqUz zi_5YJ?zKn-dE8vy*^C0JErqQT%2CL$M%z<0B_@!|G2y1+lo|6u+P7oxD$nyVhESqp z*Ahv4pzj_QAyE1>Vx+T$B`LL`?K|Se8p&U>##IcOUBv8XhK0#B{o898{TG3 zDb~z$EcD|TxZ%Dh34IP~^}t&IAlb;9J)^`VZl?+R#)J*C+_NYL>po0vY-)(;dTUc& zN$(pmnL|CVm&fjGt-+2+>xzIPQpXKs(nJDU~WfL z45Vk3bsZ~sAv^zuzlu;__cWT|Z=nTNt%6`O4W#P$X)keHZ9}(-+710Pw zBHxR`GHpv#U6~tZ^{$pdDUH?-epM$_pOG1JrVS{Mx;3tILJkEfq$ye~&EH0M`meq? 
zulsXoSkX3`ApGh2L`|O<<;rKx^pL4@5X5%QN4QMkW_f0G?QQXtxhYRn&WWR-mSymC zVKX5>EBx&Ut>w>6@<>>ybpP&G{>v8^#n8%!rgrArq-QRSFY8|WEmA#*l?yS%4WRPw za~a8~J@OijK-(DGjfT&pQO>1O61q)bnurHIS3^Xv{T5ocF2?#W=6n7;PmNc>t2)1z z0zRqdG(R30xKMJ>#?`RwiT3y`{*E;>4K6;8n{8JFba9G~$Bk~?N+q0#17^7+K%U@5 z#z~@9f8unhS3rJwD#UTk41Fxd^xMffWBM^Bu3XV1Uq+ceT~JJ6!=hI6wO3LnM&(Sm znOCdso0AtC7aF~#O3gs2yy{*9v-ZY>D4zvxt&mYFfWeaoU^c z@oM(m`HR(rKre-NtG>`Ro;JpF&Q4tLC<0O;Ag0aKu0N;Z)(BOP$y1Oh)73_w{oGeT z1ha_cqBQWqI%AvmlXFN8U4RCaqu*ybxZW3ZYBp6w zaGF|u^aAx^>!e3yV@y1D#q#Wy>-ow&%H@VzP#YTUEOv3tAKJw@on zkebuBs>egL3OVpCS?@5aMH7m{0qA-#)aqQv z_g%Xk~}+;|e&3P%*c*76&?5kB6WI8pq-1G$~S8l$Iiea@^RXoZEB0lZbE1@*8LYWoK7 zA{|o^O+Fv%YoAsQO3SDLlXseHA10nP?g}7ATbZjMXM8 zY6c5@#!%TU7e2&>{c7lWeVV0CO{wBatsMDHW3xwLHX%GD%w zaS?`o)vfQ%g~3#~bw>V#S?&B?oBG^sX=JM6vKJ+Kywh?mM31wI*-3D?_dl}$r%<~R zwQp84VL4ZRaXo+jD4ba)Jms|vtkjHYvaFI&{w|1QOtAKe?S^rV})RFyXTeM5(&F{9vUt%mn86@!M zH7*~y2n#mK(EP}a!VL1Vza1)xvhx=AY?Z}_n@T~eH8(n=I82w+1uIthR7Iv}>Mq)} zB)**$_IQG*m_&#wn;2K0p)~u=`7#EJ;9iY+xtnzS0`9o;(;HgoTm9-Haip0_-P@A;tUG5kX@b+q zN7o70_fp$I+Fy2xM(+@690lc3Hkf#8bZh7I%4q(_lF?xm!jrA%uoo6)N<-_ip0u-K z1a?Tw=gJ~UnEsuvYaaMdVf9k68hXMOgK$qh`p8gwL+!eZff z$}z8=E+l_9!%C_RdU&*LjjDoDl-<=f%8hfxtIW2K#sY~7kEhI~<(@IpIIH4Ym&-zu zLpaDNzW{YLzE>WbCq!f-@^xSN&fuNaW0V%Qe-0ZWabTFBxt;MPyf2S9I+tOM%hO)ePMB@2iCc z3Xq3|=ebN}b5sGa#1;<|Ho_)qqTk{ee@iV}(mnytvpoStt?zFNxkl0<~5 zJxo1IKRYwE!*I#|R<~YzrmO=1GZLjdz3c~{LugFo_lrT-WGWh8#n3}5#9UjTd}rnz z%I^_Cwv&>riHuCysWBNIyMe{Z$naSRK7*#Svj}q3J%i_kC<-oAr%WbcIUkrX?YiQC z58nAAw&sYC6Qka?iqzSq`@=szMS zUgX1x8}}F&ZJclDNvv3J^B7N)SRSM>?+KsSL~~2Kdc#mww@T-4FwT=La1sm;G)u)w z2yNZm7L+UZ)*sZ&QzqSoO6zm0;_3P%@0@f?;pYTX?{5@>?ILCK_5?SSp<_@+x{%wZYPjev!YL{p8^%O37WNZo4xdg^P90^Qtu`!TU;v z>Z!BMpwN?j>yMj+RF=uta*oq+K7Zk%;NN3lA60Rj`IU9hxDHOf-;z+)i>Wbq+IvTF zu%3cNLo;W(v%K33`QK%F7#p6-rqo0fM{#KD+LvtF*H|sd8s(x6BEFd{c~w7 z#FDZ}H<`Y7O9$dhOPux;6&s5!s5eWF_h@;{ij`2S?k47zkgO2=jXfgJj+y@(jq{U) z&GlFO4xmwJe5a+?a!8R~nu&_D{P=KJEvWc!dB74BnYjZT*feE&Chs$37AV9KyeDcv 
zEkns)g!3(!9S6WD2zc9q;(F<~-c$1Q+TOT&CF;ZnYDdqgxL?E4;+#DgM?*is+}(WQ zgZC&4UMpr?9k#n^2%9eXcBk!mWMATUMR|rp`l^wc&I4xEJQ{kpRR&rIYwBI0{@#e6 zZ%pFGPKT~lGi8IL-Pqyw1h1E<4E4|JyBVcY>c=5yq^d5=NkryhDIPsfns4xSz8|CV za_Jp6$*q9B+8gSZqv%&@(joh5D7>ra^i*7(r!e=NJuY?RRXC$PMU#2~If*#43d&@y*c-rQvVwWG2i#uHvi*q%t)WXI+z-kR*i3WaIJ7wmhP94&lP;UUKucv#-}4-5nXbtd=b?U ziPtQ*(Mvc)RB?t(sMsbC`XcjdxKUP%=eSqs*p|v){6MU%;+)hGkU5x%=MLyz=aGuM zlR%_@)zuHG-kfhuw8FuJ%N_mqEI-0_5V1U=Ajs3_wA`u}eSrJk*z~C4XsK2*eK(MT zQJRPi`cEa4qzjaw0r<%K^4*bmHd1&SL<$_L;g3h9Z|CK0?euzM0#|S3JG0_ClSS4QJA|Umy3=f}wJqag zBd-490qIw3QA%tWz}@qO-@`XOhQSNv1%T?8Z%_SX?ep-yzqrt`KN4a;|GkOx z3xzQ)7t;z)R2LXHA#gxj6+fyQO!N%IJwGgyWc;I`Mf&PkwwuY6pxN|U`4`hHRl&Ew z-Z8%LSr*xKP?8H&hy!{OE|ZcVzFZUGD^jZ7lF_Oak-Y=DA7W>W9uLXayuGM_wJwFG zE6&`B34aZc?(EHYWEAoG+(%H;b57t)JJjjQ%BND{{QHHMI+)6Bq)wy#(gQMP$UNj8?WVdq| zoyx0-;OV*(#m_sB*MeEIZgGh1>8$LRCXp9t-*@gSp3c`9y~yQ8lM##Wch}@wdX18u zqJ%$q>NxTyet9t;i3uFnVPQtSA=?RB!jSh2&X6w*Ed{S{U{56^?AT5fyuF$7p<%d^ zc^Jbp&ipdLDnnlGq#IMD7hojNpC$Lo;9CmQ*`Q0@zH6?`Y6gf$G{FadaR~`MSL|Z? zOE>#DdV4Rtts@j9=gnV`LAM@}+#&>#sbF@s5zGVHZ4h>ctr}^NlV5|Vg!?4dR){~h{08ttB|R`srPj`o&9}p$7GH^#am9+sFZ7Di+f3*7k&T0xTUPM&J)8V6fe@Fz8`?kH9@@;;Gl?1&9 z(?_^@o~NzA@oc=a0@G=>px2;VX(Zo_*ngl`>}gHUZ&{_udP(ukD{St~ZQu5@K&P5o z=r|3<2?kE&nPN08UHpC?k$#*bx|XwJw}0F^vc%h2HFf1%e7}M^Vo@asB;MNXI|ELR zCifGc)Xp2sQ4(&}fCsYbkaQdu1wd?b2*C&KT(Q}^_{U4g&@tGNm@kz*S(%qMh5Cao zoL#1eZd}!AHz;h~9}xx+p?&#K9na87&U||Q8-8xEbl2ekF1#A>{T9`+IIh+ulb0-# z-Q;BFR`!60n#9MI%(;!98!rJXHRrf{!3T3?j*`emi;xA7b*eZR(ttJ$?ybj#QzP7m zpzYDOIN9Z~#FXmQ)RkmpZLsJ@kbUlEzPMY8%DR2|`$LWpr?HZV15{x3QdrK-8w{O+ zXPX8!zL|QsJbE{KHkE=S_!@AAE;%~6)fX3Ex)#(l8xOHS3BCoA&}f~hRacv`R{lf< zsHKD6U`hmT?8~Vr7$B72K*EOS{vbSa4yOKO&gUjCX2uRMWDXSbZYJ(I72V6*FB?{( zc`R?((FE(0eMx7wjlHPjM&rMz*vzJ99q+1^eL;zu^`OrhAW)_&@!CB{&(V2jKd)4k zCuZ&Kew!~pHl#ZahUzM@H2$m_7Yk(nK()vQt@`7H>*D2GFKJyUL62#-6Mo`8W*NNCmg2%c$1$qaG^o&t~0ZzQ}h%qBG?cSIqX^~%cp z9zI)pFS92cAP6lwSHEty3W5pSeByVmy1hq^-`yE716_dhEMme&6nu$AgK*;MTNxL? 
zI?37FkQ@H<3DKNX$<%IzWM@A4^&q{T?^Ur>J@0=h83ghBrUWU{Ws;S!`V0KPz0Ibs zz61r{M-`7KE`w1@F|{RHl)HJY$+r>f#<_a%X9&kqVVqeHh!AKySRM}hvNv&`#Mcrc z=56Y+o08B6CXVi@4N?I=UI=FALa$s|e#(@@I;Pp^>w7LEWF1VdZ$C0c)M#>n<3qS< zdaX}PWl(hkWW3+0_xrnd7^+dNQbK=y_z5CKx4K3Q7$*}>c1C8;!a*QZ_RE81F z8cErmE3b7=UozFgm2aIzVtr02j_<{}1l|3=Mqw3x#yqpRpPuCo=E7MdXf&T40&7aG4VJk(eRA%Baj$tHfn9LMuh8x&JIgvN*O`!s z8fN&?5qK)-NKUQnPb3Miz4w>nw*|plL7xX#mGwy&w357%3~eZ=^ADTYFm|+Cf=-HY z(v>;S2K|E;Ji)2ZJ;ZFF^Z|)yVwoYyiBC)pA%m;Eaf7+)M!h71@q7LgH$`+*)k^RU%!mrQJ;YI_iTX}!sd4}`Mt|F69_ai{X@9>(uu zNXU`Q97BpoNak50MKWccXEHlv=BPyG2AMfhlzGZLW}f*}95az2^E~t0hkBmp_k7x>s-Mke1L9>rH$}IA#dv z_&W|%=Y&DyIEiY-J?p+~&q{z+<~-R+YFK_f>GS;x0oO5u%*;g2<4pXaJy95h(*5}T z6Zs1!zKqTQ!m@UG;8&v=RW>L0K+97%<{@jy`|y0!GUW?+ac-;ib zUaNv^T!%`BjogWbld>1R^g&G-*d;9Ps28dd2>e1=yE@sLyS56DdF)B*!B~#&1_B&D zjf*k?+$(L=^06bB-P2AqXK@_)a)sWHBdF7t&|3yO6zV*(W3wLUI+bwc{XAQOYQB&o z8*328XCU9GD0>5BAj|!b0HMMSJA%Za!JvTvZ!<2m+ivLu!YhEF1jo?!Xb6>QvjF>D z80IVC@zRpa**n!=nPi;r@E_II#5>m~&3gX%t^uKC_Je^uFh&8F%4-m_^7wao&FtHF zVAt)?J61k%9z_qZf&G^3#PRIdg}lEH@`L~+N8lL`3!@AKgM5ltaG}H)W^ZWX3Plta zen=(jHLffOska)njXK%8GR}90*&M!0xaAJXVovZ-g76oMeXQtf$>SA*n??f{LU==i z2dnJe3#8<1fon}zX@SUPVw~Uq8WkMijlFG%6`ZsdD#E+WA#j+&4uA`?q%0J=xZ86` zQ=0u!>P1fW1o8=lZ(SNrb)Yj-#V(t-uW0*2!2 zAP}U`dM@bx;%v#u;g1uQUc`5cC)U9;_LUJwGIRSN7o>u}7O;hmUqJW!F%$*#n+BDu z{#gG!R0c%JomMOlVmsR?_bLN%K<_vND%ph|+0z{~EcC4PtCKPDcptC4cnlzQ7SAk7^VdOIe=AzJGF|E_Fb{W8d z|KoR%EAL-Ux7i4S+zNrV!*oOn_5~+|%#Y<7;$%JfMleZC|KTJs07T?XdO!UI7FEmV zEHD_w<&T!;>an%>f_(y=#Q~WlKh~l43isJRi$u?Hmmjb0`xY>`)P4DrpK0KO!wg)J( z)0_lQ+4&=G&|Km2z4NGn*eS5iQLqmBhLVE3J3M{-55;y2I$$e`?m|*dTB{py{bby7 zSTJU#;T*KT@*VkcmB*m8yeCL!q>OHJvO4BexyiC+c{&4b{T0@eeW(b+y|}k0kqelV zs}&K9_`SZ=?9>G**(v+iVQ76|oB`SPz=W_-pu-3>gCs>@2N>?>1{9Vqc=yWoH!(l( zwUVD~gq%o%43O|={=mCUeVC04IZ13wz|i5i$9--I!*Aoyn;v(!KgVBwz&96T+TJ>DF8i@?r7zxm;BR0Tk8>0>p$ zN`Ar&S6m1j`XMVTTrmJfp8`CdKI8iXum&fgO{}J^N?PKgxPjZl4NKGlQD~lNScUm0 z;~~x1eGEe4R>%?EKrNgVFj&>VB-nCX8zzfs%RJu)djaW+8l(YN@mExG$q7{DNz_PH#;wh;A|32-Y)S^7)x 
z*@@Ah^t1h_#yA<;fi>t&m)CYr`R|?Gqr2Bz?Y05kp-%~im$<>Heur$0gQX<*oR;6o zku6*wt9`t+(U_mT#IGfJ%?Af!Qk%Sc2MRNRp^S<^BrGI7{uaS6^zieZ3iD%yt{cBG z!R0QT8|_CD@CDhr0}pplMIdXb?%dWreK~+JvbEY^4`+e0_x=oQ;P?hSR^;SD(58}o zMv%2F3>A6t9LVaS5vyFzFeV2is1uuB%`j{wFpt*fyLm8iQu~fM>EpFft&ws(Csn-| z2o*m)aS{4d5UzghLWXzp6Tpz+!BR3@v;({kKbMOf(?11PK9nSFIo>aTX9Xsew4soZ z;xAC0{P!nnsWoJ%?MVS_mF}!emi@Q!FY_qSznwkXE?X3Fo}75E9bwNr1wSi~hlPQK zj@8!P?_MSp{?HemQkZ`?*r> zz)d$`-94g#yfUYi42y_QPQ13N-A0=y-}`_pcf`ywDsqDn#ta-C&{k#+RVPiv7zhXi zKAGdN;5&08p?+*2$rsB9T;xJEVyGIOADbpFeK$6f-h{z6!?|yt=c$|jUSgv~Gmoz! zs#UPwg;Ph$5k`RKXC>$gSARUl-}4HHV9}YL@Gj4;PF7UbMKwq|98V7y#fwOk1wvZH zbs5;N{AC*fE+w}Y$omKOKDDLZRmXC6uMnF?T0##w?cO61O$r;=l z0z#azUggeuA`!XEWUen^&$L3MFhtB6h=>Wd4?gk8ki0od=r z4$d=Rj5RIEE7kwQ)xl}WT`#3sFwS4o<O{(W=gCNG z!PN`G{(*?P_&QJNTI2&}CGd}=SN0;?MKUxDmmClLd%n~B%=qiq=SNH`5_lq2sH@zg zgX7e^scC6N1-~CZvN9+vL`8wHCN;v)z!a??PAWuhw@Z$PDA1uEW(Gc4%m!0?<5n(I z$~z+6Uv(eyujr+UfEl`FKK4)@=qP_TH+GeegC2z%_JAP9uC&Ua`LJ-PGs)dDNmu;% zrw1W4_sSr22kJHWsPg-Ig*VOo^(P$x?P~h>Q$-B_8EAN(g~`_Hx?;MlB5;jgnZj!E zGIa5kQzQaPZYsb1ezyF)?TbxJ(NouO!>yN@bcT&)0oAwnxHoOOJXACqkk}gl(Fuj) zG#-p_Q@6Dw3k%$ftxs;Yav_Qhzv|_e)Ly&dh!MZg`Kr?j)|n(W$&#(kq_06TN<#!4 zz0ItXrtM+pCy4Twlnh-qY;>WlTv9h=-6i4r?PX&W8MA4lY)wf9lDX$eoq9c)p`&+h ziL53;F+b#6&Cc>Va^*%nT8mEq&b&_pZG=A3!K|8=sk&ll$Zc;qph9p$_r792&V;yVlDw5;i z&aAV00Sc+!gSbaTUe1-D6}d4nAy0eP$8FfQFG8&};}H7Q@&%3ZU}!?lty=%b*18}| zja(!DyxDcEq-7|%cWC!4lTM*=IbyNxlHds{%vme(Bk?W9PAzNrO~G4v_!pT(KV-fz zZJe~IYHc(zgyzOEePv{5sS5qZ#owVPRpe%26^7R#lSW+3x$=j{!YyFF!MWrUAa2PL>C5(V!tJpA3xxg53_YDb($6YPVZXBUMY zhZ!fE+u<=w29sUW;7)9NqPIE}@C5QOGT@=)IET)v&~Kez{L*2R_1GiYJYsBU$1K}v zf876{*~cNBZO)D2tNhayxKX?{BvqPFJA!+7&jKktab>(-M^@kRfa-M8qtQp`*8uzM zZ9xvxxckg{EOnk5B&;~SPd36xe+in^VLnBl%VWsJMemq-EPNj-U75NHT@-T9@VFpN z`WE+{LHBT_0%MD%-^VJJ1<9i?EJ)V}CBAsxoe)hZ%oskvpw+DkV^Zxm)*xvLZcw#` zPw_)j^1Q?-?%yuFMOlr=O*sP|Ex+U{7Bk*YpGWR?;&$P}E)K)%8AW0vWqu0JA}Cic zf5eOkcWi*A1VN)efc@Vx> z1k}~KKPwO4!Kqs=%sEM}d_am8s?fJy&dz)>%hTCMFh=a!E_#W<;e%FTP6}KBfsf9h 
zs66+CmhUl6=cKM&?#~;vej_G9xNLRzR1f=r74jAxa!+1@DiYNU@YI0AFBMkV9|^lyKMl~knYis-pddwuP2Gm@U@Go#+|02oolv}k_&A?CQC%jrC}nZ{Zx<_^X+44KB1n% zF@RdfwkMPIBb*bM5Kme6=qL`!*? zt)o`AE98qf;5_B{lI1p}LZid0X#$v0{4TB?a)K~|$Lv)IGLQ16;A5^J2x<%((7%^H z9-57RUx)_vD4v ztD<`Wp3Ks<79&@*d z#%zQZW(RUs$fcCcK^t}3E~&KAwO~sbLvWrhIk8BjCGGB~hk1Qx@~l5YMq5Zjpjg$} zD~=RryWAfpmbvgt)P)Uq1Q7fjn|*XakAY{7^!ozU{yR`e7tBYgAFm-aldhuth}_HR z<$b-`Tpc*mFcVHI*Hn#N3bzyPoS0nqL-7c9{F4glhv%BRqon-e32-)J#I+{4wkzk0 zD-5|=Ortl~mJ@Z(Nq+h^e&q%0y>{Yl2Q)+iEwI&j8yO&FoGfSL; zaxf#|)eojP=JD&=n?2?UMZmxNRNi?MH8Qng%b5Ptm1%#Gh!EzZ%RyE6aNh$S;;V8$ zw!5k%?^7MwOz&$i*gqX~$~j!n)tKT~@Mfbyb6xe->UyE7qh$&**j!GS$fNak6aO$W zve}`98!X4$Apw#=x;#?X_){M2hmb*5IT?^G5)YR5wy zk=(l_*zOa&{66!A``E3z3=7FQ@2Tx=(()ZWskjGI{4aN)atlK}r#QyC7FS-L2L(mn zwA)rp<@?vrNw=sub~&z{m$p;Z*6AR~KN;mW@uGDA!qiQ|ht6^G+hRy|_ph42s> z9E$^`3H_`n9N(jdn`;Tm1XsCMh0m8t;56SgaLS9i2!=MSWQ>(sNxy1h8Sybx%_F8k z-5SnZYV*^os03Q&CF)8Yg?XVVIJB=Z_FzCNxshGMwJ>{JYMZ(-efGN#a0UmamK!CE z36jPRfYJa#qTDup;qKQ=0Y}38@hr#=RzQf{B6;a3&Ptol#C#^0u^}(l+k$(6HRsG< zNq9~(-8Y1oVh*FU8!eNDlu8^wWln04&PdoZ5moVC@(U{pyaLIev9j2__GUUU zzVj{0czW6ABK*=0aRXG{X2^Pa=qHVL{K`9GuT;lHjmv&dLc#dQZLzanCd!EIffRn2 zLH{%T$rq-`Iz3U}F~dJ#e48z?MoMc49MT?%7k~M*@N0+%!ra~_x#av#^Yg;58!ik{ zj%C#}ttN#YEtCY-#ugqtrnU*rAItE9UM%EY+sf5|qsN;n#6rqOSI0w6c%rWup~h z>j|?8xN4S`{qprNMFQ@whmW>=BmbTmT){`hj@yqi?%)_~zvj z(XJ2o=#m>?lE$L$9vacVfL#Bclu)~4ws3JmBQoJO3(G{p+IMB=cT{ch`d(TZ4a9lW zSSoG=@AkOUPuyvi@VMJF^WI%<#`0sZG_`r9{;!87@~H3oD#jDyEXR~RkV`kEK)q`A zB`VFJq`@?q!^P|rK$_31!qPnE22)j~>1~AORlD2Gu~B%<5?-1}5o5jCp3ze|@7d{n zeAv}BZskXmqJAmrIMi?4t2`?ByiafOwVuI79YUPMM`5v2v%}2;Tibkt!d8vUe8vBcA$!N4QHA`EH{nGo5!j^N3U$7AEu^i{ZOod zmz`g}#B<`h*V|{*+&Qp0MZ1m)Dej(VGd)(H^xLwnVjmU?9O8@p<2zt^*S!ytn27&C z$TF+Mz|=Q#$VGc#Mm};|1aYGL;^i{qK3QG&Yd})2?jek9m1J1D^{cB0^nzKMO(f%9 zF2$4^nqb1$m*ehGXOefcwsy$E*5}rd_qj0!qKHC;p@Gg+*!u5|nhs``WrI!JD0!VL z?)4uFaF*V1<@IA;g#kqM8;N-I3&A?|L_d{F-?{KCue^WKnt`x}s}s01VwS-+ zAs3+O%l<3*nr_vLzlG}$x1QG~nD-z}ltZ3x)M?0edT(Rg$u1L+GCN`y%hQ%&B)w-Y 
zLUD%or}9y71pT!Lw>;XnHJnB?`?FSi=UkJqnod-x>=${hfXtTb5Ks> zdp!OQaOQwiR(+cTb|JDgN7+s1h(dOJ^2$%71pK8tPmcx9;odVD*_f?;*%f$IDK!8o zMTqK-dge-#1x~p?2_h-dWG-~g-bkjpuHu70H+aR)?ti0WBbbv1ch9o=cP9Mf-?=jP zn2EvmrBTeiBQr(TX1oJalM2epiBS$AOx&!d-K@BAhY>ldFjw0};ekuFL?ry4 zaH)s$9*heVz@>WsUtaY_qqarkG7WNqaU zr(%laBKIS!WGaLs>E(}~Mpag@ZT#p{Rj(UF@3cgMP8@a zT)=_zqXk#hifpY2OIBX)pc!32U}_7-e|BOemt3lH7k*V`zF1(~sbq8B$vw=y!Z`3r zbFDb)h^0aUGd8KlT&ZgJy38}oR!Gn6%U1vX*Xx1@-2F7rLn#8i=xXtDy@QXv2%}eS z1|K*C-MOq#<{NeX{0%(MFpHn{n=JiP+dPdLEmL&H39L^J^Bv27DfxZ(jic{r;;Q+? zQ=3qPp-7-_VM{Sz`tj{@>5Ox&&5ZL-&qiQ_$aU)^wZ$*Ib2gTYQ^ z=0eoJdPX%_o)o?68@jsa!Z-RvRU~Lf%E}zZC@g(p#$q^dwuE$BaP7%~@rq-R7emC? zvYj8t6c)Ja12bmAD(8#MYbfE*YG?N9S1liGE{HKExDjq>Fel}h?@JoJD>QI&pi5w} z%-NfX`7+Dq++Kl9?ogQdm9#FLcMg4I))J`bH^%S5INm{br>NwjMNTU1w;Rbv<6^Bi zPQrW}5!Jdsw!ooe_o>{p!K`Rx@JV}_r=5@dQ_l&2<)Zv|d-0)5D>`k|kGErI$qK0W zV7gWmN7cbf$(#rAJzH%c_V2C8*mp!EylYDT!BNGvy|1incg070^jf}sjnDF^>`gCc z@=*n6;dX5SZ|8EO@o2zNr$(dazrD(zsH}V|A?=kgDi&!oSphOQ`sHuWy&bXf{T?4* z7x5n#>KM&d+XFI`2W{?rZnU_$?5o3PS>=)1!yiar8Y*&EoXXP<gKUc%~Yt>_?D$1XWWH3(=eK7*`%Ump(c=y-ZHa2q$As@TyCf{)+VPt;K8l9 zUII1)*JwpQNuh4L<{|2pFxVHWI+M=!#FV2i*L>t=WRT$r7>X-ErDd?rwI*Qbr1J0=ptQ`yYbJG)q$0xjyLI3_WoS=kLGK$p~v@~~W#X?b8F2NCq;+orO@t&M@KlV3di3Wr%>VpLLWIB)tg^r%{eCTYV45@2R1d}e~E3W zR|UK5*h)f#p6sc$!zNnKv3b(+yP3xdl#KuU36>i(X;HXM6T?p0j1e{ncn5B=r=7X) zPvJ4~8&L088eU*u7%5$lQ-YOM1{<|~Rl41`^!5D;RVruGGz+8SPjPZSN%ObAG*@qN zt4~9y+~xvR+JRs*$hGak;!voD0S|S20MG9Y$4@%8VIKO9BMvcNQd-DshL=*ABgZAA z>DDtQI%X{FtVcA7LLIx7W_UWiNks?Ls}LK)sE6@qHk!`>N_~xntbEu7p7I}fbuhD4 zp!OEAm&(HkR*>I-8bGsyQ@kxE!970DmjkQg8z5UVx_8q@nQve(M>uhKi1aZ~`*jMf z;$O%tL9PS#vWHo;kQX0mx0A6ORml0pXAK=bIzY?T5YP{-&Wx!k?d|ug%*~u+&$hye zx>h1Gj&Gz#uVTGyLEUrgb>T$02<%S19JSp1$9MFAlhZ52rhcW_K?N2;@k}co7BMtC zQcTjjyuuHOtG?E1JU?U|y;&;4=Y9^|VAdih8}zyr#{xj zjbB<_(-x&vgxzZ%6OQe<%{1f8#|7){E_i#YJtdmk?sh4pKwu1pcLY<05M%APjVN)P zx(;4b%NxqX212DCYzvOIX15?w#*VwGt-z9L3f~r3D17oZKn9tSaTDhH?H)N46)3K? 
z>pHHJ@a=ZSNb}4g8?aAAsH2mGgI)l37wH&w{){8*m$GuisgAx{#|+S*X+pL0E@E%8H%Yq` ze&7k&7l1=s-p`a%g~T7cziPxu@AX3nu$#ja49p~c`jssg3{@b`CCDS!$rstCd6|$2 z<c;A%0 zuUmN`fo#)5Mt`8=vnBNK;Q{gKCP~TfrWp5PY zK@6!kyRXg8yH)f1%lYHc5v@Z}cL(WXr0|(mg!&25dPkalPN;9XQXoSQtB6RbIczi9 z5PDhb4k!ja3Qz~Q623v&vWP5-dI0s#tQ1d^L1T0sil1g51I#JFhMFZ?3YM@YB@XFal;atTQ4i%xMl1MLvc6W zAV`(!4irFSO0|y$EN@SGmSvwq=~IjcM1~VsP2$ka>DC$q9#Wvx_w2d!u^R5&<))dr zS8}rYl6e(yr>`{GtCWbm+MGL=;L_AJ-=NjkiW^oDV9Fh13=eMv-C`I52wdy@2$p)_ z-at!s$E33`Uf>LRctbko2NcDTn`2fa#N9*r#Ht#qpkA>|wHB`HYk9z$@{{n?%#@)} zAVSDzp5BV+)r^^B;}Y(Vbyo$HS~`5F$OxPGLUTDa*(@$P6t=H0X$0L0W5_T8^MZFu zffM_CfvJ}0aT?b#H;fd-+_URSw!JuUrMdZOZ<~h@kyRWlV!ny^;8ZPO`31&Hi-t;EtVVrQI)q`7Aa&3=ex z2l_YalRFtVNAxb0RS;1t<3w?mbjc)hE)E&>&$#vu9(&U#=rcfYTiyO1zcUupH{fY8 zx${(?A`Z9WqCfRyzBr<&N68HNi~(A@>WPQind!-Q?0>}+{*{gr5^0VoophpwmeM_j z9Qr@D;vigru6ZqW;|SCA1~Lz||CV1vSVdCG!a< z=H8FHSr`POv?p8!P)e`1ZviV5hBvGh1{S}GvN+^ik)jQqH-pT1i57^V;+UT{680IH zbnvSF{s@Ku_?r{B^jFNxEfP#lUiKE{oa>2O1kr?3GcfX2ZGH1!N9GJJ_s|`_9%So*uQA5Y z$7(|1y$uOLf;@CiKbf<^fMSA`bo%ar(qUlQR=KL3LtzmwVQ;tdDlkN?^RSnvB~_TxJRbLV4`d!<4-~~!(ejDjCGCOf z013x<)*pDPh=Y&WdZ+$K>%St%+FF2 zgt+dJ6Pew>2)R}>3GIFDSAt2-mgwI+;Kr_Xlu-1Cg@t^M+uje%)Xg87Qb!4FU*9Hb zSwa#$f{G95k4A;)Py;hP1@~{c8ix&pb)?Ejj@70YOmSsEy{VXVv;<_I8XNy4)px2X z{bs-#%kH3^wrmk~Ta`g$!JhC>V`s>=ho;q44KpwU2o2E6HE(49xbj0AAHLr`i>-B5y`kc@O z;TZySmw4K7GZZvKy?)efX8(v%Xn4P*>H2&y!$Oh%^ntGzg6JMjlw*~bjU-{-_N(Fi z5=dviP>~h(q-fVV;#7XuJp_6;@G0y#8Ap;Ud0hEfIKofbn*r7=Csw+4;2c=oyM=E( zB{ZZI2vt#XQrKn`gaohjzuxIDrEtMx*0Zzir)d;|9(L7R*~^HvgsPj#-5bdG1%pL# zlz?-*C&PC0q`*Ky8!goN3o6u$cKj7|s_OaN6Uon*AZg(TC%7FKTykk9vpmy0rx8h^ z$Y96GetDFjU)ty+aBHjcdyf8P6&HUcK$~veb-#oDR+)Sr6siursG@F!I{Fpv$T3~HSsem5U@I!cJf zw0(Z5%#f|+Ty=m2*Cwo_hARs{^hYjSk@l|b>rx7YKgC^DOVkGL^;<029NUU-gxwZY z_Ul`Whp15x)^a!mCs0ICM(`b5-Z?^Qg5DBIFZz(K+v$?kAzl0Fp@y(~0(mz)bJOyJ z2!?&l$hJ!^>t!R;aVPpjS-7RVY4fmKXkCW==sH(lsIRiCyjykEjWz~Iym@TqoM>CN zVzOEH-&g>En%O7<>B^d(4NSH?4LNL8H=8C#yP%hDUV!>tY$=1)1Poq|%VECNP?>cb({O5=l4H3@nBxn0+;?Rk4G+Y}r_eLV`7q%(;Pc6NF`l 
zp7DZ(o!}U&;fBT(20`R%%Urim;BN3*hZVnWZGXhg?h>5hGh{s;+j<*<3_~eHD-8W2 zyW+2i03c|H5&jY4|Ik5KL!G~27AtAQaGV~%@4kC7S3hz*V!OO3ye63aOuEExl!86V zod5`y{=@~B)N|t7p>k#Di?_Dsmpam>31PmQzgUoDr-IKL_&b5>Tie9;x!IJ2Ngz}y zp#0UH07$H5Ebs#){5T~chJ|t;fkjJ^3W^$4YSzRq7?>75wGk`d=uZ7EvDhfQ!Q=t7 z-c;&FE9OTW=no02Es`Pqx;BdFJ-U*Xv!*cFJngp6^*3reqBh{tH8et8vlT!Vrz>Z- z`*ha`UJY>r+?)KE1T9Zh5cDs&O5Dn<5rX5M<5drw@;Y!NA*RUcuDdTW#PYw9mz*Kp z9eYEO^B@GUz;yZjTMA8ZL(o7+^;M;UQr=jZ2*^Cld4RQ<77007!$mnuyDViMWNf-4iRU?zgc7GhRs^-OQOI zaa3~PQM^P29lWe|mr&AYMq21TmR_4&J80q@WCrTC^4M^;30<(rke9NtEIxevObP-mduzyL=6@v z){hTspZYx}jFTx7=>waz1w)?@b;1zb+k*70C*%nqpaN?6_)7fRvi&__eCq~xPj4=n zpXVV{xgBr;v7V=utC`;-nh3~cedK(rg&p@J+==rb(rN6Ca8ZOGbppBGylE{%%M8oC zTBFtp6e;||@bgB`eqz(?V>SF#j+x`gVZ!(Tezuc;v+ z7W7U0{?H6bv#J>&f=X)|>(g9}rhvd@1m9je?Moms39K7Rbxr{)?RrlUJ3Jenh_LVz z2O{v~Ic^*@9g5tjFf@Gev6}-l0NNP(QjLb&Irhpf=<34go5WciA&} z;Zb&4i|?eUU^z>-wsh|Ckq?h@B5uW}wgmCk1n_6BK7u83 zZ-WJ6`RSRm)2?Kx$CFQn^+c#|elfE>{gL{qwOz%hw*vvw)F zZ6Y^+M$eJ4VYfET%I%ut>PxdIxGq2&01z{F2H^dweBQ|@)#SECVL<{|ZH(-7!T@L~ zxOyY!nX)MlB4b>s)S-zkNJ#;u?)Rp2;PQwhX`SANWx>x~&5vCt8Tz`W%Wv}>4T6rC z!4fNTD|h|Ck)1h*fKnnjB$7DG^FtpTY1fOiimll0?|i}FOaIi~kr0gWXh(jG*(lE$ zuiZmTM_XweHXt&K<>z&l?hd2*Zdvf=hz+v` z<}i%yBE#EQZ`I=q#TOAD#=*;!zYS=w}ZKYbH|`WTU9D$pDt>Balvk<0`s+d|g!Q=gN~sw_QT`@XGtm z%8eYqYaFnNvhiu1xZyF)ANhOmlmkZGd`iyvXQ{IV_pd&=xw#M@s@QCyjPmOVPk4{) z67JN#fV$wkymy;&p%z-aB$W;f#gWI;tz8kGPDBrTGZLOYO2cGxT)&6!x$pjFL6Tc; zwq(}FWfazWX{BUC-;r`UHDjZ^C}%GbNg0)uBE^$iktBAb8y2*^P}r}&_~vw%F8+r& z&=IjKnd4^X(48dTI0BPK6|a?Ylw9ILtV@u#IXMOU4cI-3C~dB``W0EgQ@2*r(v>Jw zQq$5?3oan>9`U13xVq04FxItHiF77(+{#Peomh-6h{cVETzwIHK1Vy^j01GpLQuXv z)e(n_)K0~Gn}-v4xz*?Rj+g`%hJG>hf$!l!{lu+V+kU>%S)YATh}Lv0@2NQJjDcN~ zh-a=j<3jk)`H$e84tHv5j;~sM%pT|W^}879$q5-b9(rP3IdJ{TjqtSqTQA`;LHEWPk55zD+sz(x zdqR7QgQb|=Ph#2O5YtPNv`fUfrf+01EwW$P`dr8oe!A48Ef(cf$F}M5wsM%=SSJY>K+*TO6p?7vBvRrp)-S|7S+z1a z;^=+6qqcaH8V}7mHC(!`UpfWiohrm@y#3OhE1~Fdq3OkgF3*V4bxp&Bfj0UVBUEc1 zcl{X>HuiG3Wb&|F|hSS#+Ow|bd 
z^IWgM^Gf!AB?v`IaekE%%NDquaWEcKv(qDZYkqmRAr?L)!MLBFld#Aq92x9gON6|Q zyzNCt-2AI&`xkpxg=@)Aq>^g6hMmJGf63G%-<1aEsliDlOwa|StL^hKzNJv-jU49~ z%(*JI4zFIt{ffJ;w0k@2`(-a~D(o8OOfgzs*nR9AkyDUTMZfti5m}owqs{cSX$31EHUz4+|b9XYm>n_)q0gq1P)VF)N2^e7)}->wfZWh#q(+g*wLV znC&)>O!JA@+OM7*@2^drR7rvC9!9n|CmEEe;A<}hyHe_lYF7%Ee$Of&*8~aGWAk|A zHD4PY-(O{)sxXYGQZ?=wZ%v;V*=}ODSH9}dpOEn0B{Y}qJ~&WGiu`TelDX24n+l`$KR5y7l$xd4-m)kR*et=L0T>RKydo@pG1NLD) z&$A1pssoBx(S2Ir*f>dDY^{m>(^Eh8sj*S_X+!L1F*<83&QT3&*rOb_pnTg@tQ{r= zPE)EN2qE{bqjSa(n-2FM1Fc$sSmsbf5Y#{R2iQ7Q{eSfo5$W;?XI1H}2SbSvdurjJI|5hFf)U4>n_` z7&P_yE{w>AhD^3@#)a{p;kx+lyM!6)QwTw&x_XO2vt(~zJoH9ih#=QtqTLY*x3Wbn8ACiIPWCz z?G&^O09t4;jV<8>UNh;NHU+-ZYSOmmqL4A^ALu4fuMI4zOj1t469M2A1Mn!wh`C^h zzV|-fr!7#}i`>o%inQG|PUrj9Ka$wadLI*Yt!V&wR78{Xo^enV;Ie+>U@Dl`7 zUPdx0(AS0ow!#8Hb_{qwXIehZIYfVzkL_ti6czCN0-xGd@9fi{R_?Rrb$&qPUL-rY z>cek^eY-lUU)?&ww>;u}5I5=$P>XXc&sskLt-%Y~uK5&~mlB^f%{S7Mn>k0w)N?jP zFQTpjYJk1#6i;#BQzbrTXAQ}LQ^i2`@WH-^L;N3>!lVIrTuXZEi09W9z3*EtOI%p~ zvCrs5cy9novzTVHq9o{I9M8sak^vkJI*|hR=w2qBB)-d&#iwos0*U-8+o^_xR7OsN+a3NoAhO66tG)p9zIg9l^Ui zE<6xU(3g;4+pg2LP==~T!);l8xkdxo#I1=}3};UvXcrsPA>XL9_r}nGX9=4G1tqNp zu^Ep8f$C-#(v|b&+$tf6bLYlJ--9v{+}^dmQ&6t`+iEkdRbRHTbNCM`cFTZA2Sh5Q z1n&xYshM;Cnap|y+r&lCS(A~JUzjO!7jNRqU4UAdGsdi&1fH%NEZwEY`kVQ*Aa12t z=nrG(gD8E&#%~fcw@DLBmSeDEy9UZ25uFX@w~iKCBEi)g-{|ThuzfxH)7LQ=U7V5p zAVJ0j9EbV1rqeyK%>XCd)gU1#7*vkHEOH1PwEI?9Etb1stj>X-L}gP&;J;07(H?D0 zwJ@_d6rWp-m5pM^eG&gw;+EkNv-CjvjPGDLcSoxi0I z#L|hUwVr|Nr0@-RlK%)CAeql3@*E;?M)-Qu3`*wJ*-D)F-uy?vKq!XtPVC(0u7WzE zO(2{Bn8dMT09)1{(m;^pR}1W7lD-Dk=Y23<^~XQs^`849@xi{q8H1eylmsFPuBBlR zn-MHXY&Re`t@B^(H-W2fiN&ES5W|UYO_9Cyef0ZtASS0q^5Oq1b=rzsE z=;Ery2mb9%?7;%anmOZ6uzzKP7Y?5CgTZzN8=55E6zPAoyT%77d;SBjZ|s8`AI@fD zpC1wY3OfmGF_!-qntCn#t#O7vP<)`XCA?>n{dZn0rGLqdt^Xwd)56D&PVfoU4TMa; zXY7N!WWb>7I&44Rfvo^!z|FjcUk}bW6CmTttUi{W6cq7k(O@;$$m-v1)U}n)0N{Gh zH~eXm|L>g&Gg_7#16TwUa}eR01LQ#`s4A-+_RK-MF}*cW@=T-@w|&Zu;rzoxP#v6# zjQ5mvrKyOz`W|Ct7z_6m({us!50GlFvw!1sBxyZ>Gm#%>EDk}7#tdYq)5G#Db12Yu 
#!/usr/bin/env python3
"""
sync_variant.py
===============
Sync variant from KiCad schematic - read DNP flags and update variant data.

This script reads the current state of DNP flags from the schematic and updates
the active variant to match.
"""

import sys
from pathlib import Path

from variant_manager import VariantManager


def get_all_schematic_files(root_schematic: str) -> list:
    """
    Get all schematic files in a hierarchical design.

    Recursively follows every ``(property "Sheetfile" ...)`` reference found
    in the root schematic so nested sheets are included exactly once.

    Args:
        root_schematic: Path to root .kicad_sch file

    Returns:
        List of all schematic file paths (including root)
    """
    root_path = Path(root_schematic)
    if not root_path.exists():
        # Hand the original path back; the caller warns about missing files.
        return [root_schematic]

    schematic_files = [str(root_path)]
    schematic_dir = root_path.parent

    try:
        content = root_path.read_text(encoding='utf-8')
        for line in content.split('\n'):
            if '(property "Sheetfile"' in line:
                # Line shape: (property "Sheetfile" "sub.kicad_sch" ...)
                parts = line.split('"')
                if len(parts) >= 4:
                    sheet_path = schematic_dir / parts[3]
                    if sheet_path.exists():
                        # Recurse into the sheet; de-duplicate shared sheets.
                        for sub in get_all_schematic_files(str(sheet_path)):
                            if sub not in schematic_files:
                                schematic_files.append(sub)
    except (OSError, UnicodeDecodeError) as e:
        # Best effort: one unreadable sheet must not abort the whole scan.
        print(f"Warning: Error reading sheet files: {e}")

    return schematic_files


def _variant_from_title_block(schematic_file: str):
    """
    Return the variant name stored in the root title block, if any.

    The variant name is kept in the title block's ``(comment 1 "...")``
    entry. Returns None when the file is missing/unreadable or no comment
    is present.
    """
    sch_path = Path(schematic_file)
    if not sch_path.exists():
        return None

    try:
        content = sch_path.read_text(encoding='utf-8')
    except (OSError, UnicodeDecodeError):
        # Title-block lookup is optional; caller falls back to the
        # active variant. (Was a bare `except: pass` swallowing everything.)
        return None

    in_title_block = False
    for line in content.split('\n'):
        stripped = line.strip()
        if stripped.startswith('(title_block'):
            in_title_block = True
        elif in_title_block and stripped == ')':
            break  # end of title block, no comment 1 found
        elif in_title_block and '(comment 1' in stripped:
            parts = line.split('"')
            if len(parts) >= 2:
                print(f"Found variant in title block: {parts[1]}")
                return parts[1]
    return None


def _scan_symbols(lines: list) -> list:
    """
    Parse schematic lines into one record per ``(symbol ...)`` block.

    Returns a list of (uuid, reference, lib_id, is_dnp) tuples. The UUID is
    taken from the ``(uuid ...)`` line that follows the symbol's ``(dnp``
    token, so symbols without a dnp token yield uuid=None (matching the
    original heuristic). A bare ``)`` line is treated as the end of the
    symbol block - a simple heuristic that relies on KiCad's formatting.
    """
    records = []
    in_symbol = False
    uuid = ref = lib_id = None
    is_dnp = False

    for i, line in enumerate(lines):
        stripped = line.strip()

        # Detect start of a symbol instance.
        if stripped.startswith('(symbol'):
            in_symbol = True
            uuid = ref = lib_id = None
            is_dnp = False

        # Detect end of the symbol (heuristic: first bare close paren).
        elif in_symbol and stripped == ')':
            records.append((uuid, ref, lib_id, is_dnp))
            in_symbol = False

        # lib_id identifies power symbols (power:*).
        elif in_symbol and '(lib_id' in stripped:
            parts = line.split('"')
            if len(parts) >= 2:
                lib_id = parts[1]

        # Reference designator, e.g. (property "Reference" "R1" ...).
        elif in_symbol and '(property "Reference"' in line and not ref:
            parts = line.split('"')
            if len(parts) >= 4:
                ref = parts[3]

        # DNP flag - can be (dnp), (dnp yes), or (dnp no).
        elif in_symbol and '(dnp' in stripped:
            if '(dnp yes)' in stripped or stripped == '(dnp)':
                is_dnp = True
            if uuid is None:
                # The symbol UUID comes right after the dnp token; only
                # accept a uuid line indented at symbol level.
                for j in range(i + 1, min(len(lines), i + 5)):
                    if '(uuid' in lines[j]:
                        if '\t(uuid' in lines[j] or ' (uuid' in lines[j]:
                            parts = lines[j].split('"')
                            if len(parts) >= 2:
                                uuid = parts[1]
                                break

    return records


def sync_variant_from_schematic(schematic_file: str, target_variant: str = None) -> bool:
    """
    Sync variant from schematic DNP flags and title block.

    Args:
        schematic_file: Path to .kicad_sch file
        target_variant: Specific variant to sync to (optional). If not
            provided, uses title block or active variant.

    Returns:
        True if successful, False otherwise
    """
    manager = VariantManager(schematic_file)

    # Resolve which variant receives the schematic's DNP state.
    if target_variant:
        if target_variant not in manager.get_variants():
            print(f"Error: Variant '{target_variant}' not found")
            return False
        active_variant = target_variant
        print(f"Syncing to specified variant: {active_variant}")
    else:
        variant_from_title = _variant_from_title_block(schematic_file)
        if variant_from_title and variant_from_title in manager.get_variants():
            active_variant = variant_from_title
            manager.set_active_variant(variant_from_title)
            print(f"Set active variant to: {active_variant}")
        else:
            active_variant = manager.get_active_variant()
            print(f"Using active variant: {active_variant}")

    # Walk the root schematic plus all hierarchical sheets.
    all_schematics = get_all_schematic_files(schematic_file)
    print(f"Processing {len(all_schematics)} schematic file(s)")

    all_dnp_uuids = []
    all_uuids = []

    for sch_file in all_schematics:
        sch_path = Path(sch_file)
        if not sch_path.exists():
            print(f"Warning: Schematic file not found: {sch_file}")
            continue

        print(f"\n  Processing: {sch_path.name}")

        try:
            lines = sch_path.read_text(encoding='utf-8').split('\n')
            for uuid, ref, lib_id, is_dnp in _scan_symbols(lines):
                # Skip power symbols and symbols whose UUID was not found.
                is_power = bool(lib_id and 'power:' in lib_id) or \
                    bool(ref and ref.startswith('#'))
                if uuid is None or is_power:
                    continue
                if uuid not in all_uuids:
                    all_uuids.append(uuid)
                if is_dnp and uuid not in all_dnp_uuids:
                    all_dnp_uuids.append(uuid)
                    print(f"    Found DNP: {ref if ref else uuid}")
        except Exception as e:
            # Keep going: one broken sheet should not kill the whole sync.
            print(f"  Error processing {sch_path.name}: {e}")
            import traceback
            traceback.print_exc()

    # Write the DNP list in one shot instead of one file save per part.
    print(f"\nUpdating variant '{active_variant}'...")
    print(f"  Found {len(all_uuids)} total UUIDs")
    print(f"  Found {len(all_dnp_uuids)} DNP UUIDs")

    if active_variant not in manager.variants["variants"]:
        print(f"  Error: Variant '{active_variant}' not found in variants")
        return False

    manager.variants["variants"][active_variant]["dnp_parts"] = sorted(all_dnp_uuids)
    manager._save_variants()

    print(f"  Updated DNP list with {len(all_dnp_uuids)} parts")
    for uuid in all_dnp_uuids:
        print(f"    DNP UUID: {uuid}")

    print(f"\nVariant '{active_variant}' updated:")
    print(f"  Total components: {len(all_uuids)}")
    print(f"  DNP components: {len(all_dnp_uuids)}")
    print(f"  Fitted components: {len(all_uuids) - len(all_dnp_uuids)}")

    return True


if __name__ == "__main__":
    if len(sys.argv) < 2:
        # NOTE(review): the usage line had lost its argument placeholder in
        # the original; restored as <schematic_file>.
        print("Usage: python sync_variant.py <schematic_file> [variant_name]")
        sys.exit(1)

    schematic = sys.argv[1]
    variant = sys.argv[2] if len(sys.argv) > 2 else None
    success = sync_variant_from_schematic(schematic, variant)
    sys.exit(0 if success else 1)
Disconnected
+ +

KiCad Manager

+ +
+ + +
+ +
+ +
+

Invocation Command

+
+ + {{ invocation_cmd }} +
+
+ +
+ {% for key, value in args.items() %} +
+
{{ key }}:
+
{{ value }}
+
+ {% endfor %} +
+ +
+

Actions

+ + + + +
+
+
+
+
+ +
+

System Initialization

+ +
+
+ +
+

Settings

+
+ + + +
+
+
+ +
+ + +
+
+

Project: Loading...

+ + +
+
+ +
+

Variants

+
+
+
+ + + + + + + + + + diff --git a/templates/variants.html b/templates/variants.html new file mode 100644 index 0000000..4624d05 --- /dev/null +++ b/templates/variants.html @@ -0,0 +1,446 @@ + + + + + + Variant Manager - UM KiCad + + + + +
Disconnected
+ + ← Back to Main + +

Variant Manager

+ +
+

Project: Loading...

+ + +
+
+ +
+

Variants

+
+
#!/usr/bin/env python3
"""
variant_manager.py
==================
Manage KiCad design variants - track which parts are fitted/unfitted in different variants.

Variants are stored in a JSON file alongside the project.
"""

import json
import sys
from pathlib import Path
from typing import Dict, List, Set


class VariantManager:
    """Manages design variants for a KiCad project.

    Variant data lives in ``<project>.variants.json`` next to the project
    file and is re-saved after every mutating call, so instances can be
    short-lived (e.g. one per CLI invocation).
    """

    def __init__(self, project_path: str):
        """
        Initialize variant manager for a project.

        Args:
            project_path: Path to the .kicad_pro or .kicad_sch file
        """
        self.project_path = Path(project_path)
        self.project_dir = self.project_path.parent
        self.project_name = self.project_path.stem

        # Variants file stored alongside project
        self.variants_file = self.project_dir / f"{self.project_name}.variants.json"
        self.variants = self._load_variants()

    def _load_variants(self) -> Dict:
        """Load variants from the JSON file, or build the default structure."""
        if self.variants_file.exists():
            with open(self.variants_file, 'r', encoding='utf-8') as f:
                return json.load(f)

        # Default structure for a project with no variants file yet.
        return {
            "meta": {
                "version": 2,  # Version 2 uses UUIDs instead of references
                "active_variant": "default"
            },
            "variants": {
                "default": {
                    "name": "default",
                    "description": "Default variant - all parts fitted",
                    "dnp_parts": []  # List of UUIDs that are DNP (Do Not Place)
                }
            }
        }

    def _save_variants(self):
        """Save variants to JSON file."""
        with open(self.variants_file, 'w', encoding='utf-8') as f:
            json.dump(self.variants, f, indent=2)
            f.write('\n')  # keep the file newline-terminated

    def get_variants(self) -> Dict:
        """Get all variants (mapping of variant name -> variant record)."""
        return self.variants["variants"]

    def get_active_variant(self) -> str:
        """Get the name of the active variant."""
        return self.variants["meta"]["active_variant"]

    def create_variant(self, name: str, description: str = "", based_on: str = None) -> bool:
        """
        Create a new variant.

        Args:
            name: Variant name
            description: Variant description
            based_on: Name of variant to copy from (None = start empty)

        Returns:
            True if created, False if already exists
        """
        if name in self.variants["variants"]:
            return False

        if based_on and based_on in self.variants["variants"]:
            # Copy DNP list from base variant
            dnp_parts = self.variants["variants"][based_on]["dnp_parts"].copy()
        else:
            dnp_parts = []

        self.variants["variants"][name] = {
            "name": name,
            "description": description,
            "dnp_parts": dnp_parts
        }

        self._save_variants()
        return True

    def delete_variant(self, name: str) -> bool:
        """
        Delete a variant.

        Args:
            name: Variant name

        Returns:
            True if deleted, False if doesn't exist or is active
        """
        if name not in self.variants["variants"]:
            return False

        if name == self.variants["meta"]["active_variant"]:
            return False  # Can't delete active variant

        if name == "default":
            return False  # Can't delete default variant

        del self.variants["variants"][name]
        self._save_variants()
        return True

    def set_active_variant(self, name: str) -> bool:
        """
        Set the active variant.

        Args:
            name: Variant name

        Returns:
            True if set, False if variant doesn't exist
        """
        if name not in self.variants["variants"]:
            return False

        self.variants["meta"]["active_variant"] = name
        self._save_variants()
        return True

    def set_part_dnp(self, variant_name: str, uuid: str, is_dnp: bool) -> bool:
        """
        Set whether a part is DNP (Do Not Place) in a variant.

        Args:
            variant_name: Variant name
            uuid: Component UUID (e.g., "681abb84-6eb2-4c95-9a2f-a9fc19a34beb")
            is_dnp: True to mark as DNP, False to mark as fitted

        Returns:
            True if successful, False if variant doesn't exist
        """
        if variant_name not in self.variants["variants"]:
            return False

        dnp_list = self.variants["variants"][variant_name]["dnp_parts"]

        if is_dnp:
            if uuid not in dnp_list:
                dnp_list.append(uuid)
                dnp_list.sort()  # Keep sorted for stable diffs
        else:
            if uuid in dnp_list:
                dnp_list.remove(uuid)

        self._save_variants()
        return True

    def get_dnp_parts(self, variant_name: str) -> List[str]:
        """
        Get list of DNP parts for a variant.

        Args:
            variant_name: Variant name

        Returns:
            List of UUIDs (a copy), or empty list if variant doesn't exist
        """
        if variant_name not in self.variants["variants"]:
            return []

        return self.variants["variants"][variant_name]["dnp_parts"].copy()

    def is_part_dnp(self, variant_name: str, uuid: str) -> bool:
        """
        Check if a part is DNP in a variant.

        Args:
            variant_name: Variant name
            uuid: Component UUID

        Returns:
            True if DNP, False if fitted or variant doesn't exist
        """
        if variant_name not in self.variants["variants"]:
            return False

        return uuid in self.variants["variants"][variant_name]["dnp_parts"]


if __name__ == "__main__":
    if len(sys.argv) < 2:
        # NOTE(review): the usage line had lost its argument placeholder in
        # the original; restored as <project_file>.
        print("Usage: python variant_manager.py <project_file>")
        sys.exit(1)

    manager = VariantManager(sys.argv[1])

    # Print current variants
    print(f"Project: {manager.project_name}")
    print(f"Active variant: {manager.get_active_variant()}")
    print("\nVariants:")
    for name, variant in manager.get_variants().items():
        dnp_count = len(variant["dnp_parts"])
        print(f"  {name}: {variant['description']} ({dnp_count} DNP parts)")