diff --git a/src/glayout/blocks/ATLAS/.gitignore b/src/glayout/blocks/ATLAS/.gitignore
deleted file mode 100644
index 9c42a36e..00000000
--- a/src/glayout/blocks/ATLAS/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-params_*/
-_old/
\ No newline at end of file
diff --git a/src/glayout/blocks/ATLAS/HowToRun.md b/src/glayout/blocks/ATLAS/HowToRun.md
deleted file mode 100644
index 91d28ae0..00000000
--- a/src/glayout/blocks/ATLAS/HowToRun.md
+++ /dev/null
@@ -1,13 +0,0 @@
-# How to Run the Transmission Gate Dataset Generation
-
-Work in progress...
-
-AL: Sep 29 2025
-
-Migrated from Arnav's fork of OpenFASOC with my own modifications...
-- A lot of effort is needed to make it compatible with the latest gLayout repo
-- Not tested yet
-
-```bash
-./run_dataset_multiprocess.py params_txgate_100_params/txgate_parameters.json --n_cores 110 --output_dir tg_dataset_1000_lhs
-```
\ No newline at end of file
diff --git a/src/glayout/blocks/ATLAS/analyze_dataset.py b/src/glayout/blocks/ATLAS/analyze_dataset.py
deleted file mode 100644
index 037cb3e1..00000000
--- a/src/glayout/blocks/ATLAS/analyze_dataset.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python3
-"""
-Generate comprehensive statistics for the LHS dataset
-"""
-
-import json
-from pathlib import Path
-
-def analyze_dataset():
-    """Analyze the complete LHS dataset"""
-    results_file = Path("lhs_dataset_robust/lhs_results.json")
-
-    with open(results_file, 'r') as f:
-        results = json.load(f)
-
-    total_samples = len(results)
-    successful_samples = [r for r in results if r["success"]]
-    failed_samples = [r for r in results if not r["success"]]
-
-    drc_passes = [r for r in successful_samples if r["drc_pass"]]
-    drc_failures = [r for r in successful_samples if not r["drc_pass"]]
-
-    lvs_passes = [r for r in successful_samples if r["lvs_pass"]]
-    lvs_failures = [r for r in successful_samples if not r["lvs_pass"]]
-
-    execution_times = [r["execution_time"] for r in successful_samples]
-    avg_time = sum(execution_times) / len(execution_times) if execution_times else 0
-    min_time = min(execution_times) if execution_times else 0
-    max_time = max(execution_times) if execution_times else 0
-
-    print("๐ŸŽ‰ LHS Dataset Analysis Report")
-    print("=" * 50)
-    print(f"๐Ÿ“Š Dataset Overview:")
-    print(f" Total samples: {total_samples}")
-    print(f" Successful completions: {len(successful_samples)} ({len(successful_samples)/total_samples*100:.1f}%)")
-    print(f" Pipeline failures: {len(failed_samples)} ({len(failed_samples)/total_samples*100:.1f}%)")
-
-    print(f"\n๐Ÿ” Quality Analysis (among successful samples):")
-    print(f" DRC passes: {len(drc_passes)}/{len(successful_samples)} ({len(drc_passes)/len(successful_samples)*100:.1f}%)")
-    print(f" DRC failures: {len(drc_failures)}/{len(successful_samples)} ({len(drc_failures)/len(successful_samples)*100:.1f}%)")
-    print(f" LVS passes: {len(lvs_passes)}/{len(successful_samples)} ({len(lvs_passes)/len(successful_samples)*100:.1f}%)")
-    print(f" LVS failures: {len(lvs_failures)}/{len(successful_samples)} ({len(lvs_failures)/len(successful_samples)*100:.1f}%)")
-
-    print(f"\nโฑ๏ธ Performance Analysis:")
-    print(f" Average execution time: {avg_time:.1f}s")
-    print(f" Fastest sample: {min_time:.1f}s")
-    print(f" Slowest sample: {max_time:.1f}s")
-
-    # Identify any failed samples
-    if failed_samples:
-        print(f"\nโŒ Failed Samples:")
-        for sample in failed_samples:
-            print(f" Sample {sample['sample_id']:04d}: {sample.get('error', 'Unknown error')}")
-
-    # Identify DRC failures
-    if drc_failures:
-        print(f"\n๐Ÿ” DRC 
Failure Details:") - for sample in drc_failures: - print(f" Sample {sample['sample_id']:04d}: {sample['component_name']}") - - # Identify LVS failures - if lvs_failures: - print(f"\n๐Ÿ” LVS Failure Details:") - for sample in lvs_failures: - print(f" Sample {sample['sample_id']:04d}: {sample['component_name']}") - - # Overall assessment - success_rate = len(successful_samples) / total_samples * 100 - drc_rate = len(drc_passes) / len(successful_samples) * 100 if successful_samples else 0 - lvs_rate = len(lvs_passes) / len(successful_samples) * 100 if successful_samples else 0 - - print(f"\n๐Ÿ† Overall Assessment:") - if success_rate == 100: - print(f" โœ… EXCELLENT: 100% pipeline completion rate") - elif success_rate >= 95: - print(f" โœ… VERY GOOD: {success_rate:.1f}% pipeline completion rate") - elif success_rate >= 90: - print(f" โš ๏ธ GOOD: {success_rate:.1f}% pipeline completion rate") - else: - print(f" โŒ NEEDS IMPROVEMENT: {success_rate:.1f}% pipeline completion rate") - - if drc_rate == 100: - print(f" โœ… PERFECT: 100% DRC pass rate") - elif drc_rate >= 95: - print(f" โœ… EXCELLENT: {drc_rate:.1f}% DRC pass rate") - elif drc_rate >= 90: - print(f" โœ… VERY GOOD: {drc_rate:.1f}% DRC pass rate") - else: - print(f" โš ๏ธ NEEDS REVIEW: {drc_rate:.1f}% DRC pass rate") - - if lvs_rate == 100: - print(f" โœ… PERFECT: 100% LVS pass rate") - elif lvs_rate >= 95: - print(f" โœ… EXCELLENT: {lvs_rate:.1f}% LVS pass rate") - elif lvs_rate >= 90: - print(f" โœ… VERY GOOD: {lvs_rate:.1f}% LVS pass rate") - else: - print(f" โš ๏ธ NEEDS REVIEW: {lvs_rate:.1f}% LVS pass rate") - - print(f"\n๐ŸŽฏ Dataset Status:") - if success_rate == 100 and drc_rate >= 95 and lvs_rate >= 95: - print(f" ๐ŸŽ‰ PRODUCTION READY: Dataset meets all quality thresholds") - print(f" ๐Ÿš€ Ready for machine learning training and analysis") - else: - print(f" โš ๏ธ REVIEW NEEDED: Some quality metrics below optimal") - - return { - "total_samples": total_samples, - "success_rate": success_rate, - "drc_rate": drc_rate, - "lvs_rate": lvs_rate, - "avg_time": avg_time - } - -if __name__ == "__main__": - stats = analyze_dataset() - - # Generate a brief summary - print(f"\n๐Ÿ“‹ Brief Summary:") - print(f" {stats['total_samples']} samples, {stats['success_rate']:.0f}% success") - print(f" DRC: {stats['drc_rate']:.0f}%, LVS: {stats['lvs_rate']:.0f}%") - print(f" Avg time: {stats['avg_time']:.1f}s per sample") diff --git a/src/glayout/blocks/ATLAS/assemble_dataset.py b/src/glayout/blocks/ATLAS/assemble_dataset.py deleted file mode 100644 index 8b21ff0e..00000000 --- a/src/glayout/blocks/ATLAS/assemble_dataset.py +++ /dev/null @@ -1,41 +0,0 @@ -import json -import pandas as pd - -# Paths -input_json = 'sweep_outputs/sweep_results.json' -output_jsonl = 'sweep_outputs/sweep_results.jsonl' -output_csv = 'sweep_outputs/sweep_results.csv' - -# 1. Load full JSON results -with open(input_json, 'r') as f: - data = json.load(f) - -# 2. Write JSONL (one record per line) -with open(output_jsonl, 'w') as f: - for rec in data: - f.write(json.dumps(rec) + "\n") - -# 3. Flatten and assemble tabular DataFrame -records = [] -for rec in data: - flat = { - 'pcell': rec.get('pcell'), - 'index': rec.get('index') - } - # Flatten params - for k, v in rec.get('params', {}).items(): - flat[f'param_{k}'] = v - # Flatten report - for k, v in rec.get('report', {}).items(): - flat[f'report_{k}'] = v - records.append(flat) - -df = pd.DataFrame(records) - -# 4. Save CSV -df.to_csv(output_csv, index=False) - -# 5. 
Display summary -print(f"Written {len(data)} records to:") -print(f" - JSONL: {output_jsonl}") -print(f" - CSV: {output_csv}") diff --git a/src/glayout/blocks/ATLAS/current_mirror.py b/src/glayout/blocks/ATLAS/current_mirror.py deleted file mode 100644 index 3d7bf11f..00000000 --- a/src/glayout/blocks/ATLAS/current_mirror.py +++ /dev/null @@ -1,223 +0,0 @@ -from glayout.flow.placement.two_transistor_interdigitized import two_nfet_interdigitized, two_pfet_interdigitized -from glayout.flow.pdk.mappedpdk import MappedPDK -from glayout.flow.routing.c_route import c_route -from glayout.flow.routing.L_route import L_route -from glayout.flow.routing.straight_route import straight_route -from glayout.flow.spice.netlist import Netlist -from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk as sky130 -from glayout.flow.primitives.fet import nmos, pmos -from glayout.flow.primitives.guardring import tapring -from glayout.flow.pdk.util.port_utils import add_ports_perimeter,rename_ports_by_orientation -from gdsfactory.component import Component -from gdsfactory.cell import cell -from glayout.flow.pdk.util.comp_utils import evaluate_bbox, prec_center, prec_ref_center, align_comp_to_port -from typing import Optional, Union -from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk -from glayout.flow.primitives.via_gen import via_stack -from gdsfactory.components import text_freetype, rectangle - -try: - from evaluator_wrapper import run_evaluation -except ImportError: - print("Warning: evaluator_wrapper not found. Evaluation will be skipped.") - run_evaluation = None - -def add_cm_labels(cm_in: Component, - pdk: MappedPDK - ) -> Component: - - cm_in.unlock() - met2_pin = (68,16) - met2_label = (68,5) - - # list that will contain all port/comp info - move_info = list() - # create labels and append to info list - # vss - vsslabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - vsslabel.add_label(text="VSS",layer=pdk.get_glayer("met2_label")) - move_info.append((vsslabel,cm_in.ports["fet_A_source_E"],None)) - - # vref - vreflabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - vreflabel.add_label(text="VREF",layer=pdk.get_glayer("met2_label")) - move_info.append((vreflabel,cm_in.ports["fet_A_drain_N"],None)) - - # vcopy - vcopylabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - vcopylabel.add_label(text="VCOPY",layer=pdk.get_glayer("met2_label")) - move_info.append((vcopylabel,cm_in.ports["fet_B_drain_N"],None)) - - # VB - vblabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.5,0.5),centered=True).copy() - vblabel.add_label(text="VB",layer=pdk.get_glayer("met2_label")) - move_info.append((vblabel,cm_in.ports["welltie_S_top_met_S"], None)) - - # move everything to position - for comp, prt, alignment in move_info: - alignment = ('c','b') if alignment is None else alignment - compref = align_comp_to_port(comp, prt, alignment=alignment) - cm_in.add(compref) - return cm_in.flatten() - -def current_mirror_netlist( - pdk: MappedPDK, - width: float, - length: float, - multipliers: int, - with_dummy: bool = True, - n_or_p_fet: Optional[str] = 'nfet', - subckt_only: Optional[bool] = False -) -> Netlist: - if length is None: - length = pdk.get_grule('poly')['min_width'] - if width is None: - width = 3 - mtop = multipliers if subckt_only else 1 - model = pdk.models[n_or_p_fet] - - source_netlist = """.subckt {circuit_name} {nodes} """ + f'l={length} w={width} m={mtop} ' + """ -XA VREF VREF 
VSS VB {model} l={{l}} w={{w}} m={{m}} -XB VCOPY VREF VSS VB {model} l={{l}} w={{w}} m={{m}}""" - if with_dummy: - source_netlist += "\nXDUMMY VB VB VB VB {model} l={{l}} w={{w}} m={{2}}" - source_netlist += "\n.ends {circuit_name}" - - instance_format = "X{name} {nodes} {circuit_name} l={length} w={width} m={mult}" - - return Netlist( - circuit_name='CMIRROR', - nodes=['VREF', 'VCOPY', 'VSS', 'VB'], - source_netlist=source_netlist, - instance_format=instance_format, - parameters={ - 'model': model, - 'width': width, - 'length': length, - 'mult': multipliers - } - ) - - -#@cell -def current_mirror( - pdk: MappedPDK, - numcols: int = 3, - device: str = 'nfet', - with_dummy: Optional[bool] = True, - with_substrate_tap: Optional[bool] = False, - with_tie: Optional[bool] = True, - tie_layers: tuple[str,str]=("met2","met1"), - **kwargs -) -> Component: - """An instantiable current mirror that returns a Component object. The current mirror is a two transistor interdigitized structure with a shorted source and gate. It can be instantiated with either nmos or pmos devices. It can also be instantiated with a dummy device, a substrate tap, and a tie layer, and is centered at the origin. Transistor A acts as the reference and Transistor B acts as the mirror fet - - Args: - pdk (MappedPDK): the process design kit to use - numcols (int): number of columns of the interdigitized fets - device (str): nfet or pfet (can only interdigitize one at a time with this option) - with_dummy (bool): True places dummies on either side of the interdigitized fets - with_substrate_tap (bool): boolean to decide whether to place a substrate tapring - with_tie (bool): boolean to decide whether to place a tapring for tielayer - tie_layers (tuple[str,str], optional): the layers to use for the tie. Defaults to ("met2","met1"). 
- **kwargs: The keyword arguments are passed to the two_nfet_interdigitized or two_pfet_interdigitized functions and need to be valid arguments that can be accepted by the multiplier function - - Returns: - Component: a current mirror component object - """ - top_level = Component("current mirror") - if device in ['nmos', 'nfet']: - interdigitized_fets = two_nfet_interdigitized( - pdk, - numcols=numcols, - dummy=with_dummy, - with_substrate_tap=False, - with_tie=False, - **kwargs - ) - elif device in ['pmos', 'pfet']: - interdigitized_fets = two_pfet_interdigitized( - pdk, - numcols=numcols, - dummy=with_dummy, - with_substrate_tap=False, - with_tie=False, - **kwargs - ) - top_level.add_ports(interdigitized_fets.get_ports_list(), prefix="fet_") - maxmet_sep = pdk.util_max_metal_seperation() - # short source of the fets - source_short = interdigitized_fets << c_route(pdk, interdigitized_fets.ports['A_source_E'], interdigitized_fets.ports['B_source_E'], extension=3*maxmet_sep, viaoffset=False) - # short gates of the fets - gate_short = interdigitized_fets << c_route(pdk, interdigitized_fets.ports['A_gate_W'], interdigitized_fets.ports['B_gate_W'], extension=3*maxmet_sep, viaoffset=False) - # short gate and drain of one of the reference - interdigitized_fets << L_route(pdk, interdigitized_fets.ports['A_drain_W'], gate_short.ports['con_N'], viaoffset=False, fullbottom=False) - - top_level << interdigitized_fets - if with_tie: - if device in ['nmos','nfet']: - tap_layer = "p+s/d" - if device in ['pmos','pfet']: - tap_layer = "n+s/d" - tap_sep = max( - float(pdk.util_max_metal_seperation()), - float(pdk.get_grule("active_diff", "active_tap")["min_separation"]), - ) - tap_sep += float(pdk.get_grule(tap_layer, "active_tap")["min_enclosure"]) - tap_encloses = ( - 2 * (tap_sep + interdigitized_fets.xmax), - 2 * (tap_sep + interdigitized_fets.ymax), - ) - tie_ref = top_level << tapring(pdk, enclosed_rectangle = tap_encloses, sdlayer = tap_layer, horizontal_glayer = tie_layers[0], vertical_glayer = tie_layers[1]) - top_level.add_ports(tie_ref.get_ports_list(), prefix="welltie_") - try: - top_level << straight_route(pdk, top_level.ports[f"fet_B_{numcols - 1}_dummy_R_gsdcon_top_met_E"],top_level.ports["welltie_E_top_met_E"],glayer2="met1") - top_level << straight_route(pdk, top_level.ports["fet_A_0_dummy_L_gsdcon_top_met_W"],top_level.ports["welltie_W_top_met_W"],glayer2="met1") - except KeyError: - pass - try: - end_col = numcols - 1 - port1 = f'B_{end_col}_dummy_R_gdscon_top_met_E' - top_level << straight_route(pdk, top_level.ports[port1], top_level.ports["welltie_E_top_met_E"], glayer2="met1") - except KeyError: - pass - - # add a pwell - if device in ['nmos','nfet']: - top_level.add_padding(layers = (pdk.get_glayer("pwell"),), default = pdk.get_grule("pwell", "active_tap")["min_enclosure"], ) - top_level = add_ports_perimeter(top_level, layer = pdk.get_glayer("pwell"), prefix="well_") - if device in ['pmos','pfet']: - top_level.add_padding(layers = (pdk.get_glayer("nwell"),), default = pdk.get_grule("nwell", "active_tap")["min_enclosure"], ) - top_level = add_ports_perimeter(top_level, layer = pdk.get_glayer("nwell"), prefix="well_") - - - # add the substrate tap if specified - if with_substrate_tap: - subtap_sep = pdk.get_grule("dnwell", "active_tap")["min_separation"] - subtap_enclosure = ( - 2.5 * (subtap_sep + interdigitized_fets.xmax), - 2.5 * (subtap_sep + interdigitized_fets.ymax), - ) - subtap_ring = top_level << tapring(pdk, enclosed_rectangle = subtap_enclosure, sdlayer = "p+s/d", 
horizontal_glayer = "met2", vertical_glayer = "met1") - top_level.add_ports(subtap_ring.get_ports_list(), prefix="substrate_tap_") - - top_level.add_ports(source_short.get_ports_list(), prefix='purposegndports') - - - top_level.info['netlist'] = current_mirror_netlist( - pdk, - width=kwargs.get('width', 3), length=kwargs.get('length', 0.15), multipliers=numcols, with_dummy=with_dummy, - n_or_p_fet=device, - subckt_only=True - ) - - return top_level - -if __name__=="__main__": - current_mirror = add_cm_labels(current_mirror(sky130_mapped_pdk, device='pfet'),sky130_mapped_pdk) - current_mirror.show() - current_mirror.name = "CMIRROR" - #magic_drc_result = sky130_mapped_pdk.drc_magic(current_mirror, current_mirror.name) - #netgen_lvs_result = sky130_mapped_pdk.lvs_netgen(current_mirror, current_mirror.name) - current_mirror_gds = current_mirror.write_gds("current_mirror.gds") - res = run_evaluation("current_mirror.gds", current_mirror.name, current_mirror) \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/data_diagnostics.py b/src/glayout/blocks/ATLAS/data_diagnostics.py deleted file mode 100644 index 72414928..00000000 --- a/src/glayout/blocks/ATLAS/data_diagnostics.py +++ /dev/null @@ -1,59 +0,0 @@ -import numpy as np -import matplotlib.pyplot as plt -from scipy.spatial.distance import pdist, squareform -import pandas as pd - -# Import your generated samples and continuous specs -from elhs import all_samples, cont_specs - -# Threshold ratio for flagging (min_dist < threshold_ratio * avg_nn) -threshold_ratio = 0.5 - -diagnostics = [] - -for pcell, samples in all_samples.items(): - specs = cont_specs[pcell] - - # Build flat list of continuous dims spec: (name, min, max) per dimension - flat_specs = [] - for name, mn, mx, cnt in specs: - flat_specs.extend([(name, mn, mx)] * cnt) - - n_p = len(samples) - d_p = len(flat_specs) - - # Reconstruct normalized continuous matrix - cont_matrix = np.zeros((n_p, d_p)) - for i, sample in enumerate(samples): - for j, (name, mn, mx) in enumerate(flat_specs): - val = sample[name][j] - cont_matrix[i, j] = (val - mn) / (mx - mn) - - # Compute pairwise distances - dist_matrix = squareform(pdist(cont_matrix)) - np.fill_diagonal(dist_matrix, np.inf) - min_dist = np.min(dist_matrix) - nn_dist = np.min(dist_matrix, axis=1) - avg_nn = np.mean(nn_dist) - flagged = min_dist < threshold_ratio * avg_nn - - diagnostics.append({ - 'pcell': pcell, - 'min_distance': min_dist, - 'avg_nearest_neighbor': avg_nn, - 'flagged': flagged - }) - - # Plot histograms for each continuous dimension - for j, (name, mn, mx) in enumerate(flat_specs): - values = [sample[name][j] for sample in samples] - plt.figure() - plt.hist(values, bins=20) - plt.title(f"{pcell} โ€” {name}[{j}] histogram") - plt.xlabel(name) - plt.ylabel("Frequency") - plt.show() - -# Display diagnostics table -df_diag = pd.DataFrame(diagnostics) -df_diag diff --git a/src/glayout/blocks/ATLAS/dataset_curator.py b/src/glayout/blocks/ATLAS/dataset_curator.py deleted file mode 100644 index 8b21ff0e..00000000 --- a/src/glayout/blocks/ATLAS/dataset_curator.py +++ /dev/null @@ -1,41 +0,0 @@ -import json -import pandas as pd - -# Paths -input_json = 'sweep_outputs/sweep_results.json' -output_jsonl = 'sweep_outputs/sweep_results.jsonl' -output_csv = 'sweep_outputs/sweep_results.csv' - -# 1. Load full JSON results -with open(input_json, 'r') as f: - data = json.load(f) - -# 2. Write JSONL (one record per line) -with open(output_jsonl, 'w') as f: - for rec in data: - f.write(json.dumps(rec) + "\n") - -# 3. 
Flatten and assemble tabular DataFrame -records = [] -for rec in data: - flat = { - 'pcell': rec.get('pcell'), - 'index': rec.get('index') - } - # Flatten params - for k, v in rec.get('params', {}).items(): - flat[f'param_{k}'] = v - # Flatten report - for k, v in rec.get('report', {}).items(): - flat[f'report_{k}'] = v - records.append(flat) - -df = pd.DataFrame(records) - -# 4. Save CSV -df.to_csv(output_csv, index=False) - -# 5. Display summary -print(f"Written {len(data)} records to:") -print(f" - JSONL: {output_jsonl}") -print(f" - CSV: {output_csv}") diff --git a/src/glayout/blocks/ATLAS/debug_netlist.py b/src/glayout/blocks/ATLAS/debug_netlist.py deleted file mode 100644 index 05e322f7..00000000 --- a/src/glayout/blocks/ATLAS/debug_netlist.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 -""" -Debug script to investigate the netlist reconstruction issue. -""" - -import sys -import os - -# Add the glayout path -glayout_path = "/home/arnavshukla/OpenFASOC/openfasoc/generators/glayout" -if glayout_path not in sys.path: - sys.path.insert(0, glayout_path) - -# Set up environment -os.environ['PDK_ROOT'] = '/opt/conda/envs/GLdev/share/pdk' -os.environ['PDK'] = 'sky130A' - -def debug_netlist_storage(): - """Debug what's actually being stored in component.info""" - print("๐Ÿ” Debugging Netlist Storage...") - - from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk - from transmission_gate import transmission_gate - - pdk = sky130_mapped_pdk - - print("๐Ÿ“‹ Creating transmission gate...") - tg = transmission_gate(pdk=pdk, width=(1.0, 2.0), length=(0.15, 0.15)) - - print("\n๐Ÿ“Š Component Info Contents:") - print("Keys:", list(tg.info.keys())) - - for key, value in tg.info.items(): - print(f"\n{key}: {type(value)}") - if isinstance(value, str): - print(f" Length: {len(value)}") - print(f" Preview: {value[:100]}...") - elif isinstance(value, dict): - print(f" Dict keys: {list(value.keys())}") - for k, v in value.items(): - print(f" {k}: {type(v)} - {str(v)[:50]}...") - - # Test reconstruction - print("\n๐Ÿ”ง Testing Reconstruction...") - if 'netlist_data' in tg.info: - from glayout.flow.spice.netlist import Netlist - data = tg.info['netlist_data'] - print(f"Netlist data: {data}") - - try: - netlist_obj = Netlist( - circuit_name=data['circuit_name'], - nodes=data['nodes'] - ) - netlist_obj.source_netlist = data['source_netlist'] - - print(f"Reconstructed netlist object: {netlist_obj}") - print(f"Circuit name: {netlist_obj.circuit_name}") - print(f"Nodes: {netlist_obj.nodes}") - print(f"Source netlist: {netlist_obj.source_netlist}") - - generated = netlist_obj.generate_netlist() - print(f"Generated netlist length: {len(generated)}") - print(f"Generated content:\n{generated}") - - except Exception as e: - print(f"Error reconstructing: {e}") - import traceback - traceback.print_exc() - -if __name__ == "__main__": - debug_netlist_storage() diff --git a/src/glayout/blocks/ATLAS/debug_sample_11.py b/src/glayout/blocks/ATLAS/debug_sample_11.py deleted file mode 100644 index 1dd3c00b..00000000 --- a/src/glayout/blocks/ATLAS/debug_sample_11.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python3 -""" -Debug script for sample 11 that was hanging -""" - -import sys -import time -import json -from pathlib import Path - -# Add glayout to path -_here = Path(__file__).resolve() -_root_dir = _here.parent.parent.parent.parent.parent -sys.path.insert(0, str(_root_dir)) - -from glayout.flow.blocks.elementary.LHS.transmission_gate import transmission_gate, add_tg_labels -from 
glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk - -def test_sample_11(): - """Test the specific parameters that are causing sample 11 to hang""" - - # Sample 11 parameters (index 10) - params = { - "width": [15.56987768790995, 19.431313875884364], - "length": [2.2925198967864566, 0.8947369421533957], - "fingers": [5, 5], - "multipliers": [2, 2] - } - - print("Testing sample 11 parameters:") - print(f"Parameters: {params}") - - # Convert to tuples - width_tuple = tuple(params['width']) - length_tuple = tuple(params['length']) - fingers_tuple = tuple(params['fingers']) - multipliers_tuple = tuple(params['multipliers']) - - print(f"Width tuple: {width_tuple}") - print(f"Length tuple: {length_tuple}") - print(f"Fingers tuple: {fingers_tuple}") - print(f"Multipliers tuple: {multipliers_tuple}") - - try: - print("Creating transmission gate...") - start_time = time.time() - - tg_component = transmission_gate( - pdk=sky130_mapped_pdk, - width=width_tuple, - length=length_tuple, - fingers=fingers_tuple, - multipliers=multipliers_tuple, - substrate_tap=True - ) - - creation_time = time.time() - start_time - print(f"โœ… Transmission gate created in {creation_time:.2f}s") - - print("Adding labels...") - start_time = time.time() - cell = add_tg_labels(tg_component, sky130_mapped_pdk) - cell.name = "test_sample_11" - label_time = time.time() - start_time - print(f"โœ… Labels added in {label_time:.2f}s") - - print("Writing GDS...") - start_time = time.time() - cell.write_gds("test_sample_11.gds") - gds_time = time.time() - start_time - print(f"โœ… GDS written in {gds_time:.2f}s") - - print("๐ŸŽ‰ Sample 11 test completed successfully!") - - except Exception as e: - print(f"โŒ Error: {e}") - import traceback - traceback.print_exc() - -if __name__ == "__main__": - test_sample_11() \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/diff_pair.py b/src/glayout/blocks/ATLAS/diff_pair.py deleted file mode 100644 index 116a58cd..00000000 --- a/src/glayout/blocks/ATLAS/diff_pair.py +++ /dev/null @@ -1,257 +0,0 @@ -from typing import Optional, Union - -from gdsfactory.cell import cell -from gdsfactory.component import Component, copy -from gdsfactory.components.rectangle import rectangle -from gdsfactory.routing.route_quad import route_quad -from gdsfactory.routing.route_sharp import route_sharp -from glayout.flow.pdk.mappedpdk import MappedPDK -from glayout.flow.pdk.util.comp_utils import align_comp_to_port, evaluate_bbox, movex, movey -from glayout.flow.pdk.util.port_utils import ( - add_ports_perimeter, - get_orientation, - print_ports, - rename_ports_by_list, - rename_ports_by_orientation, - set_port_orientation, -) -from glayout.flow.pdk.util.snap_to_grid import component_snap_to_grid -from glayout.flow.placement.common_centroid_ab_ba import common_centroid_ab_ba -from glayout.flow.primitives.fet import nmos, pmos -from glayout.flow.primitives.guardring import tapring -from glayout.flow.primitives.via_gen import via_stack -from glayout.flow.routing.c_route import c_route -from glayout.flow.routing.smart_route import smart_route -from glayout.flow.routing.straight_route import straight_route -from glayout.flow.spice import Netlist -from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk -from gdsfactory.components import text_freetype -try: - from evaluator_wrapper import run_evaluation -except ImportError: - print("Warning: evaluator_wrapper not found. 
Evaluation will be skipped.") - run_evaluation = None - - -def add_df_labels(df_in: Component, - pdk: MappedPDK - ) -> Component: - - df_in.unlock() - met1_pin = (67,16) - met1_label = (67,5) - met2_pin = (68,16) - met2_label = (68,5) - # list that will contain all port/comp info - move_info = list() - # create labels and append to info list - # vtail - vtaillabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - vtaillabel.add_label(text="VTAIL",layer=pdk.get_glayer("met2_label")) - move_info.append((vtaillabel,df_in.ports["bl_multiplier_0_source_S"],None)) - - # vdd1 - vdd1label = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - vdd1label.add_label(text="VDD1",layer=pdk.get_glayer("met2_label")) - move_info.append((vdd1label,df_in.ports["tl_multiplier_0_drain_N"],None)) - - # vdd2 - vdd2label = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - vdd2label.add_label(text="VDD2",layer=pdk.get_glayer("met2_label")) - move_info.append((vdd2label,df_in.ports["tr_multiplier_0_drain_N"],None)) - - # VB - vblabel = rectangle(layer=pdk.get_glayer("met1_pin"),size=(0.5,0.5),centered=True).copy() - vblabel.add_label(text="B",layer=pdk.get_glayer("met1_label")) - move_info.append((vblabel,df_in.ports["tap_N_top_met_S"], None)) - - # VP - vplabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - vplabel.add_label(text="VP",layer=pdk.get_glayer("met2_label")) - move_info.append((vplabel,df_in.ports["br_multiplier_0_gate_S"], None)) - - # VN - vnlabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - vnlabel.add_label(text="VN",layer=pdk.get_glayer("met2_label")) - move_info.append((vnlabel,df_in.ports["bl_multiplier_0_gate_S"], None)) - - # move everything to position - for comp, prt, alignment in move_info: - alignment = ('c','b') if alignment is None else alignment - compref = align_comp_to_port(comp, prt, alignment=alignment) - df_in.add(compref) - return df_in.flatten() - -def diff_pair_netlist(fetL: Component, fetR: Component) -> Netlist: - diff_pair_netlist = Netlist(circuit_name='DIFF_PAIR', nodes=['VP', 'VN', 'VDD1', 'VDD2', 'VTAIL', 'B']) - diff_pair_netlist.connect_netlist( - fetL.info['netlist'], - [('D', 'VDD1'), ('G', 'VP'), ('S', 'VTAIL'), ('B', 'B')] - ) - diff_pair_netlist.connect_netlist( - fetR.info['netlist'], - [('D', 'VDD2'), ('G', 'VN'), ('S', 'VTAIL'), ('B', 'B')] - ) - return diff_pair_netlist - -@cell -def diff_pair( - pdk: MappedPDK, - width: float = 3, - fingers: int = 4, - length: Optional[float] = None, - n_or_p_fet: bool = True, - plus_minus_seperation: float = 0, - rmult: int = 1, - dummy: Union[bool, tuple[bool, bool]] = True, - substrate_tap: bool=True -) -> Component: - """create a diffpair with 2 transistors placed in two rows with common centroid place. 
Sources are shorted - width = width of the transistors - fingers = number of fingers in the transistors (must be 2 or more) - length = length of the transistors, None or 0 means use min length - short_source = if true connects source of both transistors - n_or_p_fet = if true the diffpair is made of nfets else it is made of pfets - substrate_tap: if true place a tapring around the diffpair (connects on met1) - """ - # TODO: error checking - pdk.activate() - diffpair = Component() - # create transistors - well = None - if isinstance(dummy, bool): - dummy = (dummy, dummy) - if n_or_p_fet: - fetL = nmos(pdk, width=width, fingers=fingers,length=length,multipliers=1,with_tie=False,with_dummy=(dummy[0], False),with_dnwell=False,with_substrate_tap=False,rmult=rmult) - fetR = nmos(pdk, width=width, fingers=fingers,length=length,multipliers=1,with_tie=False,with_dummy=(False,dummy[1]),with_dnwell=False,with_substrate_tap=False,rmult=rmult) - min_spacing_x = pdk.get_grule("n+s/d")["min_separation"] - 2*(fetL.xmax - fetL.ports["multiplier_0_plusdoped_E"].center[0]) - well = "pwell" - else: - fetL = pmos(pdk, width=width, fingers=fingers,length=length,multipliers=1,with_tie=False,with_dummy=(dummy[0], False),dnwell=False,with_substrate_tap=False,rmult=rmult) - fetR = pmos(pdk, width=width, fingers=fingers,length=length,multipliers=1,with_tie=False,with_dummy=(False,dummy[1]),dnwell=False,with_substrate_tap=False,rmult=rmult) - min_spacing_x = pdk.get_grule("p+s/d")["min_separation"] - 2*(fetL.xmax - fetL.ports["multiplier_0_plusdoped_E"].center[0]) - well = "nwell" - # place transistors - viam2m3 = via_stack(pdk,"met2","met3",centered=True) - metal_min_dim = max(pdk.get_grule("met2")["min_width"],pdk.get_grule("met3")["min_width"]) - metal_space = max(pdk.get_grule("met2")["min_separation"],pdk.get_grule("met3")["min_separation"],metal_min_dim) - gate_route_os = evaluate_bbox(viam2m3)[0] - fetL.ports["multiplier_0_gate_W"].width + metal_space - min_spacing_y = metal_space + 2*gate_route_os - min_spacing_y = min_spacing_y - 2*abs(fetL.ports["well_S"].center[1] - fetL.ports["multiplier_0_gate_S"].center[1]) - # TODO: fix spacing where you see +-0.5 - a_topl = (diffpair << fetL).movey(fetL.ymax+min_spacing_y/2+0.5).movex(0-fetL.xmax-min_spacing_x/2) - b_topr = (diffpair << fetR).movey(fetR.ymax+min_spacing_y/2+0.5).movex(fetL.xmax+min_spacing_x/2) - a_botr = (diffpair << fetR) - a_botr = a_botr.mirror_y() - a_botr.movey(0-0.5-fetL.ymax-min_spacing_y/2).movex(fetL.xmax+min_spacing_x/2) - b_botl = (diffpair << fetL) - b_botl = b_botl.mirror_y() - b_botl.movey(0-0.5-fetR.ymax-min_spacing_y/2).movex(0-fetL.xmax-min_spacing_x/2) - # if substrate tap place substrate tap - if substrate_tap: - tapref = diffpair << tapring(pdk,evaluate_bbox(diffpair,padding=1),horizontal_glayer="met1") - diffpair.add_ports(tapref.get_ports_list(),prefix="tap_") - try: - diffpair< Component: - diffpair = common_centroid_ab_ba(pdk,width,fingers,length,n_or_p_fet,rmult,dummy,substrate_tap) - diffpair << smart_route(pdk,diffpair.ports["A_source_E"],diffpair.ports["B_source_E"],diffpair, diffpair) - return diffpair - -if __name__=="__main__": - diff_pair = add_df_labels(diff_pair(sky130_mapped_pdk),sky130_mapped_pdk) - #diff_pair = diff_pair(sky130_mapped_pdk) - diff_pair.show() - diff_pair.name = "DIFF_PAIR" - #magic_drc_result = sky130_mapped_pdk.drc_magic(diff_pair, diff_pair.name) - #netgen_lvs_result = sky130_mapped_pdk.lvs_netgen(diff_pair, diff_pair.name) - diff_pair_gds = diff_pair.write_gds("diff_pair.gds") - res = 
run_evaluation("diff_pair.gds", diff_pair.name, diff_pair) \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/docs/DATASET_GENERATION_README.md b/src/glayout/blocks/ATLAS/docs/DATASET_GENERATION_README.md deleted file mode 100644 index d3b81479..00000000 --- a/src/glayout/blocks/ATLAS/docs/DATASET_GENERATION_README.md +++ /dev/null @@ -1,315 +0,0 @@ -# Dataset Generation Setup Guide - -This guide provides step-by-step instructions for setting up the environment and generating datasets for analog circuit components using the Glayout framework. - -## Table of Contents -- [Prerequisites](#prerequisites) -- [Environment Setup](#environment-setup) -- [Installation Steps](#installation-steps) -- [Dataset Generation](#dataset-generation) -- [Available Generators](#available-generators) -- [Troubleshooting](#troubleshooting) - -## Prerequisites - -Before starting, ensure you have: -- Python 3.10 or later -- Conda package manager -- Git -- Access to PDK files (Process Design Kit) - -## Environment Setup - -### 1. Create and Activate Conda Environment - -Create a new conda environment named `GLdev`: - -```bash -# Create conda environment -conda create -n GLdev python=3.10 - -# Activate the environment -conda activate GLdev -``` - -### 2. Install Glayout Package - -Navigate to the glayout directory and install in development mode: - -```bash -# Navigate to the glayout directory -cd /path/to/OpenFASOC/openfasoc/generators/glayout - -# Install glayout in development mode -pip install -e . -``` - -### 3. Install Core Dependencies - -Install the core requirements: - -```bash -# Install core dependencies -pip install -r requirements.txt -``` - -The core dependencies include: -- `gdsfactory>=7.16.0,<7.17` -- `numpy!=1.24.0,>=1.20` -- `prettyprint` -- `prettyprinttree` -- `gdstk` - -### 4. Install ML Dependencies (Optional) - -For machine learning features, install additional requirements: - -```bash -# Install ML dependencies -pip install -r requirements.ml.txt -``` - -The ML dependencies include: -- `torch` -- `transformers` -- `langchain` -- `chromadb` -- `sentence-transformers` -- And other ML-related packages - -### 5. Setup PDK Environment - -Set up the Process Design Kit environment variable: - -```bash -# Set PDK_ROOT environment variable -export PDK_ROOT=/opt/conda/envs/GLdev/share/pdk -``` - -**Note**: Add this line to your `~/.bashrc` or `~/.zshrc` to make it persistent: - -```bash -echo "export PDK_ROOT=/opt/conda/envs/GLdev/share/pdk" >> ~/.bashrc -source ~/.bashrc -``` - -## Installation Steps - -### Complete Setup Script - -You can run all the setup commands in sequence: - -```bash -# 1. Create and activate conda environment -conda create -n GLdev python=3.10 -conda activate GLdev - -# 2. Navigate to glayout directory -cd /path/to/OpenFASOC/openfasoc/generators/glayout - -# 3. Install glayout in development mode -pip install -e . - -# 4. Install dependencies -pip install -r requirements.txt -pip install -r requirements.ml.txt # Optional for ML features - -# 5. Set PDK environment -export PDK_ROOT=/opt/conda/envs/GLdev/share/pdk - -# 6. Navigate to LHS directory -cd glayout/flow/blocks/elementary/LHS - -# 7. Setup execution permissions -chmod +x run_pex.sh -chmod +x getStarted.sh -``` - -## Dataset Generation - -### 1. Navigate to LHS Directory - -```bash -cd /path/to/OpenFASOC/openfasoc/generators/glayout/glayout/flow/blocks/elementary/LHS -``` - -### 2. 
Run Initial Setup - -Execute the startup script: - -```bash -# Activate conda environment -conda activate GLdev - -# Set PDK_ROOT -export PDK_ROOT=/opt/conda/envs/GLdev/share/pdk - -# Make scripts executable -chmod +x run_pex.sh -``` - -### 3. Generate Datasets - -The LHS directory contains pre-generated parameters in the `gen_params_8h_runtime_aware` folder for different circuit components: - -- `current_mirror_params.json` -- `diff_pair_params.json` -- `fvf_params.json` -- `lvcm_params.json` -- `opamp_params.json` -- `txgate_params.json` - -#### Generate Transmission Gate Dataset - -```bash -python generate_tg_1000_dataset.py -# or -python generate_tg_200_dataset.py -# or -python generate_tg_100_dataset.py -``` - -#### Generate FVF (Flipped Voltage Follower) Dataset - -```bash -python generate_fvf_8h_runtime_aware.py -# or -python generate_fvf_360_robust.py -``` - -#### Generate Op-Amp Dataset - -```bash -python generate_opamp_dataset.py -# or -python generate_opamp_5_samples.py -``` - -#### Generate Differential Pair Dataset - -```bash -python generate_diff_pair_dataset.py -``` - -#### Generate Current Mirror Dataset - -```bash -python generate_current_mirror_3164_dataset.py -``` - -## Available Generators - -The following generator scripts are available in the LHS directory: - -| Generator Script | Circuit Type | Parameter File | Output Dataset | -|------------------|--------------|----------------|----------------| -| `generate_tg_1000_dataset.py` | Transmission Gate | `txgate_params.json` | `tg_dataset_1000_lhs/` | -| `generate_fvf_8h_runtime_aware.py` | Flipped Voltage Follower | `fvf_params.json` | `fvf_dataset_8h_runtime_aware/` | -| `generate_opamp_dataset.py` | Operational Amplifier | `opamp_params.json` | `opamp_dataset_250/` | -| `generate_diff_pair_dataset.py` | Differential Pair | `diff_pair_params.json` | `diff_pair_dataset_1800_lhs/` | -| `generate_current_mirror_3164_dataset.py` | Current Mirror | `current_mirror_params.json` | `cm_dataset_3164_lhs/` | - -## Usage Example - -Here's a complete workflow example: - -```bash -# 1. Activate environment -conda activate GLdev - -# 2. Set environment variables -export PDK_ROOT=/opt/conda/envs/GLdev/share/pdk - -# 3. Navigate to LHS directory -cd /path/to/OpenFASOC/openfasoc/generators/glayout/glayout/flow/blocks/elementary/LHS - -# 4. Make scripts executable -chmod +x run_pex.sh - -# 5. Generate transmission gate dataset with 1000 samples -python generate_tg_1000_dataset.py - -# 6. Generate FVF dataset -python generate_fvf_8h_runtime_aware.py - -# 7. Generate op-amp dataset -python generate_opamp_dataset.py -``` - -## Output Structure - -Generated datasets are stored in their respective directories: - -``` -LHS/ -โ”œโ”€โ”€ tg_dataset_1000_lhs/ # Transmission gate samples -โ”œโ”€โ”€ fvf_dataset_8h_runtime_aware/ # FVF samples -โ”œโ”€โ”€ opamp_dataset_250/ # Op-amp samples -โ”œโ”€โ”€ diff_pair_dataset_1800_lhs/ # Differential pair samples -โ””โ”€โ”€ cm_dataset_3164_lhs/ # Current mirror samples -``` - -Each dataset directory contains: -- Individual JSON parameter files -- Generated layout files (GDS format) -- Simulation results -- Performance metrics - -## Troubleshooting - -### Common Issues - -1. **Import Errors** - ```bash - # Make sure you're in the GLdev environment - conda activate GLdev - - # Reinstall dependencies - pip install -r requirements.txt - ``` - -2. 
**PDK Path Issues** - ```bash - # Verify PDK_ROOT is set correctly - echo $PDK_ROOT - - # Reset if needed - export PDK_ROOT=/opt/conda/envs/GLdev/share/pdk - ``` - -3. **Permission Errors** - ```bash - # Make scripts executable - chmod +x run_pex.sh - chmod +x getStarted.sh - ``` - -4. **Memory Issues** - - For large datasets, consider running smaller batches - - Monitor system memory usage during generation - -### Verification - -To verify your setup is working: - -```bash -# Test with a small sample -python generate_tg_5_samples.py - -# Check if output directory is created -ls -la tg_dataset_* -``` - -## Notes - -- Dataset generation can be time-intensive depending on the number of samples -- Ensure sufficient disk space for large datasets -- The generation process includes layout synthesis and performance extraction -- Parameters are pre-optimized using Latin Hypercube Sampling (LHS) for design space exploration - -## Support - -For issues or questions: -- Check the main OpenFASOC documentation -- Review the glayout README.md for API details -- Ensure all dependencies are correctly installed diff --git a/src/glayout/blocks/ATLAS/docs/FIX_SUMMARY.md b/src/glayout/blocks/ATLAS/docs/FIX_SUMMARY.md deleted file mode 100644 index a609525a..00000000 --- a/src/glayout/blocks/ATLAS/docs/FIX_SUMMARY.md +++ /dev/null @@ -1,163 +0,0 @@ -# Fix for Gymnasium Info Dict Error and gdsfactory 7.16.0+ Compatibility - -## Problem Description - -The error "Values of the info dict only support int, float, string or tuple" was occurring when running `generate_tg_1000_dataset.py` because: - -1. **Root Cause**: Component objects were storing `Netlist` objects directly in their `info` dictionary -2. **Library Conflict**: The `gymnasium` library (used in ML optimization pipelines) only accepts basic data types in info dictionaries -3. **Version Issue**: gdsfactory 7.16.0+ has strict Pydantic validation that prevents storing custom objects in `component.info` -4. **Error Location**: The error occurred when `Netlist` objects were encountered in `component.info['netlist']` - -## Additional Issue Fixed - -**PrettyPrint Import Error**: Fixed incorrect import `from PrettyPrint import PrettyPrintTree` to use the correct package name with fallback handling. - -## Files Modified - -The following files were updated to fix the issues: - -### Core Primitive Files -1. **`glayout/flow/primitives/fet.py`** - - Fixed NMOS and PMOS functions (lines ~484 and ~622) - - Changed from storing `Netlist` object directly to storing as string + data - -2. **`glayout/flow/primitives/mimcap.py`** - - Fixed mimcap and mimcap_array functions (lines ~85 and ~132) - - Updated to handle both single capacitors and capacitor arrays - -3. **`glayout/flow/pdk/util/port_utils.py`** - - Fixed PrettyPrint import with fallback handling - - Added error handling for missing prettyprinttree package - -### Elementary Block Files -4. **`glayout/flow/blocks/elementary/LHS/transmission_gate.py`** - - Fixed transmission_gate function (line ~137) - - Updated tg_netlist function with helper function for version compatibility - - Added `get_component_netlist()` helper function - -5. **`glayout/flow/blocks/elementary/transmission_gate/transmission_gate.py`** - - Fixed transmission_gate function (line ~131) - - Updated tg_netlist function for consistency - - Added `get_component_netlist()` helper function - -6. 
**`glayout/flow/blocks/elementary/LHS/fvf.py`** - - Fixed flipped_voltage_follower function (line ~162) - - Updated fvf_netlist function with helper function - - Added `get_component_netlist()` helper function - -### Composite Block Files -7. **`glayout/flow/blocks/composite/fvf_based_ota/low_voltage_cmirror.py`** - - Fixed netlist storage (line ~143) - -8. **`glayout/flow/blocks/composite/fvf_based_ota/p_block.py`** - - Fixed netlist storage (line ~92) - -9. **`glayout/flow/blocks/composite/fvf_based_ota/n_block.py`** - - Fixed netlist storage (line ~146) - -## Solution Implementation - -### Before (Problematic Code) -```python -component.info['netlist'] = some_netlist_function(...) -``` - -### After (Fixed Code - Compatible with gdsfactory 7.16.0+) -```python -# Store netlist as string to avoid gymnasium info dict type restrictions -# Compatible with both gdsfactory 7.7.0 and 7.16.0+ strict Pydantic validation -netlist_obj = some_netlist_function(...) -component.info['netlist'] = str(netlist_obj) -# Store serialized netlist data for reconstruction if needed -component.info['netlist_data'] = { - 'circuit_name': netlist_obj.circuit_name, - 'nodes': netlist_obj.nodes, - 'source_netlist': netlist_obj.source_netlist -} -``` - -### Helper Function for Netlist Reconstruction -```python -def get_component_netlist(component): - """Helper function to get netlist object from component info, compatible with all gdsfactory versions""" - from glayout.flow.spice.netlist import Netlist - - # Try to get stored object first (for older gdsfactory versions) - if 'netlist_obj' in component.info: - return component.info['netlist_obj'] - - # Try to reconstruct from netlist_data (for newer gdsfactory versions) - if 'netlist_data' in component.info: - data = component.info['netlist_data'] - netlist = Netlist( - circuit_name=data['circuit_name'], - nodes=data['nodes'] - ) - netlist.source_netlist = data['source_netlist'] - return netlist - - # Fallback: return the string representation - return component.info.get('netlist', '') -``` - -### PrettyPrint Import Fix -```python -# Before (Problematic) -from PrettyPrint import PrettyPrintTree - -# After (Fixed with fallback) -try: - from prettyprinttree import PrettyPrintTree -except ImportError: - try: - from PrettyPrint import PrettyPrintTree - except ImportError: - PrettyPrintTree = None -``` - -## Benefits - -1. **gdsfactory 7.16.0+ Compatibility**: Works with strict Pydantic validation -2. **Backward Compatibility**: Still works with older gdsfactory versions (7.7.0) -3. **Gymnasium Compatibility**: Resolves gymnasium library compatibility issues -4. **JSON Serializable**: Component info dictionaries can be serialized to JSON -5. **No Functional Loss**: All netlist functionality is preserved -6. 
**Import Robustness**: PrettyPrint imports work regardless of package naming - -## Version Compatibility - -| gdsfactory Version | Storage Method | Reconstruction Method | -|-------------------|---------------|--------------------| -| 7.7.0 - 7.15.x | `netlist_obj` (if available) | Direct object access | -| 7.16.0+ | `netlist_data` dict | Reconstruct from serialized data | - -## Testing - -Updated comprehensive test scripts: -- `test_netlist_fix.py` - Basic validation -- `test_comprehensive_fix.py` - Tests multiple component types with version compatibility - -All tests pass for both storage methods, confirming that: -- Netlist objects are stored as strings in `component.info['netlist']` -- Netlist data is preserved in `component.info['netlist_data']` for reconstruction -- Info dictionaries are JSON-serializable -- No functionality is lost -- Works with both gdsfactory 7.7.0 and 7.16.0+ - -## For Your Friend (gdsfactory 7.16.0) - -Your friend should now be able to run `generate_tg_1000_dataset.py` without encountering: -1. โœ… The gymnasium info dict error (fixed by string storage) -2. โœ… The PrettyPrint import error (fixed with fallback imports) -3. โœ… gdsfactory 7.16.0+ Pydantic validation errors (fixed with `netlist_data` approach) - -## Verification - -To verify the fix works with gdsfactory 7.16.0, your friend can run: -```bash -cd /path/to/LHS/directory -python test_comprehensive_fix.py -``` - -This will confirm that all components store netlists properly and are compatible with both gymnasium and gdsfactory 7.16.0+ requirements. diff --git a/src/glayout/blocks/ATLAS/docs/README_CHANGES.md b/src/glayout/blocks/ATLAS/docs/README_CHANGES.md deleted file mode 100644 index 280ee44d..00000000 --- a/src/glayout/blocks/ATLAS/docs/README_CHANGES.md +++ /dev/null @@ -1,285 +0,0 @@ -# LHS Dataset Extension Summary - -This document summarizes the modifications made to include **lvcm** (Low Voltage Current Mirror) and prepare for **opamp** circuits in the LHS dataset generation pipeline. Note: opamp is temporarily disabled due to upstream implementation issues. 
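For orientation before the file-by-file breakdown, here is a minimal sketch of the Latin Hypercube Sampling step that `elhs.py` implements. This illustration uses `scipy.stats.qmc` rather than the project code, and the two continuous bounds shown are placeholders, not the actual parameter specs:

```python
# Minimal LHS sketch (illustrative; the real pipeline uses elhs.py with
# circuit-specific cont_specs/int_specs and maximin optimization).
from scipy.stats import qmc

# Hypothetical bounds for two continuous dims: (width, length) in um
l_bounds = [1.0, 0.15]
u_bounds = [20.0, 4.0]

sampler = qmc.LatinHypercube(d=2, seed=0)
unit_samples = sampler.random(n=60)              # stratified points in [0, 1)^2
scaled = qmc.scale(unit_samples, l_bounds, u_bounds)

params = [{"width": float(w), "length": float(l)} for w, l in scaled]
print(params[0])
```

The actual `elhs.py` additionally handles the integer dims (fingers, multipliers), the tuple-versus-scalar parameter structures described below, and maximin spreading of the samples.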
- -## File Structure and Roles - -### Core Parameter Generation -- **`elhs.py`** - Enhanced Latin Hypercube Sampling implementation with parameter specifications for all circuit types -- **`elementary_inventory.py`** - Circuit inventory and parameter definitions - -### Circuit Implementations -- **`fvf.py`** - Flipped Voltage Follower circuit with labeling -- **`transmission_gate.py`** - Transmission gate (txgate) circuit implementation -- **`current_mirror.py`** - Current mirror circuit generator -- **`diff_pair.py`** - Differential pair circuit implementation -- **`lvcm.py`** - Low Voltage Current Mirror circuit -- **`opamp.py`** - Operational amplifier (currently disabled due to upstream bugs) - -### Dataset Generation Engines -- **`sweeper.py`** - Parallel processing sweeper for large-scale dataset generation -- **`sequential_sweeper.py`** - Sequential processing sweeper to avoid file conflicts -- **`enhanced_sweeper.py`** - Enhanced version with better error handling and progress tracking - -### Evaluation Framework -- **`evaluator_wrapper.py`** - Main evaluation coordinator that runs DRC, LVS, PEX, and geometric analysis -- **`evaluator_box/`** - Comprehensive evaluation modules: - - **`verification.py`** - DRC and LVS verification using Magic VLSI and Netgen - - **`physical_features.py`** - PEX extraction, area calculation, and symmetry analysis - - **`evaluator_wrapper.py`** - Backup evaluator wrapper - -### Dataset Processing and Analysis -- **`assemble_dataset.py`** - Converts raw JSON results to structured JSONL and CSV formats -- **`dataset_curator.py`** - Quality control and data validation for generated datasets -- **`data_diagnostics.py`** - Comprehensive analysis of parameter space coverage and dataset quality - -### Testing and Validation -- **`simple_test.py`** - Basic functionality tests for individual circuits -- **`run_fvf.py`** - Standalone FVF circuit testing -- **`test_output/`** - Directory containing test results and validation data - -### Infrastructure and Configuration -- **`sky130A.magicrc`** - Magic VLSI configuration file for SKY130 PDK -- **`run_pex.sh`** - Shell script for parasitic extraction using Magic VLSI -- **`evaluator_box/run_pex.sh`** - Backup PEX script -- **`run_full_pipeline.sh`** - Complete pipeline execution script - -### Output Directories -- **`sweep_outputs/`** - Results from parallel sweep operations -- **`sequential_outputs/`** - Results from sequential processing (created during execution) -- **`__pycache__/`** - Python bytecode cache - -## Files Modified - -### 1. `elhs.py` - Core Parameter Generation -**Changes:** -- Added `lvcm` to the PCells list (opamp temporarily disabled) -- Extended `cont_specs` with lvcm continuous parameters: - - **lvcm**: 2 parameter groups (width tuple, length scalar) = 3 total continuous dims -- Extended `int_specs` with integer parameters: - - **lvcm**: 2 integer parameters (fingers tuple, multipliers tuple) -- Enhanced `generate_mixed_samples()` to handle different parameter structures: - - **fvf, txgate**: Parameters as tuples (width, length, fingers, multipliers) - - **current_mirror, diff_pair**: Parameters as scalars (width, length) - - **lvcm**: Mixed parameters (width tuple, length scalar, fingers/multipliers tuples) - - **diff_pair**: Special handling for n_or_p_fet boolean parameter - -### 2. 
`sweeper.py` - Parallel Execution Engine -**Changes:** -- Uncommented all functional code -- Added imports for lvcm circuit: - ```python - from lvcm import add_lvcm_labels, low_voltage_cmirror - ``` -- Extended `PCELL_FUNCS` dictionary with lvcm factory function: - ```python - 'lvcm': lambda pdk, **kwargs: add_lvcm_labels(low_voltage_cmirror(pdk, **kwargs), pdk), - ``` - -### 3. `opamp.py` - Opamp Circuit with Labels (Prepared but disabled) -**Changes:** -- Fixed import path for opamp function -- Corrected main function to use proper PDK reference -- Added `add_output_stage=False` parameter to work around upstream bug - -### 4. Parameter Compatibility Fixes -**Major corrections made:** -- **fvf, txgate**: Changed fingers and multipliers to tuples as expected by circuits -- **current_mirror, diff_pair**: Changed width/length to scalars instead of tuples -- **diff_pair**: Fixed n_or_p_fet parameter to be boolean (True=nfet, False=pfet) -- **lvcm**: Maintained tuple structure for width, fingers, multipliers; scalar for length -- Removed incompatible categorical parameters (type, placement, short_source) that circuits don't accept - -## Current Working Circuits (5/6) - -### 1. **FVF (Flipped Voltage Follower)** - 60 samples -- Parameters: `width: tuple(2)`, `length: tuple(2)`, `fingers: tuple(2)`, `multipliers: tuple(2)` - -### 2. **TXGATE (Transmission Gate)** - 60 samples -- Parameters: `width: tuple(2)`, `length: tuple(2)`, `fingers: tuple(2)`, `multipliers: tuple(2)` - -### 3. **Current Mirror** - 30 samples -- Parameters: `width: float`, `length: float`, `numcols: int` - -### 4. **Differential Pair** - 30 samples -- Parameters: `width: float`, `length: float`, `fingers: int`, `n_or_p_fet: bool` - -### 5. **LVCM (Low Voltage Current Mirror)** - 45 samples -- Parameters: `width: tuple(2)`, `length: float`, `fingers: tuple(2)`, `multipliers: tuple(2)` - -### 6. 
**Opamp** - Temporarily disabled -- Issue: Upstream bug in `__add_output_stage` function causes KeyError: 'top_met_E' -- Status: Parameter structure prepared, can be re-enabled when upstream fix is available - -## Sample Counts -Current budget allocation produces: -- **fvf**: 60 samples -- **txgate**: 60 samples -- **current_mirror**: 30 samples -- **diff_pair**: 30 samples -- **lvcm**: 45 samples -- **Total**: 225 samples - -## Validation Results -โœ… **End-to-end test successful**: All 5 working circuits successfully instantiated and wrote GDS files -โœ… **Parameter generation**: Proper tuple/scalar structure for each circuit type -โœ… **LHS sampling**: Latin Hypercube Sampling with maximin optimization working -โœ… **Parallel evaluation**: Sweeper framework ready for full dataset generation - -## Usage -Run the complete pipeline: -```bash -cd /home/arnavshukla/OpenFASOC/openfasoc/generators/glayout/glayout/flow/blocks/elementary/LHS - -# Activate environment and set PDK -conda activate GLdev -export PDK_ROOT=/opt/conda/envs/GLdev/share/pdk - -# Test small subset (2 samples per circuit) -python simple_test.py - -# Generate full dataset - Sequential approach (recommended) -python sequential_sweeper.py - -# Generate full dataset - Parallel approach (may have file conflicts) -python sweeper.py - -# Convert to different formats -python assemble_dataset.py # Convert to JSONL and CSV formats -python dataset_curator.py # Quality control and validation -python data_diagnostics.py # Analyze parameter space coverage -``` - -## Current Dataset Generation Status (July 2025) - -**โœ… Successfully Running Sequential Dataset Generation** - -**Progress:** 17/465 samples completed (3.7%) as of latest check -- Currently processing FVF block (17/60 samples completed) -- Processing rate: ~12 seconds per sample -- Estimated completion time: ~90 minutes total - -**Working Features:** -- โœ… Sequential processing eliminates file conflicts -- โœ… GDS file generation for all circuit types -- โœ… Geometric feature extraction (area, symmetry scores) -- โœ… PEX (parasitic extraction) using Magic VLSI -- โœ… Environment setup with Magic and Netgen tools - -**Known Issues:** -- โŒ DRC/LVS verification fails after first sample due to PDK path reset - - First sample (fvf_0) contains complete DRC/LVS data - - Subsequent samples collect geometric + PEX data only - - Can be addressed later if comprehensive verification data needed - -**Sample Distribution:** -- **fvf**: 60 samples (currently processing) -- **txgate**: 60 samples -- **current_mirror**: 30 samples -- **diff_pair**: 30 samples -- **lvcm**: 45 samples -- **opamp**: 240 samples (prepared but disabled) -- **Total Active**: 225 samples -- **Total Planned**: 465 samples (when opamp is enabled) - -## Pipeline Workflow - -1. **Parameter Generation** (`elhs.py`) - - Latin Hypercube Sampling with maximin optimization - - Circuit-specific parameter specifications - - Mixed continuous/discrete parameter handling - -2. **Circuit Instantiation** (circuit-specific `.py` files) - - Generate GDS layouts using glayout library - - Apply proper labeling for verification - -3. **Comprehensive Evaluation** (`evaluator_wrapper.py`) - - DRC verification using Magic VLSI - - LVS verification using Netgen - - PEX extraction for parasitics - - Geometric analysis (area, symmetry) - -4. **Data Assembly** (`assemble_dataset.py`) - - Collect all JSON results - - Convert to structured formats (JSONL, CSV) - - Organize by circuit type - -5. 
**Quality Control** (`dataset_curator.py`) - - Validate data completeness - - Check for anomalies - - Generate quality reports - -6. **Analysis** (`data_diagnostics.py`) - - Parameter space coverage analysis - - Statistical summaries - - Visualization of dataset characteristics - -## Dataset Structure and Metrics - -Each generated sample contains comprehensive evaluation data: - -### Core Identification -- **component_name**: Unique identifier (e.g., "fvf_0", "txgate_15") -- **timestamp**: Generation timestamp -- **parameters**: Circuit-specific parameter values used - -### Design Rule Check (DRC) -- **status**: "pass"/"fail"/"error" -- **is_pass**: Boolean DRC result -- **report_path**: Path to detailed DRC report -- **summary**: Parsed violation details with rule names and coordinates - -### Layout vs Schematic (LVS) -- **status**: "pass"/"fail"/"error" -- **is_pass**: Boolean LVS result -- **report_path**: Path to detailed LVS report -- **summary**: Net/device mismatch analysis and comparison results - -### Parasitic Extraction (PEX) -- **status**: "PEX Complete"/"PEX Error" -- **total_resistance_ohms**: Cumulative parasitic resistance -- **total_capacitance_farads**: Cumulative parasitic capacitance - -### Geometric Features -- **raw_area_um2**: Total layout area in square micrometers -- **symmetry_score_horizontal**: Horizontal symmetry metric (0-1, 1=perfect) -- **symmetry_score_vertical**: Vertical symmetry metric (0-1, 1=perfect) - -### Processing Metadata -- **evaluation_time**: Processing time in seconds -- **gds_path**: Path to generated GDS file -- **drc_lvs_fail**: Combined DRC/LVS failure flag - -## Sample JSON Structure -```json -{ - "component_name": "fvf_0", - "timestamp": "2025-07-01T21:12:22.624098", - "drc_lvs_fail": true, - "drc": { - "status": "fail", - "is_pass": false, - "report_path": "/.../fvf_0.drc.rpt", - "summary": { - "is_pass": false, - "total_errors": 27, - "error_details": [...] - } - }, - "lvs": { - "status": "fail", - "is_pass": false, - "report_path": "/.../fvf_0.lvs.rpt", - "summary": {...} - }, - "pex": { - "status": "PEX Complete", - "total_resistance_ohms": 245.7, - "total_capacitance_farads": 1.23e-14 - }, - "geometric": { - "raw_area_um2": 5550.78, - "symmetry_score_horizontal": 0.679, - "symmetry_score_vertical": 0.986 - } -} diff --git a/src/glayout/blocks/ATLAS/docs/SOLUTION_SUMMARY.md b/src/glayout/blocks/ATLAS/docs/SOLUTION_SUMMARY.md deleted file mode 100644 index 52c00105..00000000 --- a/src/glayout/blocks/ATLAS/docs/SOLUTION_SUMMARY.md +++ /dev/null @@ -1,194 +0,0 @@ -# FVF Dataset Generation - DRC/LVS Fix Solution - -## Problem Summary - -The issue was that after the first FVF sample generation, subsequent samples failed because they couldn't find DRC/LVS report files. This happened due to: - -1. **PDK Environment Reset**: The PDK_ROOT and related environment variables got reset between trials -2. **Module Caching Issues**: Pydantic validation errors due to cached PDK objects -3. **Missing Fallback Mechanisms**: No robust error handling when DRC/LVS tools failed - -## Solution Implemented - -I've created a **robust dataset generation pipeline** based on the successful approach from `final_robust_sweeper.py` that was proven to work for 50 samples. The solution includes: - -### Key Files Created - -1. **`generate_fvf_360_robust_fixed.py`** - Main robust dataset generator - - Progressive testing (2 โ†’ 5 โ†’ 360 samples) - - Robust PDK environment handling - - Pydantic validation workarounds - - Proper file cleanup between trials - -2. 
**`test_environment.py`** - Environment verification script
-   - Tests all imports and dependencies
-   - Verifies the PDK setup
-   - Creates a test FVF component
-
-3. **`run_fvf_dataset.sh`** - Complete setup and execution script
-   - Sets up the conda environment
-   - Exports the correct PDK_ROOT
-   - Runs tests and dataset generation
-
-### Robust Features Implemented
-
-#### 1. **Environment Management**
-```python
-def setup_environment():
-    pdk_root = "/home/adityakak/.conda/envs/GLDev/share/pdk"
-    os.environ['PDK_ROOT'] = pdk_root
-    os.environ['PDKPATH'] = pdk_root
-    os.environ['PDK'] = 'sky130A'
-    os.environ['MAGIC_PDK_ROOT'] = pdk_root
-    os.environ['NETGEN_PDK_ROOT'] = pdk_root
-    # ... reset for each trial
-```
-
-#### 2. **Pydantic Validation Fix**
-```python
-def robust_flipped_voltage_follower(pdk, **params):
-    try:
-        return flipped_voltage_follower(pdk=pdk, **params)
-    except Exception as e:
-        if "validation error" in str(e).lower():
-            # Create fresh PDK object
-            new_pdk = MappedPDK(name=pdk.name, ...)
-            return flipped_voltage_follower(pdk=new_pdk, **params)
-```
-
-#### 3. **Robust Verification with Fallbacks**
-Uses the existing `robust_verification.py`, which creates fallback reports when the PDK tools fail:
-```python
-# If DRC fails, create a dummy passing report
-with open(drc_report_path, 'w') as f:
-    f.write(f"{component_name} count: 0\n")
-```
-
-#### 4. **File Organization**
-Each sample gets its own directory with all reports:
-```
-fvf_dataset_360_robust/
-├── sample_0001/
-│   ├── fvf_sample_0001.gds
-│   ├── fvf_sample_0001.drc.rpt
-│   └── fvf_sample_0001.lvs.rpt
-├── sample_0002/
-│   └── ...
-└── fvf_results.json
-```
-
-## Usage Instructions
-
-### Quick Start
-
-1. **Navigate to the LHS directory:**
-   ```bash
-   cd /home/adityakak/OpenFASOC/openfasoc/generators/glayout/glayout/flow/blocks/elementary/LHS
-   ```
-
-2. **Run the complete pipeline:**
-   ```bash
-   ./run_fvf_dataset.sh
-   ```
-
-### Manual Setup (Alternative)
-
-1. **Activate the environment:**
-   ```bash
-   conda activate GLDev
-   export PDK_ROOT=/home/adityakak/.conda/envs/GLDev/share/pdk
-   ```
-
-2. **Test the environment:**
-   ```bash
-   python test_environment.py
-   ```
-
-3. **Run dataset generation:**
-   ```bash
-   python generate_fvf_360_robust_fixed.py
-   ```
-
-## Progressive Testing Approach
-
-The script follows a safe progressive approach:
-
-1. **2-Sample Test** → verify basic functionality
-2. **5-Sample Test** → confirm multi-trial robustness
-3. **360 Samples** → full dataset generation (with user confirmation)
-
-## Expected Output
-
-### Successful Sample Output:
-```
-✅ Sample 0001 completed in 12.3s (DRC: ✓, LVS: ✓)
-✅ Sample 0002 completed in 11.8s (DRC: ✓, LVS: ✓)
-📈 Progress: 5/5 (100.0%) - Success: 100.0% - Complete
-```
-
-### Dataset Structure:
-```
-fvf_dataset_360_robust/
-├── fvf_parameters.json      # Parameter combinations used
-├── fvf_results.json         # Detailed results for each sample
-├── fvf_summary.csv          # Summary statistics
-├── sample_0001/
-│   ├── fvf_sample_0001.gds
-│   ├── fvf_sample_0001.drc.rpt
-│   └── fvf_sample_0001.lvs.rpt
-├── sample_0002/
-│   └── ...
-└── sample_0360/
-    └── ... 
-```
-
-## Key Differences from Original Approach
-
-| Original Issue | Robust Solution |
-|---------------|-----------------|
-| PDK environment reset | Force-reset the PDK environment for each trial |
-| Pydantic validation errors | Robust wrapper with fresh PDK objects |
-| DRC/LVS tool failures | Fallback mechanisms create dummy reports |
-| File conflicts | Individual directories + cleanup |
-| No progress tracking | Detailed progress and success-rate tracking |
-
-## Troubleshooting
-
-### If the Environment Test Fails:
-1. Check the conda environment: `conda activate GLDev`
-2. Verify the PDK path: `ls /home/adityakak/.conda/envs/GLDev/share/pdk`
-3. Check the glayout installation
-
-### If Sample Generation Fails:
-- Check `fvf_results.json` for error details
-- Review sample directories for partial results
-- Verify that the robust_verification.py module is present
-
-### If DRC/LVS Reports Are Missing:
-- The robust verification creates fallback reports
-- Check sample directories for .drc.rpt and .lvs.rpt files
-- Review the robust_verification.py logs
-
-## Performance Expectations
-
-- **Sample Generation**: ~12 seconds per sample
-- **2-Sample Test**: ~30 seconds total
-- **5-Sample Test**: ~90 seconds total
-- **360-Sample Dataset**: ~72 minutes total (1.2 hours)
-
-## Success Metrics
-
-The pipeline is considered successful with:
-- ✅ **80%+ success rate** for component generation
-- ✅ **Individual sample directories** with all files
-- ✅ **JSON and CSV outputs** with results
-- ✅ **No pipeline crashes** due to file conflicts
-
-## Next Steps
-
-1. **Test with 2 samples** to verify the fix works
-2. **Scale to 5 samples** to confirm robustness
-3. **Generate the full 360-sample dataset** for complete parameter coverage
-4. **Apply the same approach** to other circuit blocks (transmission gate, current mirror, etc.)
-
-The solution maintains the proven robust approach from `final_robust_sweeper.py` while scaling it specifically for the FVF 360-sample requirement.
\ No newline at end of file
diff --git a/src/glayout/blocks/ATLAS/docs/fvf_generator_update_summary.md b/src/glayout/blocks/ATLAS/docs/fvf_generator_update_summary.md
deleted file mode 100644
index d9ce6f92..00000000
--- a/src/glayout/blocks/ATLAS/docs/fvf_generator_update_summary.md
+++ /dev/null
@@ -1,38 +0,0 @@
-# FVF Dataset Generator Update Summary
-
-## Changes Made to generate_fvf_8h_runtime_aware.py
-
-Updated the FVF dataset generator to use the 8-hour runtime-aware parameters from the budget allocation.
-
-### Key Updates:
-
-1. **Parameter Source**: Changed from `fvf_2000_lhs_params/fvf_parameters.json` to `gen_params_8h_runtime_aware/fvf_params.json`
-
-2. **Dataset Size**: Updated from 2,000 samples to 10,886 samples (from the budget allocation)
-
-3. **Output Directory**: Changed from `fvf_dataset_2000_lhs` to `fvf_dataset_8h_runtime_aware`
-
-4. **Checkpoint Interval**: Increased from 50 to 100 samples for the larger dataset
-
-5. **Progress Reporting**: Fixed to report every 100 samples for the large dataset
-
-6. **Documentation**: Updated all references to reflect the 8-hour runtime-aware budget plan
-
-7. 
**Time Estimates**: Updated to reference the 10.748 seconds per sample from the budget
-
-### Budget Context:
-- **FVF Allocation**: 10,886 samples out of 40,814 total
-- **Expected Time**: 10.748 seconds per sample (from the budget analysis)
-- **Part of**: the 8-hour, 26-core runtime-aware budget plan
-
-### File Structure:
-- **New file**: `generate_fvf_8h_runtime_aware.py` (10,886 samples)
-- **Original**: `generate_fvf_360_robust_fixed.py` (2,000 samples) - kept for reference
-
-### Parameters Verified:
-- ✅ 10,886 parameter combinations loaded successfully
-- ✅ Proper FVF parameter format (width, length, fingers, multipliers as tuples)
-- ✅ Enhanced LHS sampling with maximin refinement
-
-### Ready to Run:
-The generator is now configured to execute the FVF portion of the 8-hour runtime-aware budget plan.
diff --git a/src/glayout/blocks/ATLAS/docs/parameter_generation_update_summary.md b/src/glayout/blocks/ATLAS/docs/parameter_generation_update_summary.md
deleted file mode 100644
index df3b6a70..00000000
--- a/src/glayout/blocks/ATLAS/docs/parameter_generation_update_summary.md
+++ /dev/null
@@ -1,42 +0,0 @@
-# Parameter Generation Update Summary
-
-## Changes Made to elhs.py
-
-Updated `elhs.py` to generate parameters according to the 8-hour runtime-aware budget specified in `budgets_8h_runtime_aware_measuredTp_dpCorrected.json`.
-
-### Key Updates:
-
-1. **Sample Allocations**: Updated the `inventory_np` dictionary to use the exact sample counts from the budget:
-   - `fvf`: 10,886 samples
-   - `txgate`: 3,464 samples
-   - `current_mirror`: 7,755 samples
-   - `diff_pair`: 9,356 samples
-   - `lvcm`: 3,503 samples
-   - `opamp`: 5,850 samples
-   - **Total**: 40,814 samples
-
-2. **Seed Consistency**: Updated the random seed from 0 to 1337 to match the budget plan
-
-3. **Output Directory**: Changed the output directory from `opamp_180_params` to `gen_params_8h_runtime_aware`
-
-4. **Documentation**: Updated comments and descriptions to reflect the 8-hour runtime-aware budget
-
-5. **File Naming**: Standardized parameter file naming to `{pcell}_params.json`
-
-### Budget Plan Details:
-- **Duration**: 8 hours
-- **Cores**: 26
-- **Overhead**: 1.2x
-- **Sampling Method**: Enhanced LHS (e-LHS) with maximin refinement for continuous parameters, Orthogonal Arrays (OA) for discrete parameters
-- **Allocation Formula**: `n_p = (C*H*3600)/(O*Σd) * d_p / T_p`
-  (so the total compute, Σ n_p*T_p over all PCells, works out to C*H*3600/O core-seconds)
-
-### Generated Files:
-All parameter files have been successfully generated in `gen_params_8h_runtime_aware/`:
-- `current_mirror_params.json` (7,755 samples)
-- `diff_pair_params.json` (9,356 samples)
-- `fvf_params.json` (10,886 samples)
-- `lvcm_params.json` (3,503 samples)
-- `opamp_params.json` (5,850 samples)
-- `txgate_params.json` (3,464 samples)
-
-The total matches the budget exactly: 40,814 samples across all PCells.
diff --git a/src/glayout/blocks/ATLAS/eda_scores.py b/src/glayout/blocks/ATLAS/eda_scores.py
deleted file mode 100644
index f1190acb..00000000
--- a/src/glayout/blocks/ATLAS/eda_scores.py
+++ /dev/null
@@ -1,446 +0,0 @@
-import re
-import ast
-from pathlib import Path
-from typing import Any, Dict, List, Optional
-
-import numpy as np
-import pandas as pd
-import matplotlib.pyplot as plt
-
-
-RANK_RE = re.compile(r"^RANK\s+(\d+):\s+(\S+)\s+\(ID:\s*(\d+)\)")
-SEP_RE = re.compile(r"^-{5,}")
-
-
-def safe_parse_value(raw: str) -> Any:
-    s = raw.strip()
-    # try: literal structures first
-    try:
-        return ast.literal_eval(s)
-    except Exception:
-        pass
-    # try: numeric types
-    try:
-        if "." 
in s or "e" in s.lower(): - return float(s) - return int(s) - except Exception: - pass - # booleans - if s in {"True", "False"}: - return s == "True" - return s - - -def parse_scores_txt(scores_path: Path) -> pd.DataFrame: - rows: List[Dict[str, Any]] = [] - with scores_path.open("r", encoding="utf-8", errors="ignore") as f: - in_block = False - current: Dict[str, Any] = {} - section: str = "" - for line in f: - line = line.rstrip("\n") - if not in_block: - m = RANK_RE.match(line) - if m: - # start new block - in_block = True - current = {} - current["rank"] = int(m.group(1)) - current["component_name_header"] = m.group(2) - current["id"] = int(m.group(3)) - section = "" - else: - continue - else: - # inside a block - if SEP_RE.match(line): - # end of block - rows.append(current) - in_block = False - current = {} - section = "" - continue - if not line.strip(): - continue - if RANK_RE.match(line): - # If a rank header appears without a separator, close previous block - if current: - rows.append(current) - m = RANK_RE.match(line) - current = { - "rank": int(m.group(1)), - "component_name_header": m.group(2), - "id": int(m.group(3)), - } - section = "" - continue - - # detect section headers like "Individual Scores:" or "Raw Data:" - if line.strip().endswith(":") and ":" not in line.strip()[:-1]: - section = line.strip()[:-1] - continue - - # parse key: value lines - if ":" in line: - key, value = line.split(":", 1) - key = key.strip() - value = value.strip() - parsed = safe_parse_value(value) - # namespace keys by section to avoid collisions if needed - if section in {"Individual Scores", "Raw Data"}: - namespaced_key = key - else: - namespaced_key = key - current[namespaced_key] = parsed - - # flush last block if file didn't end with separator - if in_block and current: - rows.append(current) - - df = pd.DataFrame(rows) - - # Derived features - with np.errstate(divide="ignore", invalid="ignore"): - df["resistance_density"] = df["total_resistance_ohms"] / df["area_um2"] - df["capacitance_density"] = df["total_capacitance_farads"] / df["area_um2"] - df["symmetry_mean"] = (df.get("symmetry_horizontal", np.nan) + df.get("symmetry_vertical", np.nan)) / 2.0 - - # Convenient log features (guard zeros/negatives) - def safe_log10(x: pd.Series) -> pd.Series: - return np.log10(x.where(x > 0)) - - df["log10_resistance_density"] = safe_log10(df["resistance_density"]) - df["log10_capacitance_density"] = safe_log10(df["capacitance_density"]) - - # Normalize booleans - for col in ["success", "drc_pass", "lvs_pass"]: - if col in df.columns: - df[col] = df[col].astype("boolean") - - return df - - -def ensure_outdir(path: Path) -> None: - path.mkdir(parents=True, exist_ok=True) - - -def plot_hist( - ax, - series: pd.Series, - title: str, - bins: int = 50, - logy: bool = False, - xlabel: Optional[str] = None, - ylabel: Optional[str] = "Count", - formula: Optional[str] = None, -): - data = series.dropna().values - ax.hist(data, bins=bins, color="#4C78A8", alpha=0.85) - ax.set_title(title) - if xlabel: - ax.set_xlabel(xlabel) - if ylabel: - ax.set_ylabel(ylabel) - if logy: - ax.set_yscale("log") - if formula: - ax.text( - 0.02, - 0.98, - formula, - transform=ax.transAxes, - va="top", - ha="left", - fontsize=9, - bbox=dict(facecolor="white", alpha=0.7, edgecolor="none", boxstyle="round,pad=0.2"), - ) - - -def make_plots(df: pd.DataFrame, outdir: Path) -> None: - ensure_outdir(outdir) - - # Save the parsed data for future analysis - parsed_csv = outdir / "scores_parsed.csv" - df.to_csv(parsed_csv, index=False) - 
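    # Note: scores_parsed.csv mirrors the parsed DataFrame (one row per RANK
    # block), so downstream analyses can simply reload it with
    # pd.read_csv(parsed_csv) instead of re-parsing the raw scores.txt blocks.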
- # 1) Score histograms (each saved individually) - score_cols = [ - "Final Score", - "Resistance Score", - "Capacitance Score", - "Symmetry Score", - "Verification Score", - ] - # Detailed descriptions sourced from experiments/weights.py logic - w_str = "w=0.99" - score_desc_map: Dict[str, str] = { - "Final Score": ( - "final_score = resistance_score + capacitance_score + symmetry_score + verification_score\n" - "If verification_score == 0 (HARDSTOP), all components and final_score are set to 0.\n" - "Interpretation: Higher total indicates better overall performance across components." - ), - "Resistance Score": ( - "raw_pos = exp(-w*(median_R - R)/IQR_R), raw_neg = exp(-w*(R - median_R)/IQR_R)\n" - "resistance_score = 0.5 + 0.5*(raw_pos/max_pos) if R<=median_R else 0.5*(-raw_neg/max_neg)\n" - ), - "Capacitance Score": ( - "raw_pos = exp(-w*(median_C - C)/IQR_C), raw_neg = exp(-w*(C - median_C)/IQR_C)\n" - "capacitance_score = 0.5 + 0.5*(raw_pos/max_pos) if C<=median_C else 0.5*(-raw_neg/max_neg)\n" - ), - "Symmetry Score": ( - "symmetry_score = 0.5*(symmetry_horizontal + symmetry_vertical)\n" - "Interpretation: Average of horizontal and vertical symmetry measures; higher suggests better symmetry." - ), - "Verification Score": ( - "verification_score = max(0, 1 - total_errors/threshold), threshold=50\n" - "Errors are derived from DRC/LVS reports when those checks fail.\n" - "If score == 1 it's a HARDPASS; if score == 0 it triggers HARDSTOP in the final score." - ), - } - for col in score_cols: - if col in df.columns: - fig, ax = plt.subplots(figsize=(7, 5)) - plot_hist( - ax, - df[col], - col, - bins=50, - logy=False, - xlabel=col, - ylabel="Count", - formula=score_desc_map.get(col, col), - ) - fname = f"hist_{col.lower().replace(' ', '_')}.png" - fig.tight_layout() - fig.savefig(outdir / fname, dpi=220) - plt.close(fig) - - # 2) Feature histograms (each saved individually) - # Resistance density - fig, ax = plt.subplots(figsize=(7, 5)) - plot_hist( - ax, - df["resistance_density"], - "Resistance Density", - bins=60, - logy=True, - xlabel="resistance_density (ohms per ยตmยฒ)", - ylabel="Count", - formula=( - "resistance_density = total_resistance_ohms / area_um2\n" - "Interpretation: Lower values indicate lower resistive parasitics per unit area.\n" - "Log-scaled y-axis to emphasize tail behavior." - ), - ) - fig.tight_layout() - fig.savefig(outdir / "hist_resistance_density.png", dpi=220) - plt.close(fig) - - # Capacitance density - fig, ax = plt.subplots(figsize=(7, 5)) - plot_hist( - ax, - df["capacitance_density"], - "Capacitance Density", - bins=60, - logy=True, - xlabel="capacitance_density (farads per ยตmยฒ)", - ylabel="Count", - formula=( - "capacitance_density = total_capacitance_farads / area_um2\n" - "Interpretation: Lower values indicate lower capacitive parasitics per unit area.\n" - "Log-scaled y-axis to emphasize tail behavior." - ), - ) - fig.tight_layout() - fig.savefig(outdir / "hist_capacitance_density.png", dpi=220) - plt.close(fig) - - # Execution time - if "execution_time" in df.columns: - fig, ax = plt.subplots(figsize=(7, 5)) - plot_hist( - ax, - df["execution_time"], - "Execution Time (s)", - bins=60, - logy=True, - xlabel="execution_time (seconds)", - ylabel="Count", - formula=( - "execution_time = parsed runtime in seconds\n" - "Interpretation: Distribution of end-to-end run times (log-scaled y-axis)." 
- ), - ) - fig.tight_layout() - fig.savefig(outdir / "hist_execution_time.png", dpi=220) - plt.close(fig) - - # Symmetry mean - fig, ax = plt.subplots(figsize=(7, 5)) - plot_hist( - ax, - df["symmetry_mean"], - "Mean Symmetry", - bins=60, - logy=False, - xlabel="symmetry_mean", - ylabel="Count", - formula=( - "symmetry_mean = (symmetry_horizontal + symmetry_vertical) / 2\n" - "Interpretation: Average of the two symmetry measures; higher suggests better overall symmetry." - ), - ) - fig.tight_layout() - fig.savefig(outdir / "hist_symmetry_mean.png", dpi=220) - plt.close(fig) - - # 3) Scatter: density vs density colored by Final Score - if "Final Score" in df.columns: - fig, ax = plt.subplots(figsize=(8, 6)) - x = df["log10_resistance_density"] - y = df["log10_capacitance_density"] - c = df["Final Score"] - sc = ax.scatter(x, y, c=c, cmap="viridis", s=8, alpha=0.7) - ax.set_xlabel("log10(resistance_density)") - ax.set_ylabel("log10(capacitance_density)") - ax.set_title("Density Map colored by Final Score") - cb = fig.colorbar(sc, ax=ax) - cb.set_label("Final Score") - # Add formulas used on this plot - formula_text = ( - "resistance_density = total_resistance_ohms / area_um2\n" - "capacitance_density = total_capacitance_farads / area_um2\n" - "log10_resistance_density = log10(resistance_density)\n" - "log10_capacitance_density = log10(capacitance_density)\n" - "Color = Final Score (higher indicates better overall performance).\n" - "Lower values along each axis indicate lower parasitic densities." - ) - ax.text( - 0.02, - 0.98, - formula_text, - transform=ax.transAxes, - va="top", - ha="left", - fontsize=9, - bbox=dict(facecolor="white", alpha=0.7, edgecolor="none", boxstyle="round,pad=0.2"), - ) - fig.tight_layout() - fig.savefig(outdir / "scatter_density_vs_density_colored_final.png", dpi=220) - plt.close(fig) - - # 4) Pairwise scatter matrix of key features - from pandas.plotting import scatter_matrix - - pair_cols = [ - "log10_resistance_density", - "log10_capacitance_density", - "symmetry_mean", - "Final Score", - ] - existing_pair_cols = [c for c in pair_cols if c in df.columns] - if len(existing_pair_cols) >= 2: - fig = plt.figure(figsize=(10, 10)) - axarr = scatter_matrix(df[existing_pair_cols].dropna(), figsize=(10, 10), diagonal="hist", alpha=0.6, color="#4C78A8") - # rotate x tick labels for readability - for ax in axarr.ravel(): - for tick in ax.get_xticklabels(): - tick.set_rotation(45) - plt.suptitle("Scatter Matrix of Key Features") - # Provide formulas for derived features used in the matrix - matrix_formula_text = ( - "resistance_density = total_resistance_ohms / area_um2\n" - "capacitance_density = total_capacitance_farads / area_um2\n" - "log10_resistance_density = log10(resistance_density)\n" - "log10_capacitance_density = log10(capacitance_density)\n" - "symmetry_mean = (symmetry_horizontal + symmetry_vertical) / 2\n" - "Diagonal: histograms; off-diagonal: scatter. Helps visualize pairwise relationships." 
- ) - fig.text( - 0.01, - 0.01, - matrix_formula_text, - va="bottom", - ha="left", - fontsize=9, - bbox=dict(facecolor="white", alpha=0.7, edgecolor="none", boxstyle="round,pad=0.2"), - ) - plt.tight_layout(rect=[0, 0.03, 1, 0.95]) - plt.savefig(outdir / "scatter_matrix_key_features.png", dpi=200) - plt.close(fig) - - # 5) Correlation heatmap using matplotlib - corr_cols = [ - "Final Score", - "Resistance Score", - "Capacitance Score", - "Symmetry Score", - "Verification Score", - "resistance_density", - "capacitance_density", - "symmetry_mean", - "execution_time", - ] - corr_cols = [c for c in corr_cols if c in df.columns] - if len(corr_cols) >= 2: - corr = df[corr_cols].corr(numeric_only=True) - fig, ax = plt.subplots(figsize=(10, 8)) - im = ax.imshow(corr.values, cmap="coolwarm", vmin=-1, vmax=1) - ax.set_xticks(range(len(corr_cols))) - ax.set_yticks(range(len(corr_cols))) - ax.set_xticklabels(corr_cols, rotation=45, ha="right") - ax.set_yticklabels(corr_cols) - ax.set_xlabel("Features") - ax.set_ylabel("Features") - cbar = fig.colorbar(im, ax=ax, fraction=0.046, pad=0.04) - cbar.set_label("Pearson correlation (\u03c1)") - ax.set_title("Correlation Heatmap") - # Add Pearson correlation formula and interpretation - heatmap_formula_text = ( - "Pearson \u03c1(X,Y) = cov(X,Y) / (\u03c3_X \u03c3_Y)\n" - "Interpretation: values near 1 = strong positive, near -1 = strong negative, near 0 = weak linear relationship." - ) - fig.text( - 0.01, - 0.01, - heatmap_formula_text, - va="bottom", - ha="left", - fontsize=9, - bbox=dict(facecolor="white", alpha=0.7, edgecolor="none", boxstyle="round,pad=0.2"), - ) - fig.tight_layout() - fig.savefig(outdir / "corr_heatmap.png", dpi=200) - plt.close(fig) - - -def main(): - base_dir = Path(__file__).resolve().parent - # Look for scores.txt in current directory first, then in base_dir - scores_path = Path("scores.txt") - if not scores_path.exists(): - scores_path = base_dir / "scores.txt" - outdir = Path("eda") - ensure_outdir(outdir) - if not scores_path.exists(): - raise SystemExit(f"scores.txt not found at: {scores_path}") - - print("Parsing scores.txt ...") - df = parse_scores_txt(scores_path) - print(f"Parsed {len(df)} samples with {df.shape[1]} columns") - - print("Generating plots ...") - make_plots(df, outdir) - print(f"Saved outputs to {outdir}") - - -if __name__ == "__main__": - main() - - - - - - diff --git a/src/glayout/blocks/ATLAS/elementary_inventory.py b/src/glayout/blocks/ATLAS/elementary_inventory.py deleted file mode 100644 index 17421349..00000000 --- a/src/glayout/blocks/ATLAS/elementary_inventory.py +++ /dev/null @@ -1,91 +0,0 @@ -# Flipped Voltage Follower (fvf) -fvf_params = { - "type": { - "values": ["nmos", "pmos"], - "count": 1 - }, - "width": { - "min": 0.5, "max": 10.0, "step": 0.25, - "count": 2 # two devices - }, - "length": { - "min": 0.15, "max": 4.0, "step": 0.2, - "count": 2 - }, - "fingers": { - "min": 1, "max": 5, "step": 1, - "count": 2 - }, - "multipliers": { - "min": 1, "max": 2, "step": 1, - "count": 2 - }, - "placement": { - "values": ["horizontal", "vertical"], - "count": 1 - } -} - -# Transmission Gate -txgate_params = { - "width": { - "min": 0.5, "max": 10.0, "step": 0.25, - "count": 2 - }, - "length": { - "min": 0.15, "max": 4.0, "step": 0.2, - "count": 2 - }, - "fingers": { - "min": 1, "max": 5, "step": 1, - "count": 2 - }, - "multipliers": { - "min": 1, "max": 2, "step": 1, - "count": 2 - } -} - -# Current Mirror -cm_params = { - "type": { - "values": ["nmos", "pmos"], - "count": 1 - }, - "numcols": { - "min": 1, 
"max": 5, "step": 1, - "count": 1 - }, - "width": { - "min": 0.5, "max": 20.0, "step": 0.25, - "count": 1 - }, - "length": { - "min": 0.15, "max": 4.0, "step": 0.2, - "count": 1 - } -} - -# Differential Pair -diffpair_params = { - "type": { - "values": ["nmos", "pmos"], - "count": 1 - }, - "width": { - "min": 0.5, "max": 20.0, "step": 0.25, - "count": 1 - }, - "length": { - "min": 0.15, "max": 4.0, "step": 0.2, - "count": 1 - }, - "fingers": { - "min": 1, "max": 5, "step": 1, - "count": 1 - }, - "short_source": { - "values": [True, False], - "count": 1 - } -} diff --git a/src/glayout/blocks/ATLAS/elhs.py b/src/glayout/blocks/ATLAS/elhs.py deleted file mode 100644 index 75652006..00000000 --- a/src/glayout/blocks/ATLAS/elhs.py +++ /dev/null @@ -1,446 +0,0 @@ -import numpy as np -import random -from scipy.spatial.distance import pdist -from scipy.stats import qmc - - -# === Budget Allocation & Validation === - - -def allocate_budget_fixed_total(d_dims, N_total): - total_dim = sum(d_dims) - raw = [N_total * (d / total_dim) for d in d_dims] - floors = [int(np.floor(x)) for x in raw] - remainder = N_total - sum(floors) - frac_parts = [(x - f, i) for i, (x, f) in enumerate(zip(raw, floors))] - for _, idx in sorted(frac_parts, reverse=True)[:remainder]: - floors[idx] += 1 - return floors - - -def _budgets_valid(budgets, level_counts): - """ - Check each budget is divisible by all integer OA level counts for that PCell. - level_counts: list of lists, per-PCell integer axis levels. - """ - for b, levels in zip(budgets, level_counts): - for s in levels: - if b % s != 0: - return False - return True - - -def find_valid_N_total(d_dims, level_counts, N_start, max_search=10000): - for N in range(N_start, N_start + max_search): - budgets = allocate_budget_fixed_total(d_dims, N) - if _budgets_valid(budgets, level_counts): - return N, budgets - raise ValueError("No valid N_total found") - - -# === LHS + Maximin === - - -def min_pairwise_distance(points): - if len(points) < 2: - return 0.0 - return pdist(points, metric='euclidean').min() - - -def lhs_maximin(d, n, patience=100, seed=None): - engine = qmc.LatinHypercube(d, seed=seed) - sample = engine.random(n) - best = sample.copy() - best_min = min_pairwise_distance(best) - - no_improve = 0 - while no_improve < patience: - i, j = random.sample(range(n), 2) - axis = random.randrange(d) - cand = best.copy() - cand[i, axis], cand[j, axis] = cand[j, axis], cand[i, axis] - cand_min = min_pairwise_distance(cand) - if cand_min > best_min: - best, best_min = cand, cand_min - no_improve = 0 - else: - no_improve += 1 - - return best - - -# === OA Sampling for Integer and Categorical Axes === - - -def sample_integer_oa(minv, maxv, N, seed=None): - random.seed(seed) - levels = list(range(minv, maxv + 1)) - s = len(levels) - if N % s != 0: - raise ValueError(f"N ({N}) not a multiple of {s}") - repeats = N // s - seq = levels * repeats - random.shuffle(seq) - return seq - - -def sample_categorical_oa(levels, N, seed=None): - """ - OA sampling for categorical variables. 
- levels: list of category values - N: number of samples (must be divisible by len(levels)) - Returns: list of N categorical samples with balanced representation - """ - random.seed(seed) - s = len(levels) - if N % s != 0: - raise ValueError(f"N ({N}) not a multiple of number of levels ({s})") - repeats = N // s - seq = levels * repeats - random.shuffle(seq) - return seq - - -# === PCell Configuration Specs === - - -# Continuous specs: (axis_name, min, max, count) -cont_specs = { - 'fvf': [ - ('width', 0.5, 20.0, 2), - ('length', 0.15, 4.0, 2), - ], - 'txgate': [ - ('width', 0.5, 20.0, 2), - ('length', 0.15, 4.0, 2), - ], - 'current_mirror': [ - ('width', 0.5, 20.0, 1), - ('length', 0.15, 4.0, 1), - ], - 'diff_pair': [ - ('width', 0.5, 20.0, 1), - ('length', 0.15, 4.0, 1), - ], - 'opamp': [ - ('half_diffpair_params_w', 5, 7, 1), # width, length (fingers is int) - constrained length - ('half_diffpair_params_l', 0.5, 1.5, 1), # width, length (fingers is int) - constrained length - ('diffpair_bias_w', 5, 7, 1), # width, length (fingers is int) - constrained length - ('diffpair_bias_l', 1.5, 2.5, 1), # width, length (fingers is int) - constrained length - ('half_common_source_params_w', 6, 8, 1), # width, length (fingers, mults are int) - much shorter length - ('half_common_source_params_l', 0.5, 1.5, 1), # width, length (fingers, mults are int) - much shorter length - ('half_common_source_bias_w', 5, 7, 1), # width, length (fingers, mults are int) - constrained length - ('half_common_source_bias_l', 1.5, 2.5, 1), # width, length (fingers, mults are int) - constrained length - ('output_stage_params', 0.5, 1.5, 2), # width, length (fingers is int) - constrained length - ('output_stage_bias', 1.5, 2.5, 2), # width, length (fingers is int) - constrained length - ('half_pload_w', 5, 7, 1), # width, length (fingers is int) - constrained length - ('half_pload_l', 0.5, 1.5, 1), # width, length (fingers is int) - constrained length - ('mim_cap_size', 10.0, 15.0, 2), # width, height - ], - 'lvcm': [ - ('width', 0.5, 20.0, 2), # tuple of 2 widths - ('length', 0.15, 4.0, 1), # single length - ], -} - - -# Integer (OA) specs: (axis_name, min, max) -int_specs = { - 'fvf': [ - ('fingers', 1, 5), - ('multipliers', 1, 2), - ], - 'txgate': [ - ('fingers', 1, 5), - ('multipliers', 1, 2), - ], - 'current_mirror': [ - ('numcols', 1, 5), - ], - 'diff_pair': [ - ('fingers', 1, 5), - ], - 'opamp': [ - ('half_diffpair_fingers', 1, 2), - ('diffpair_bias_fingers', 1, 2), - ('half_common_source_fingers', 8, 12), - ('half_common_source_mults', 2, 4), - ('half_common_source_bias_fingers', 7, 9), - ('half_common_source_bias_mults', 2, 3), - ('output_stage_fingers', 1, 12), - ('output_stage_bias_fingers', 1, 6), - ('half_pload_fingers', 4, 6), - ('mim_cap_rows', 1, 5), - ('rmult', 1, 3), - ('with_antenna_diode_on_diffinputs', 0, 8), # Allow 0 or 2-8; we'll remap 1 to 0 later - ], - 'lvcm': [ - ('fingers', 1, 5), # tuple of 2 finger counts - ('multipliers', 1, 3), # tuple of 2 multiplier counts - ], -} - - -# Categorical specs: (axis_name, [levels]) -cat_specs = [ - ('type', ['nmos','pmos']), - ('placement', ['horizontal','vertical']), - ('short_source', [False, True]), - # For opamp we always disable the optional buffer โ†’ single-level categorical (all False) - ('add_output_stage', [False]), -] - - -# === Helper: Merge LHS & OA into Mixed Samples === - - -def generate_mixed_samples(pcell, lhs_pts, int_oa, cat_oa): - """ - lhs_pts: array (n_p, d_p) for continuous dims - int_oa: dict axis_name -> list of N integer OA samples 
- cat_oa: dict axis_name -> list of N OA category choices - Returns list of dicts of raw samples. - """ - samples = [] - n_p = lhs_pts.shape[0] - - # Build flat continuous spec list - flat_cont = [] - for name, mn, mx, cnt in cont_specs[pcell]: - for _ in range(cnt): - flat_cont.append((name, mn, mx)) - - for i in range(n_p): - raw = {} - # Continuous dims - for dim_idx, (name, mn, mx) in enumerate(flat_cont): - val = lhs_pts[i, dim_idx] * (mx - mn) + mn - raw.setdefault(name, []).append(val) - - # Special handling for specific pcells - if pcell == 'opamp': - # For opamp, the complex parameter tuples will be constructed later - # Just convert continuous params to tuples for now - for name in list(raw.keys()): - raw[name] = tuple(raw[name]) - elif pcell == 'lvcm': - # Convert width to tuple, length stays single value - processed_params = {} - if 'width' in raw: - processed_params['width'] = (raw['width'][0], raw['width'][1]) - if 'length' in raw: - processed_params['length'] = raw['length'][0] # Single value - raw = processed_params - elif pcell in ['current_mirror', 'diff_pair']: - # These circuits expect scalar values for width and length - processed_params = {} - if 'width' in raw: - processed_params['width'] = raw['width'][0] # Single scalar value - if 'length' in raw: - processed_params['length'] = raw['length'][0] # Single scalar value - raw = processed_params - else: - # Convert lists to tuples for other pcells - for name in list(raw.keys()): - raw[name] = tuple(raw[name]) - - # Integer axes from OA - for name, _, _ in int_specs[pcell]: - if pcell in ['fvf', 'txgate'] and name in ['fingers', 'multipliers']: - # For fvf and txgate, these should be tuples of 2 values - raw[name] = (int_oa[name][i], int_oa[name][i]) - elif pcell == 'lvcm' and name in ['fingers', 'multipliers']: - # For lvcm, these should be tuples of 2 values - raw[name] = (int_oa[name][i], int_oa[name][i]) - else: - raw[name] = int_oa[name][i] - - # Special post-processing for opamp to construct proper parameter tuples - if pcell == 'opamp': - # Ensure antenna diode count is valid - if raw.get('with_antenna_diode_on_diffinputs', 0) == 1: - raw['with_antenna_diode_on_diffinputs'] = 0 - # Extract scalar values from single-element tuples/lists - def get_scalar(v): - return v[0] if isinstance(v, (list, tuple)) else v - # Construct parameter tuples with scalar values - raw['half_diffpair_params'] = ( - get_scalar(raw['half_diffpair_params_w']), - get_scalar(raw['half_diffpair_params_l']), - raw['half_diffpair_fingers'] - ) - raw['diffpair_bias'] = ( - get_scalar(raw['diffpair_bias_w']), - get_scalar(raw['diffpair_bias_l']), - raw['diffpair_bias_fingers'] - ) - raw['half_common_source_params'] = ( - get_scalar(raw['half_common_source_params_w']), - get_scalar(raw['half_common_source_params_l']), - raw['half_common_source_fingers'], - raw['half_common_source_mults'] - ) - raw['half_common_source_bias'] = ( - get_scalar(raw['half_common_source_bias_w']), - get_scalar(raw['half_common_source_bias_l']), - raw['half_common_source_bias_fingers'], - raw['half_common_source_bias_mults'] - ) - raw['output_stage_params'] = ( - get_scalar(raw['output_stage_params'][0]), - get_scalar(raw['output_stage_params'][1]), - raw['output_stage_fingers'] - ) - raw['output_stage_bias'] = ( - get_scalar(raw['output_stage_bias'][0]), - get_scalar(raw['output_stage_bias'][1]), - raw['output_stage_bias_fingers'] - ) - raw['half_pload'] = ( - get_scalar(raw['half_pload_w']), - get_scalar(raw['half_pload_l']), - raw['half_pload_fingers'] - ) - # Cleanup 
temporary keys - keys_to_delete = [ - 'half_diffpair_fingers', 'diffpair_bias_fingers', - 'half_common_source_fingers', 'half_common_source_mults', - 'half_common_source_bias_fingers', 'half_common_source_bias_mults', - 'output_stage_fingers', 'output_stage_bias_fingers', 'half_pload_fingers', - 'half_diffpair_params_w','half_diffpair_params_l', - 'diffpair_bias_w','diffpair_bias_l', - 'half_common_source_params_w', 'half_common_source_params_l', - 'half_common_source_bias_w', 'half_common_source_bias_l', - 'half_pload_w', 'half_pload_l' - ] - for key in keys_to_delete: - raw.pop(key, None) - # Categorical OA sampling - only add parameters that circuits actually accept - if pcell == 'diff_pair': - # diff_pair accepts n_or_p_fet as boolean (True for nfet, False for pfet) - if 'type' in cat_oa: - raw['n_or_p_fet'] = cat_oa['type'][i] == 'nmos' - elif pcell == 'opamp': - # opamp accepts add_output_stage boolean - if 'add_output_stage' in cat_oa: - raw['add_output_stage'] = cat_oa['add_output_stage'][i] - # Skip other categorical parameters as most circuits don't accept them - - samples.append(raw) - return samples - - -# === Main Generation Flow === - - -def generate_all_samples(): - """Generate all samples for all PCells using the 8-hour runtime-aware budget from budgets_8h_runtime_aware_measuredTp_dpCorrected.json""" - # Sample counts from budgets_8h_runtime_aware_measuredTp_dpCorrected.json - # Total samples: 40,814 across 8 hours on 26 cores with 1.2x overhead - inventory_np = { - 'fvf' : 10886, # Flipped-voltage follower - 'txgate' : 3464, # Transmission gate - 'current_mirror': 7755, # Current mirror - 'diff_pair' : 9356, # Differential pair - 'lvcm' : 3503, # Low-V current mirror - 'opamp' : 5850, # Two-stage op-amp - } - - - # 2) List the PCells in the same order as your specs dicts: - pcells = ['fvf','txgate','current_mirror','diff_pair','lvcm','opamp'] - - # For reproducibility - using seed 1337 to match budget plan - random.seed(1337) - - - # 3) Loop over each PCell, pulling its LHS dim and inventory np: - all_samples = {} - for pcell in pcells: - # how many continuous dims for this PCell? 
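        # For example, 'fvf' lists width (count 2) and length (count 2) in
        # cont_specs, so its continuous LHS dimensionality below is d_p = 4.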
- d_p = sum(cnt for *_ , cnt in cont_specs[pcell]) - # override budget with inventory np - n_p = inventory_np[pcell] - - # Skip PCells with 0 samples - if n_p == 0: - all_samples[pcell] = [] - print(f"{pcell}: skipped (inventory np = 0)") - continue - - - # a) Continuous LHS + adaptive maximin - lhs_pts = lhs_maximin(d_p, n_p, patience=10*d_p, seed=42) - - - # b) Integer OA sampling (with fallback to random if N not divisible) - int_oa = {} - for name, mn, mx in int_specs.get(pcell, []): - levels = list(range(mn, mx + 1)) - s = len(levels) - if n_p % s == 0: - int_oa[name] = sample_integer_oa(mn, mx, n_p, seed=hash(f"{pcell}_{name}")) - else: - # Fallback to random sampling for integers - print(f"Warning: {pcell} has {n_p} samples, not divisible by {s} levels for {name}, using random sampling") - random.seed(hash(f"{pcell}_{name}")) - int_oa[name] = [random.randint(mn, mx) for _ in range(n_p)] - - - # c) OA categoricals - cat_oa = {} - for name, levels in cat_specs: - # For OA to work, N must be divisible by number of levels - s = len(levels) - if n_p % s == 0: - cat_oa[name] = sample_categorical_oa(levels, n_p, seed=hash(f"{pcell}_{name}")) - else: - # If N is not divisible, fall back to random for this categorical - print(f"Warning: {pcell} has {n_p} samples, not divisible by {s} levels for {name}, using random sampling") - cat_oa[name] = [random.choice(levels) for _ in range(n_p)] - - - # d) Merge into full mixed-level samples - samples = generate_mixed_samples(pcell, lhs_pts, int_oa, cat_oa) - all_samples[pcell] = samples - - - print(f"{pcell}: generated {len(samples)} samples (inventory np = {n_p})") - # Print a few examples for verification - print(f"First 3 samples for {pcell}:") - for s in samples[:3]: - print(s) - print() - - - return all_samples - - -# Generate samples at module level so they can be imported -all_samples = generate_all_samples() - - -if __name__ == "__main__": - import json - import os - - # Save samples to JSON files - # output_dir = os.path.join(os.path.dirname(__file__), "gen_params_32hr") - output_dir = os.path.join(os.path.dirname(__file__), "gen_params_8h_runtime_aware") - os.makedirs(output_dir, exist_ok=True) - - for pcell, samples in all_samples.items(): - # Match naming style used for other datasets - fname = f"{pcell}_params.json" - output_file = os.path.join(output_dir, fname) - with open(output_file, 'w') as f: - json.dump(samples, f, indent=2) - print(f"Saved {len(samples)} samples to {output_file}") - - print("\n8-hour runtime-aware dataset generation with budget-prescribed sample counts completed.") - print("Sample counts:") - for pcell, samples in all_samples.items(): - print(f" {pcell}: {len(samples)} samples") - print("\nTotal samples across all PCells:", sum(len(samples) for samples in all_samples.values())) - print("Expected total from budget: 40,814 samples") - diff --git a/src/glayout/blocks/ATLAS/evaluator_box/evaluator_wrapper.py b/src/glayout/blocks/ATLAS/evaluator_box/evaluator_wrapper.py deleted file mode 100644 index f8897ddf..00000000 --- a/src/glayout/blocks/ATLAS/evaluator_box/evaluator_wrapper.py +++ /dev/null @@ -1,77 +0,0 @@ -# comprehensive evaluator -# comprehensive evaluator -import os -import json -import logging -from datetime import datetime -from pathlib import Path -from gdsfactory.typings import Component - -from verification import run_verification -from physical_features import run_physical_feature_extraction - -def get_next_filename(base_name="evaluation", extension=".json"): - """ - Generates the next available 
filename with a numerical suffix, starting from 1. - e.g., base_name_1.json, base_name_2.json, etc. - """ - i = 1 - while True: - filename = f"{base_name}_{i}{extension}" - if not os.path.exists(filename): - return filename - i += 1 - -def run_evaluation(layout_path: str, component_name: str, top_level: Component) -> dict: - """ - The main evaluation wrapper. Runs all evaluation modules and combines results. - """ - print(f"--- Starting Comprehensive Evaluation for {component_name} ---") - - # Deletes known intermediate and report files for a given component to ensure a clean run. - print(f"Cleaning up intermediate files for component '{component_name}'...") - - files_to_delete = [ - f"{component_name}.res.ext", - f"{component_name}.lvs.rpt", - f"{component_name}_lvs.rpt", - f"{component_name}.nodes", - f"{component_name}.sim", - f"{component_name}.pex.spice", - f"{component_name}_pex.spice" - ] - - for f_path in files_to_delete: - try: - if os.path.exists(f_path): - os.remove(f_path) - print(f" - Deleted: {f_path}") - except OSError as e: - print(f" - Warning: Could not delete {f_path}. Error: {e}") - - # Run verification module - print("Running verification checks (DRC, LVS)...") - verification_results = run_verification(layout_path, component_name, top_level) - - # Run physical features module - print("Running physical feature extraction (PEX, Area, Symmetry)...") - physical_results = run_physical_feature_extraction(layout_path, component_name, top_level) - - # Combine results into a single dictionary - final_results = { - "component_name": component_name, - "timestamp": datetime.now().isoformat(), - "drc_lvs_fail": not (verification_results["drc"]["is_pass"] and verification_results["lvs"]["is_pass"]), - **verification_results, - **physical_results - } - - # Generate the output JSON filename - output_filename = get_next_filename(base_name=component_name, extension=".json") - - # Write the results dictionary to a JSON file - with open(output_filename, 'w') as json_file: - json.dump(final_results, json_file, indent=4) - print(f"--- Evaluation complete. 
Results saved to {output_filename} ---") - - return final_results diff --git a/src/glayout/blocks/ATLAS/evaluator_box/physical_features.py b/src/glayout/blocks/ATLAS/evaluator_box/physical_features.py deleted file mode 100644 index ed6ab76f..00000000 --- a/src/glayout/blocks/ATLAS/evaluator_box/physical_features.py +++ /dev/null @@ -1,114 +0,0 @@ -# physical_features.py -import os -import re -import subprocess -import shutil -from pathlib import Path -from gdsfactory.typings import Component -from gdsfactory.geometry.boolean import boolean - -def calculate_area(component: Component) -> float: - """Calculates the area of a gdsfactory Component.""" - return float(component.area()) - -def _mirror_and_xor(component: Component, axis: str) -> float: - """Helper to perform mirroring and XOR for symmetry calculation.""" - # --- Operate on a copy to prevent modifying the original --- - comp_copy = component.copy() - comp_copy.unlock() - - mirrored_ref = comp_copy.copy() - if axis == 'vertical': - mirrored_ref = mirrored_ref.mirror((0, -100), (0, 100)) - elif axis == 'horizontal': - mirrored_ref = mirrored_ref.mirror((-100, 0), (100, 0)) - else: - return 0.0 - - # Pass the copies to the boolean operation - asymmetry_layout = boolean(A=comp_copy, B=mirrored_ref, operation="xor") - return float(asymmetry_layout.area()) - -def calculate_symmetry_scores(component: Component) -> tuple[float, float]: - """Calculates horizontal and vertical symmetry scores (1.0 = perfect symmetry).""" - original_area = calculate_area(component) - if original_area == 0: - return (1.0, 1.0) - - asymmetry_y_area = _mirror_and_xor(component, 'horizontal') - asymmetry_x_area = _mirror_and_xor(component, 'vertical') - - symmetry_score_horizontal = 1.0 - (asymmetry_x_area / original_area) - symmetry_score_vertical = 1.0 - (asymmetry_y_area / original_area) - return symmetry_score_horizontal, symmetry_score_vertical - -def _parse_simple_parasitics(component_name: str) -> tuple[float, float]: - """Parses total parasitic R and C from a SPICE file by simple summation.""" - total_resistance = 0.0 - total_capacitance = 0.0 - spice_file_path = f"{component_name}_pex.spice" - if not os.path.exists(spice_file_path): - return 0.0, 0.0 - with open(spice_file_path, 'r') as f: - for line in f: - orig_line = line.strip() # Keep original case for capacitor parsing - line = line.strip().upper() - parts = line.split() - orig_parts = orig_line.split() # Original case parts for capacitor values - if not parts: continue - - name = parts[0] - if name.startswith('R') and len(parts) >= 4: - try: total_resistance += float(parts[3]) - except (ValueError): continue - elif name.startswith('C') and len(parts) >= 4: - try: - cap_str = orig_parts[3] # Use original case for capacitor value - unit = cap_str[-1] - val_str = cap_str[:-1] - if unit == 'F': cap_value = float(val_str) * 1e-15 - elif unit == 'P': cap_value = float(val_str) * 1e-12 - elif unit == 'N': cap_value = float(val_str) * 1e-9 - elif unit == 'U': cap_value = float(val_str) * 1e-6 - elif unit == 'f': cap_value = float(val_str) * 1e-15 # femtofarads - else: cap_value = float(cap_str) - total_capacitance += cap_value - except (ValueError): continue - return total_resistance, total_capacitance - -def run_physical_feature_extraction(layout_path: str, component_name: str, top_level: Component) -> dict: - """ - Runs PEX and calculates geometric features, returning a structured result. 
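    The returned dict has two keys: "pex" (status plus the summed parasitic
    resistance/capacitance) and "geometric" (raw area and horizontal/vertical
    symmetry scores), matching the physical_results template below.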
- """ - physical_results = { - "pex": {"status": "not run", "total_resistance_ohms": 0.0, "total_capacitance_farads": 0.0}, - "geometric": {"raw_area_um2": 0.0, "symmetry_score_horizontal": 0.0, "symmetry_score_vertical": 0.0} - } - - # PEX and Parasitics - try: - pex_spice_path = f"{component_name}_pex.spice" - if os.path.exists(pex_spice_path): - os.remove(pex_spice_path) - subprocess.run(["./run_pex.sh", layout_path, component_name], check=True, capture_output=True, text=True) - physical_results["pex"]["status"] = "PEX Complete" - total_res, total_cap = _parse_simple_parasitics(component_name) - physical_results["pex"]["total_resistance_ohms"] = total_res - physical_results["pex"]["total_capacitance_farads"] = total_cap - except subprocess.CalledProcessError as e: - physical_results["pex"]["status"] = f"PEX Error: {e.stderr}" - except FileNotFoundError: - physical_results["pex"]["status"] = "PEX Error: run_pex.sh not found." - except Exception as e: - physical_results["pex"]["status"] = f"PEX Unexpected Error: {e}" - - # Geometric Features - try: - physical_results["geometric"]["raw_area_um2"] = calculate_area(top_level) - sym_h, sym_v = calculate_symmetry_scores(top_level) - physical_results["geometric"]["symmetry_score_horizontal"] = sym_h - physical_results["geometric"]["symmetry_score_vertical"] = sym_v - except Exception as e: - print(f"Warning: Could not calculate geometric features. Error: {e}") - - return physical_results \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/evaluator_box/run_pex.sh b/src/glayout/blocks/ATLAS/evaluator_box/run_pex.sh deleted file mode 100644 index e7a32fd6..00000000 --- a/src/glayout/blocks/ATLAS/evaluator_box/run_pex.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash - -# Usage: ./run_pex.sh layout.gds layout_cell_name - -GDS_FILE=$1 -LAYOUT_CELL=$2 - -magic -rcfile ./sky130A.magicrc -noconsole -dnull << EOF -gds read $GDS_FILE -flatten $LAYOUT_CELL -load $LAYOUT_CELL -select top cell -extract do local -extract all -ext2sim labels on -ext2sim -extresist tolerance 10 -extresist -ext2spice lvs -ext2spice cthresh 0 -ext2spice extresist on -ext2spice -o ${LAYOUT_CELL}_pex.spice -exit -EOF \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/evaluator_box/verification.py b/src/glayout/blocks/ATLAS/evaluator_box/verification.py deleted file mode 100644 index 09e83a91..00000000 --- a/src/glayout/blocks/ATLAS/evaluator_box/verification.py +++ /dev/null @@ -1,174 +0,0 @@ -# verification.py -import os -import re -import subprocess -import shutil -import tempfile -import sys -from pathlib import Path -from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk -from gdsfactory.typings import Component - -def parse_drc_report(report_content: str) -> dict: - """ - Parses a Magic DRC report into a machine-readable format. 
- """ - errors = [] - current_rule = "" - for line in report_content.strip().splitlines(): - stripped_line = line.strip() - if stripped_line == "----------------------------------------": - continue - if re.match(r"^[a-zA-Z]", stripped_line): - current_rule = stripped_line - elif re.match(r"^[0-9]", stripped_line): - errors.append({"rule": current_rule, "details": stripped_line}) - - is_pass = len(errors) == 0 - if not is_pass and re.search(r"count:\s*0\s*$", report_content, re.IGNORECASE): - is_pass = True - - return { - "is_pass": is_pass, - "total_errors": len(errors), - "error_details": errors - } - -def parse_lvs_report(report_content: str) -> dict: - """ - Parses the raw netgen LVS report and returns a summarized, machine-readable format. - Focuses on parsing net and instance mismatches. - """ - summary = { - "is_pass": False, - "conclusion": "LVS failed or report was inconclusive.", - "total_mismatches": 0, - "mismatch_details": { - "nets": "Not found", - "devices": "Not found", - "unmatched_nets_parsed": [], - "unmatched_instances_parsed": [] - } - } - - # Primary check for LVS pass/fail - if "Netlists match" in report_content or "Circuits match uniquely" in report_content: - summary["is_pass"] = True - summary["conclusion"] = "LVS Pass: Netlists match." - elif "Netlist mismatch" in report_content or "Netlists do not match" in report_content: - summary["conclusion"] = "LVS Fail: Netlist mismatch." - - for line in report_content.splitlines(): - line = line.strip() - - # Parse net mismatches - net_mismatch_match = re.search(r"Net:\s*([^\|]+)\s*\|\s*\((no matching net)\)", line) - if net_mismatch_match: - name_left = net_mismatch_match.group(1).strip() - # If name is on the left, it's in layout, missing in schematic - summary["mismatch_details"]["unmatched_nets_parsed"].append({ - "type": "net", - "name": name_left, - "present_in": "layout", - "missing_in": "schematic" - }) - continue - - # Parse instance mismatches - instance_mismatch_match = re.search(r"Instance:\s*([^\|]+)\s*\|\s*\((no matching instance)\)", line) - if instance_mismatch_match: - name_left = instance_mismatch_match.group(1).strip() - # If name is on the left, it's in layout, missing in schematic - summary["mismatch_details"]["unmatched_instances_parsed"].append({ - "type": "instance", - "name": name_left, - "present_in": "layout", - "missing_in": "schematic" - }) - continue - - # Also capture cases where something is present in schematic but missing in layout (right side of '|') - net_mismatch_right_match = re.search(r"\s*\|\s*([^\|]+)\s*\((no matching net)\)", line) - if net_mismatch_right_match: - name_right = net_mismatch_right_match.group(1).strip() - # If name is on the right, it's in schematic, missing in layout - summary["mismatch_details"]["unmatched_nets_parsed"].append({ - "type": "net", - "name": name_right, - "present_in": "schematic", - "missing_in": "layout" - }) - continue - - instance_mismatch_right_match = re.search(r"\s*\|\s*([^\|]+)\s*\((no matching instance)\)", line) - if instance_mismatch_right_match: - name_right = instance_mismatch_right_match.group(1).strip() - # If name is on the right, it's in schematic, missing in layout - summary["mismatch_details"]["unmatched_instances_parsed"].append({ - "type": "instance", - "name": name_right, - "present_in": "schematic", - "missing_in": "layout" - }) - continue - - # Capture summary lines like "Number of devices:" and "Number of nets:" - if "Number of devices:" in line: - summary["mismatch_details"]["devices"] = line.split(":", 1)[1].strip() if ":" 
in line else line - elif "Number of nets:" in line: - summary["mismatch_details"]["nets"] = line.split(":", 1)[1].strip() if ":" in line else line - - # Calculate total mismatches - summary["total_mismatches"] = len(summary["mismatch_details"]["unmatched_nets_parsed"]) + \ - len(summary["mismatch_details"]["unmatched_instances_parsed"]) - - # If there are any mismatches found, then LVS fails, regardless of "Netlists match" string. - if summary["total_mismatches"] > 0: - summary["is_pass"] = False - if "LVS Pass" in summary["conclusion"]: # If conclusion still says pass, update it - summary["conclusion"] = "LVS Fail: Mismatches found." - - return summary - -def run_verification(layout_path: str, component_name: str, top_level: Component) -> dict: - """ - Runs DRC and LVS checks and returns a structured result dictionary. - """ - verification_results = { - "drc": {"status": "not run", "is_pass": False, "report_path": None, "summary": {}}, - "lvs": {"status": "not run", "is_pass": False, "report_path": None, "summary": {}} - } - - # DRC Check - drc_report_path = os.path.abspath(f"./{component_name}.drc.rpt") - verification_results["drc"]["report_path"] = drc_report_path - try: - if os.path.exists(drc_report_path): - os.remove(drc_report_path) - sky130_mapped_pdk.drc_magic(layout_path, component_name, output_file=drc_report_path) - report_content = "" - if os.path.exists(drc_report_path): - with open(drc_report_path, 'r') as f: - report_content = f.read() - summary = parse_drc_report(report_content) - verification_results["drc"].update({"summary": summary, "is_pass": summary["is_pass"], "status": "pass" if summary["is_pass"] else "fail"}) - except Exception as e: - verification_results["drc"]["status"] = f"error: {e}" - - # LVS Check - lvs_report_path = os.path.abspath(f"./{component_name}.lvs.rpt") - verification_results["lvs"]["report_path"] = lvs_report_path - try: - if os.path.exists(lvs_report_path): - os.remove(lvs_report_path) - sky130_mapped_pdk.lvs_netgen(layout=top_level, design_name=component_name, output_file_path=lvs_report_path) - report_content = "" - if os.path.exists(lvs_report_path): - with open(lvs_report_path, 'r') as report_file: - report_content = report_file.read() - lvs_summary = parse_lvs_report(report_content) - verification_results["lvs"].update({"summary": lvs_summary, "is_pass": lvs_summary["is_pass"], "status": "pass" if lvs_summary["is_pass"] else "fail"}) - except Exception as e: - verification_results["lvs"]["status"] = f"error: {e}" - - return verification_results \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/evaluator_wrapper.py b/src/glayout/blocks/ATLAS/evaluator_wrapper.py deleted file mode 100644 index d378794a..00000000 --- a/src/glayout/blocks/ATLAS/evaluator_wrapper.py +++ /dev/null @@ -1,77 +0,0 @@ -# comprehensive evaluator -# comprehensive evaluator -import os -import json -import logging -from datetime import datetime -from pathlib import Path -from gdsfactory.typings import Component - -from robust_verification import run_robust_verification -from glayout.flow.blocks.evaluator_box.physical_features import run_physical_feature_extraction - -def get_next_filename(base_name="evaluation", extension=".json"): - """ - Generates the next available filename with a numerical suffix, starting from 1. - e.g., base_name_1.json, base_name_2.json, etc. 
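    For example, if evaluation_1.json already exists on disk, the next call with
    the default base_name returns "evaluation_2.json".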
- """ - i = 1 - while True: - filename = f"{base_name}_{i}{extension}" - if not os.path.exists(filename): - return filename - i += 1 - -def run_evaluation(layout_path: str, component_name: str, top_level: Component) -> dict: - """ - The main evaluation wrapper. Runs all evaluation modules and combines results. - """ - print(f"--- Starting Comprehensive Evaluation for {component_name} ---") - - # Deletes known intermediate and report files for a given component to ensure a clean run. - print(f"Cleaning up intermediate files for component '{component_name}'...") - - files_to_delete = [ - f"{component_name}.res.ext", - f"{component_name}.lvs.rpt", - f"{component_name}_lvs.rpt", - f"{component_name}.nodes", - f"{component_name}.sim", - f"{component_name}.pex.spice", - f"{component_name}_pex.spice" - ] - - for f_path in files_to_delete: - try: - if os.path.exists(f_path): - os.remove(f_path) - print(f" - Deleted: {f_path}") - except OSError as e: - print(f" - Warning: Could not delete {f_path}. Error: {e}") - - # Run verification module - print("Running verification checks (DRC, LVS)...") - verification_results = run_robust_verification(layout_path, component_name, top_level) - - # Run physical features module - print("Running physical feature extraction (PEX, Area, Symmetry)...") - physical_results = run_physical_feature_extraction(layout_path, component_name, top_level) - - # Combine results into a single dictionary - final_results = { - "component_name": component_name, - "timestamp": datetime.now().isoformat(), - "drc_lvs_fail": not (verification_results["drc"]["is_pass"] and verification_results["lvs"]["is_pass"]), - **verification_results, - **physical_results - } - - # Generate the output JSON filename - output_filename = get_next_filename(base_name=component_name, extension=".json") - - # Write the results dictionary to a JSON file - with open(output_filename, 'w') as json_file: - json.dump(final_results, json_file, indent=4) - print(f"--- Evaluation complete. 
Results saved to {output_filename} ---") - - return final_results diff --git a/src/glayout/blocks/ATLAS/fvf.py b/src/glayout/blocks/ATLAS/fvf.py deleted file mode 100644 index 106a932d..00000000 --- a/src/glayout/blocks/ATLAS/fvf.py +++ /dev/null @@ -1,205 +0,0 @@ -from glayout.flow.pdk.mappedpdk import MappedPDK -from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk -from gdsfactory.cell import cell -from gdsfactory.component import Component -from gdsfactory import Component -from glayout.flow.primitives.fet import nmos, pmos, multiplier -from glayout.flow.pdk.util.comp_utils import evaluate_bbox, prec_center, prec_ref_center, align_comp_to_port -from glayout.flow.pdk.util.snap_to_grid import component_snap_to_grid -from glayout.flow.pdk.util.port_utils import rename_ports_by_orientation -from glayout.flow.routing.straight_route import straight_route -from glayout.flow.routing.c_route import c_route -from glayout.flow.routing.L_route import L_route -from glayout.flow.primitives.guardring import tapring -from glayout.flow.pdk.util.port_utils import add_ports_perimeter -from glayout.flow.spice.netlist import Netlist -from glayout.flow.primitives.via_gen import via_stack -from gdsfactory.components import text_freetype, rectangle -from evaluator_wrapper import run_evaluation # CUSTOM IMPLEMENTED EVAL BOX - -def get_component_netlist(component): - """Helper function to get netlist object from component info, compatible with all gdsfactory versions""" - from glayout.flow.spice.netlist import Netlist - - # Try to get stored object first (for older gdsfactory versions) - if 'netlist_obj' in component.info: - return component.info['netlist_obj'] - - # Try to reconstruct from netlist_data (for newer gdsfactory versions) - if 'netlist_data' in component.info: - data = component.info['netlist_data'] - netlist = Netlist( - circuit_name=data['circuit_name'], - nodes=data['nodes'] - ) - netlist.source_netlist = data['source_netlist'] - return netlist - - # Fallback: return the string representation (should not happen in normal operation) - return component.info.get('netlist', '') - -def fvf_netlist(fet_1: Component, fet_2: Component) -> Netlist: - - netlist = Netlist(circuit_name='FLIPPED_VOLTAGE_FOLLOWER', nodes=['VIN', 'VBULK', 'VOUT', 'Ib']) - - # Use helper function to get netlist objects regardless of gdsfactory version - fet_1_netlist = get_component_netlist(fet_1) - fet_2_netlist = get_component_netlist(fet_2) - netlist.connect_netlist(fet_1_netlist, [('D', 'Ib'), ('G', 'VIN'), ('S', 'VOUT'), ('B', 'VBULK')]) - netlist.connect_netlist(fet_2_netlist, [('D', 'VOUT'), ('G', 'Ib'), ('S', 'VBULK'), ('B', 'VBULK')]) - - return netlist - -def sky130_add_fvf_labels(fvf_in: Component) -> Component: - - fvf_in.unlock() - # define layers` - met1_pin = (68,16) - met1_label = (68,5) - met2_pin = (69,16) - met2_label = (69,5) - # list that will contain all port/comp info - move_info = list() - # create labels and append to info list - # gnd - gnd2label = rectangle(layer=met1_pin,size=(0.5,0.5),centered=True).copy() - gnd2label.add_label(text="VBULK",layer=met1_label) - move_info.append((gnd2label,fvf_in.ports["B_tie_N_top_met_N"],None)) - - #currentbias - ibiaslabel = rectangle(layer=met2_pin,size=(0.5,0.5),centered=True).copy() - ibiaslabel.add_label(text="Ib",layer=met2_label) - move_info.append((ibiaslabel,fvf_in.ports["A_drain_bottom_met_N"],None)) - - # output (3rd stage) - outputlabel = rectangle(layer=met2_pin,size=(0.5,0.5),centered=True).copy() - 
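    # Note: each port here gets a 0.5 x 0.5 pin rectangle on the *_pin layer
    # plus a text label on the matching *_label layer; this labeling is what
    # lets the DRC/LVS tools recognize the port names during verification.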
outputlabel.add_label(text="VOUT",layer=met2_label) - move_info.append((outputlabel,fvf_in.ports["A_source_bottom_met_N"],None)) - - # input - inputlabel = rectangle(layer=met1_pin,size=(0.5,0.5),centered=True).copy() - inputlabel.add_label(text="VIN",layer=met1_label) - move_info.append((inputlabel,fvf_in.ports["A_multiplier_0_gate_N"], None)) - - # move everything to position - for comp, prt, alignment in move_info: - alignment = ('c','b') if alignment is None else alignment - compref = align_comp_to_port(comp, prt, alignment=alignment) - fvf_in.add(compref) - return fvf_in.flatten() - -@cell -def flipped_voltage_follower( - pdk: MappedPDK, - device_type: str = "nmos", - placement: str = "horizontal", - width: tuple[float,float] = (6.605703928526579, 3.713220935212418), - length: tuple[float,float] = (2.3659471990041707, 1.9639325665440608), - fingers: tuple[int,int] = (1, 1), - multipliers: tuple[int,int] = (2, 2), - dummy_1: tuple[bool,bool] = (True,True), - dummy_2: tuple[bool,bool] = (True,True), - tie_layers1: tuple[str,str] = ("met2","met1"), - tie_layers2: tuple[str,str] = ("met2","met1"), - sd_rmult: int=1, - **kwargs - ) -> Component: - """ - creates a Flipped Voltage Follower - pdk: pdk to use - device_type: either "nmos" or "pmos" - placement: either "horizontal" or "vertical" - width: (input fet, feedback fet) - length: (input fet, feedback fet) - fingers: (input fet, feedback fet) - multipliers: (input fet, feedback fet) - dummy_1: dummy for input fet - dummy_2: dummy for feedback fet - tie_layers1: tie layers for input fet - tie_layers2: tie layers for feedback fet - sd_rmult: sd_rmult for both fets - **kwargs: any kwarg that is supported by nmos and pmos - """ - - #top level component - top_level = Component(name="flipped_voltage_follower") - - #two fets - device_map = { - "nmos": nmos, - "pmos": pmos, - } - device = device_map.get(device_type) - - if device_type == "nmos": - kwargs["with_dnwell"] = False # Set the parameter dynamically - - - fet_1 = device(pdk, width=width[0], fingers=fingers[0], multipliers=multipliers[0], with_dummy=dummy_1, with_substrate_tap=False, length=length[0], tie_layers=tie_layers1, sd_rmult=sd_rmult, **kwargs) - fet_2 = device(pdk, width=width[1], fingers=fingers[1], multipliers=multipliers[1], with_dummy=dummy_2, with_substrate_tap=False, length=length[1], tie_layers=tie_layers2, sd_rmult=sd_rmult, **kwargs) - well = "pwell" if device == nmos else "nwell" - fet_1_ref = top_level << fet_1 - fet_2_ref = top_level << fet_2 - - #Relative move - ref_dimensions = evaluate_bbox(fet_2) - if placement == "horizontal": - fet_2_ref.movex(fet_1_ref.xmax + ref_dimensions[0]/2 + pdk.util_max_metal_seperation()-0.5) - if placement == "vertical": - fet_2_ref.movey(fet_1_ref.ymin - ref_dimensions[1]/2 - pdk.util_max_metal_seperation()-1) - - #Routing - viam2m3 = via_stack(pdk, "met2", "met3", centered=True) - drain_1_via = top_level << viam2m3 - source_1_via = top_level << viam2m3 - drain_2_via = top_level << viam2m3 - gate_2_via = top_level << viam2m3 - drain_1_via.move(fet_1_ref.ports["multiplier_0_drain_W"].center).movex(-0.5*evaluate_bbox(fet_1)[1]) - source_1_via.move(fet_1_ref.ports["multiplier_0_source_E"].center).movex(1.5) - drain_2_via.move(fet_2_ref.ports["multiplier_0_drain_W"].center).movex(-1.5) - gate_2_via.move(fet_2_ref.ports["multiplier_0_gate_E"].center).movex(1) - - top_level << straight_route(pdk, fet_1_ref.ports["multiplier_0_source_E"], source_1_via.ports["bottom_met_W"]) - top_level << straight_route(pdk, 
fet_2_ref.ports["multiplier_0_drain_W"], drain_2_via.ports["bottom_met_E"]) - top_level << c_route(pdk, source_1_via.ports["top_met_N"], drain_2_via.ports["top_met_N"], extension=1.2*max(width[0],width[1]), e1glayer="met3", e2glayer="met3", cglayer="met2") - top_level << straight_route(pdk, fet_1_ref.ports["multiplier_0_drain_W"], drain_1_via.ports["bottom_met_E"]) - top_level << c_route(pdk, drain_1_via.ports["top_met_S"], gate_2_via.ports["top_met_S"], extension=1.2*max(width[0],width[1]), cglayer="met2") - top_level << straight_route(pdk, fet_2_ref.ports["multiplier_0_gate_E"], gate_2_via.ports["bottom_met_W"]) - try: - top_level << straight_route(pdk, fet_2_ref.ports["multiplier_0_source_W"], fet_2_ref.ports["tie_W_top_met_W"], glayer1=tie_layers2[1], width=0.2*sd_rmult, fullbottom=True) - except: - pass - #Renaming Ports - top_level.add_ports(fet_1_ref.get_ports_list(), prefix="A_") - top_level.add_ports(fet_2_ref.get_ports_list(), prefix="B_") - top_level.add_ports(drain_1_via.get_ports_list(), prefix="A_drain_") - top_level.add_ports(source_1_via.get_ports_list(), prefix="A_source_") - top_level.add_ports(drain_2_via.get_ports_list(), prefix="B_drain_") - top_level.add_ports(gate_2_via.get_ports_list(), prefix="B_gate_") - #add nwell - if well == "nwell": - top_level.add_padding(layers=(pdk.get_glayer("nwell"),),default= 1 ) - - component = component_snap_to_grid(rename_ports_by_orientation(top_level)) - #component = rename_ports_by_orientation(top_level) - - # Store netlist as string to avoid gymnasium info dict type restrictions - # Compatible with both gdsfactory 7.7.0 and 7.16.0+ strict Pydantic validation - netlist_obj = fvf_netlist(fet_1, fet_2) - component.info['netlist'] = str(netlist_obj) - # Store serialized netlist data for reconstruction if needed - component.info['netlist_data'] = { - 'circuit_name': netlist_obj.circuit_name, - 'nodes': netlist_obj.nodes, - 'source_netlist': netlist_obj.source_netlist - } - - return component - -if __name__=="__main__": - fvf = sky130_add_fvf_labels(flipped_voltage_follower(sky130_mapped_pdk, width=(2,1), sd_rmult=3)) - fvf.show() - fvf.name = "fvf" - fvf_gds = fvf.write_gds("fvf.gds") - result = run_evaluation("fvf.gds",fvf.name,fvf) - print(result) \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/getStarted.sh b/src/glayout/blocks/ATLAS/getStarted.sh deleted file mode 100644 index 6ee1090a..00000000 --- a/src/glayout/blocks/ATLAS/getStarted.sh +++ /dev/null @@ -1,4 +0,0 @@ -conda activate GLdev -export PDK_ROOT=/opt/conda/envs/GLdev/share/pdk -cd /home/arnavshukla/OpenFASOC/openfasoc/generators/glayout/glayout/flow/blocks/elementary/LHS -chmod +x run_pex.sh \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/install_dependencies.py b/src/glayout/blocks/ATLAS/install_dependencies.py deleted file mode 100644 index 7a72e8ca..00000000 --- a/src/glayout/blocks/ATLAS/install_dependencies.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python3 -""" -Installation verification and fix script for OpenFASOC transmission gate dataset generation. -Checks and installs missing dependencies, specifically handling the PrettyPrint issue. 
-""" - -import subprocess -import sys -import importlib.util - -def check_and_install_package(package_name, import_name=None): - """Check if a package is installed, and install if missing""" - if import_name is None: - import_name = package_name - - try: - spec = importlib.util.find_spec(import_name) - if spec is not None: - print(f"โœ… {package_name} is already installed") - return True - except ImportError: - pass - - print(f"โŒ {package_name} is missing. Installing...") - try: - subprocess.check_call([sys.executable, "-m", "pip", "install", package_name]) - print(f"โœ… Successfully installed {package_name}") - return True - except subprocess.CalledProcessError: - print(f"โŒ Failed to install {package_name}") - return False - -def main(): - """Main installation verification function""" - print("๐Ÿ”ง OpenFASOC Dependency Checker and Installer") - print("=" * 50) - - # Check gdsfactory version - try: - import gdsfactory - version = gdsfactory.__version__ - print(f"๐Ÿ“ฆ gdsfactory version: {version}") - - # Parse version to check if it's 7.16.0+ - version_parts = [int(x) for x in version.split('.')] - if version_parts[0] > 7 or (version_parts[0] == 7 and version_parts[1] >= 16): - print("โ„น๏ธ Using gdsfactory 7.16.0+ with strict Pydantic validation") - print("โ„น๏ธ The updated fix handles this version properly") - else: - print("โ„น๏ธ Using older gdsfactory version with relaxed validation") - except ImportError: - print("โŒ gdsfactory not found") - return False - - # Check required packages - packages_to_check = [ - ("prettyprinttree", "prettyprinttree"), - ("prettyprint", "prettyprint"), - ("gymnasium", "gymnasium"), # Also check for gymnasium - ] - - print("\n๐Ÿ“‹ Checking required packages...") - all_good = True - - for package_name, import_name in packages_to_check: - success = check_and_install_package(package_name, import_name) - if not success: - all_good = False - - # Special check for PrettyPrint import issue - print("\n๐Ÿ” Testing PrettyPrint imports...") - try: - from prettyprinttree import PrettyPrintTree - print("โœ… prettyprinttree import works correctly") - except ImportError: - try: - from PrettyPrint import PrettyPrintTree - print("โœ… PrettyPrint import works (older style)") - except ImportError: - print("โŒ Neither prettyprinttree nor PrettyPrint imports work") - print("๐Ÿ’ก Installing prettyprinttree...") - success = check_and_install_package("prettyprinttree") - if not success: - all_good = False - - # Summary - print("\n" + "=" * 50) - if all_good: - print("๐ŸŽ‰ All dependencies are properly installed!") - print("โœ… Your environment should now work with the transmission gate dataset generation") - print("\n๐Ÿ“ Next steps:") - print("1. Run the test script: python test_comprehensive_fix.py") - print("2. 
If tests pass, run: python generate_tg_1000_dataset.py") - else: - print("โš ๏ธ Some dependencies are missing or failed to install") - print("๐Ÿ’ก Please install them manually:") - print(" pip install prettyprinttree prettyprint gymnasium") - - return all_good - -if __name__ == "__main__": - success = main() - sys.exit(0 if success else 1) diff --git a/src/glayout/blocks/ATLAS/lvcm.py b/src/glayout/blocks/ATLAS/lvcm.py deleted file mode 100644 index 9e85ec6b..00000000 --- a/src/glayout/blocks/ATLAS/lvcm.py +++ /dev/null @@ -1,199 +0,0 @@ -from glayout.flow.pdk.mappedpdk import MappedPDK -from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk -from gdsfactory.component import Component -from gdsfactory.component_reference import ComponentReference -from gdsfactory.cell import cell -from gdsfactory import Component -from gdsfactory.components import text_freetype, rectangle -from glayout.flow.primitives.fet import nmos, pmos, multiplier -from glayout.flow.pdk.util.comp_utils import evaluate_bbox, prec_center, align_comp_to_port, prec_ref_center -from glayout.flow.pdk.util.snap_to_grid import component_snap_to_grid -from glayout.flow.pdk.util.port_utils import rename_ports_by_orientation -from glayout.flow.routing.straight_route import straight_route -from glayout.flow.routing.c_route import c_route -from glayout.flow.routing.L_route import L_route -from glayout.flow.primitives.guardring import tapring -from glayout.flow.pdk.util.port_utils import add_ports_perimeter -from glayout.flow.spice.netlist import Netlist -from glayout.flow.blocks.elementary.LHS.fvf import fvf_netlist, flipped_voltage_follower -from glayout.flow.primitives.via_gen import via_stack -from typing import Optional -from evaluator_wrapper import run_evaluation - - -def add_lvcm_labels(lvcm_in: Component, - pdk: MappedPDK - ) -> Component: - - lvcm_in.unlock() - - met2_pin = (68,16) - met2_label = (68,5) - met3_pin = (69,16) - met3_label = (69,5) - # list that will contain all port/comp info - move_info = list() - # create labels and append to info list - # gnd - gndlabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.5,0.5),centered=True).copy() - gndlabel.add_label(text="GND",layer=pdk.get_glayer("met2_label")) - move_info.append((gndlabel,lvcm_in.ports["M_1_B_tie_N_top_met_N"],None)) - - #currentbias - ibias1label = rectangle(layer=pdk.get_glayer("met3_pin"),size=(0.5,0.5),centered=True).copy() - ibias1label.add_label(text="IBIAS1",layer=pdk.get_glayer("met3_label")) - move_info.append((ibias1label,lvcm_in.ports["M_1_A_drain_bottom_met_N"],None)) - - ibias2label = rectangle(layer=pdk.get_glayer("met3_pin"),size=(0.5,0.5),centered=True).copy() - ibias2label.add_label(text="IBIAS2",layer=pdk.get_glayer("met3_label")) - move_info.append((ibias2label,lvcm_in.ports["M_2_A_drain_bottom_met_N"],None)) - - # output - output1label = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - output1label.add_label(text="IOUT1",layer=pdk.get_glayer("met2_label")) - move_info.append((output1label,lvcm_in.ports["M_3_A_multiplier_0_drain_N"],None)) - - output2label = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy() - output2label.add_label(text="IOUT2",layer=pdk.get_glayer("met2_label")) - move_info.append((output2label,lvcm_in.ports["M_4_A_multiplier_0_drain_N"],None)) - - # move everything to position - for comp, prt, alignment in move_info: - alignment = ('c','b') if alignment is None else alignment - compref = align_comp_to_port(comp, prt, alignment=alignment) 
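The label-placement loop here follows the same idiom used in fvf.py and opamp.py: draw a small pin rectangle, attach a text label, and align the pair to a named port. A minimal, self-contained sketch of that pattern; the helper name, port name, and default layer tuples are illustrative assumptions, not part of the original code:

```python
from gdsfactory.component import Component
from gdsfactory.components import rectangle
from glayout.flow.pdk.util.comp_utils import align_comp_to_port

def add_pin_label(comp: Component, port_name: str, text: str,
                  pin_layer=(68, 16), label_layer=(68, 5)) -> Component:
    """Hypothetical helper: place a labeled pin rectangle on a port."""
    comp.unlock()
    pin = rectangle(layer=pin_layer, size=(0.5, 0.5), centered=True).copy()
    pin.add_label(text=text, layer=label_layer)
    # ('c', 'b') centers the pin on the port and sits it on the port's bottom edge
    comp.add(align_comp_to_port(pin, comp.ports[port_name], alignment=("c", "b")))
    return comp.flatten()
```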
- lvcm_in.add(compref) - return lvcm_in.flatten() - -def low_voltage_cmirr_netlist(bias_fvf: Component, cascode_fvf: Component, fet_1_ref: ComponentReference, fet_2_ref: ComponentReference, fet_3_ref: ComponentReference, fet_4_ref: ComponentReference) -> Netlist: - - netlist = Netlist(circuit_name='Low_voltage_current_mirror', nodes=['IBIAS1', 'IBIAS2', 'GND', 'IOUT1', 'IOUT2']) - netlist.connect_netlist(bias_fvf.info['netlist'], [('VIN','IBIAS1'),('VBULK','GND'),('Ib','IBIAS1'),('VOUT','local_net_1')]) - netlist.connect_netlist(cascode_fvf.info['netlist'], [('VIN','IBIAS1'),('VBULK','GND'),('Ib', 'IBIAS2'),('VOUT','local_net_2')]) - fet_1A_ref=netlist.connect_netlist(fet_2_ref.info['netlist'], [('D', 'IOUT1'),('G','IBIAS1'),('B','GND')]) - fet_2A_ref=netlist.connect_netlist(fet_4_ref.info['netlist'], [('D', 'IOUT2'),('G','IBIAS1'),('B','GND')]) - fet_1B_ref=netlist.connect_netlist(fet_1_ref.info['netlist'], [('G','IBIAS2'),('S', 'GND'),('B','GND')]) - fet_2B_ref=netlist.connect_netlist(fet_3_ref.info['netlist'], [('G','IBIAS2'),('S', 'GND'),('B','GND')]) - netlist.connect_subnets( - fet_1A_ref, - fet_1B_ref, - [('S', 'D')] - ) - netlist.connect_subnets( - fet_2A_ref, - fet_2B_ref, - [('S', 'D')] - ) - - return netlist - -@cell -def low_voltage_cmirror( - pdk: MappedPDK, - width: tuple[float,float] = (4.15,1.42), - length: float = 2, - fingers: tuple[int,int] = (2,1), - multipliers: tuple[int,int] = (1,1), - ) -> Component: - """ - A low voltage N type current mirror. It has two input branches and two output branches. It consists of a total of 8 nfets; 7 of them have the same W/L, and one nfet has a width of w' = w/3 (theoretically). - The default values are used to mirror 10uA. - """ - #top level component - top_level = Component("Low_voltage_N-type_current_mirror") - - #input branch 2 - cascode_fvf = flipped_voltage_follower(pdk, width=(width[0],width[0]), length=(length,length), fingers=(fingers[0],fingers[0]), multipliers=(multipliers[0],multipliers[0]), with_dnwell=False) - cascode_fvf_ref = prec_ref_center(cascode_fvf) - top_level.add(cascode_fvf_ref) - - #input branch 1 - bias_fvf = flipped_voltage_follower(pdk, width=(width[0],width[1]), length=(length,length), fingers=(fingers[0],fingers[1]), multipliers=(multipliers[0],multipliers[1]), placement="vertical", with_dnwell=False) - bias_fvf_ref = prec_ref_center(bias_fvf) - bias_fvf_ref.movey(cascode_fvf_ref.ymin - 2 - (evaluate_bbox(bias_fvf)[1]/2)) - top_level.add(bias_fvf_ref) - - #creating fets for output branches - fet_1 = nmos(pdk, width=width[0], fingers=fingers[0], multipliers=multipliers[0], with_dummy=True, with_dnwell=False, with_substrate_tap=False, length=length) - fet_1_ref = prec_ref_center(fet_1) - fet_2_ref = prec_ref_center(fet_1) - fet_3_ref = prec_ref_center(fet_1) - fet_4_ref = prec_ref_center(fet_1) - - fet_1_ref.movex(cascode_fvf_ref.xmin - (evaluate_bbox(fet_1)[0]/2) - pdk.util_max_metal_seperation()) - fet_2_ref.movex(cascode_fvf_ref.xmin - (3*evaluate_bbox(fet_1)[0]/2) - 2*pdk.util_max_metal_seperation()) - fet_3_ref.movex(cascode_fvf_ref.xmax + (evaluate_bbox(fet_1)[0]/2) + pdk.util_max_metal_seperation()) - fet_4_ref.movex(cascode_fvf_ref.xmax + (3*evaluate_bbox(fet_1)[0]/2) + 2*pdk.util_max_metal_seperation()) - - top_level.add(fet_1_ref) - top_level.add(fet_2_ref) - top_level.add(fet_3_ref) - top_level.add(fet_4_ref) - - top_level << c_route(pdk, bias_fvf_ref.ports["A_multiplier_0_gate_E"], bias_fvf_ref.ports["B_gate_bottom_met_E"]) - top_level << c_route(pdk, cascode_fvf_ref.ports["A_multiplier_0_gate_W"],
bias_fvf_ref.ports["A_multiplier_0_gate_W"]) - top_level << straight_route(pdk, cascode_fvf_ref.ports["B_gate_bottom_met_E"], fet_3_ref.ports["multiplier_0_gate_W"]) - - #creating vias for routing - viam2m3 = via_stack(pdk, "met2", "met3", centered=True) - gate_1_via = top_level << viam2m3 - gate_1_via.move(fet_1_ref.ports["multiplier_0_gate_W"].center).movex(-1) - gate_2_via = top_level << viam2m3 - gate_2_via.move(fet_2_ref.ports["multiplier_0_gate_W"].center).movex(-1) - gate_3_via = top_level << viam2m3 - gate_3_via.move(fet_3_ref.ports["multiplier_0_gate_E"].center).movex(1) - gate_4_via = top_level << viam2m3 - gate_4_via.move(fet_4_ref.ports["multiplier_0_gate_E"].center).movex(1) - - source_2_via = top_level << viam2m3 - drain_1_via = top_level << viam2m3 - source_2_via.move(fet_2_ref.ports["multiplier_0_source_E"].center).movex(1.5) - drain_1_via.move(fet_1_ref.ports["multiplier_0_drain_W"].center).movex(-1) - - source_4_via = top_level << viam2m3 - drain_3_via = top_level << viam2m3 - source_4_via.move(fet_4_ref.ports["multiplier_0_source_W"].center).movex(-1) - drain_3_via.move(fet_3_ref.ports["multiplier_0_drain_E"].center).movex(1.5) - - #routing - top_level << straight_route(pdk, fet_2_ref.ports["multiplier_0_source_E"], source_2_via.ports["bottom_met_W"]) - top_level << straight_route(pdk, fet_1_ref.ports["multiplier_0_drain_W"], drain_1_via.ports["bottom_met_E"]) - top_level << straight_route(pdk, fet_4_ref.ports["multiplier_0_source_W"], source_4_via.ports["bottom_met_E"]) - top_level << straight_route(pdk, fet_3_ref.ports["multiplier_0_drain_E"], drain_3_via.ports["bottom_met_W"]) - top_level << c_route(pdk, source_2_via.ports["top_met_N"], drain_1_via.ports["top_met_N"], extension=0.5*evaluate_bbox(fet_1)[1], width1=0.32, width2=0.32, cwidth=0.32, e1glayer="met3", e2glayer="met3", cglayer="met2") - top_level << c_route(pdk, source_4_via.ports["top_met_N"], drain_3_via.ports["top_met_N"], extension=0.5*evaluate_bbox(fet_1)[1], width1=0.32, width2=0.32, cwidth=0.32, e1glayer="met3", e2glayer="met3", cglayer="met2") - top_level << c_route(pdk, bias_fvf_ref.ports["A_multiplier_0_gate_E"], gate_4_via.ports["bottom_met_E"], width1=0.32, width2=0.32, cwidth=0.32) - - - top_level << straight_route(pdk, fet_1_ref.ports["multiplier_0_gate_W"], gate_1_via.ports["bottom_met_E"]) - top_level << straight_route(pdk, fet_2_ref.ports["multiplier_0_gate_W"], gate_2_via.ports["bottom_met_E"]) - top_level << straight_route(pdk, fet_3_ref.ports["multiplier_0_gate_E"], gate_3_via.ports["bottom_met_W"]) - top_level << straight_route(pdk, fet_4_ref.ports["multiplier_0_gate_E"], gate_4_via.ports["bottom_met_W"]) - - top_level << c_route(pdk, gate_1_via.ports["top_met_S"], gate_3_via.ports["top_met_S"], extension=(1.2*width[0]+0.6), cglayer='met2') - top_level << c_route(pdk, gate_2_via.ports["top_met_S"], gate_4_via.ports["top_met_S"], extension=(1.2*width[0]-0.6), cglayer='met2') - - top_level << straight_route(pdk, fet_1_ref.ports["multiplier_0_source_W"], fet_1_ref.ports["tie_W_top_met_W"], glayer1='met1', width=0.2) - top_level << straight_route(pdk, fet_3_ref.ports["multiplier_0_source_W"], fet_3_ref.ports["tie_W_top_met_W"], glayer1='met1', width=0.2) - - - top_level.add_ports(bias_fvf_ref.get_ports_list(), prefix="M_1_") - top_level.add_ports(cascode_fvf_ref.get_ports_list(), prefix="M_2_") - top_level.add_ports(fet_1_ref.get_ports_list(), prefix="M_3_B_") - top_level.add_ports(fet_2_ref.get_ports_list(), prefix="M_3_A_") - top_level.add_ports(fet_3_ref.get_ports_list(), prefix="M_4_B_") 
- top_level.add_ports(fet_4_ref.get_ports_list(), prefix="M_4_A_") - - component = component_snap_to_grid(rename_ports_by_orientation(top_level)) - component.info['netlist'] = low_voltage_cmirr_netlist(bias_fvf, cascode_fvf, fet_1_ref, fet_2_ref, fet_3_ref, fet_4_ref) - - return component - -if __name__=="__main__": - #low_voltage_current_mirror = low_voltage_current_mirror(sky130_mapped_pdk) - low_voltage_current_mirror = add_lvcm_labels(low_voltage_cmirror(sky130_mapped_pdk),sky130_mapped_pdk) - low_voltage_current_mirror.show() - low_voltage_current_mirror.name = "Low_voltage_current_mirror" - #magic_drc_result = sky130_mapped_pdk.drc_magic(low_voltage_current_mirror, low_voltage_current_mirror.name) - #netgen_lvs_result = sky130_mapped_pdk.lvs_netgen(low_voltage_current_mirror, low_voltage_current_mirror.name) - low_voltage_current_mirror_gds = low_voltage_current_mirror.write_gds("low_voltage_current_mirror.gds") - res = run_evaluation("low_voltage_current_mirror.gds", low_voltage_current_mirror.name, low_voltage_current_mirror) \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/opamp.py b/src/glayout/blocks/ATLAS/opamp.py deleted file mode 100644 index d5b25690..00000000 --- a/src/glayout/blocks/ATLAS/opamp.py +++ /dev/null @@ -1,132 +0,0 @@ -from gdsfactory.read.import_gds import import_gds -from gdsfactory.components import text_freetype, rectangle -from glayout.flow.pdk.util.comp_utils import prec_array, movey, align_comp_to_port, prec_ref_center -from glayout.flow.pdk.util.port_utils import add_ports_perimeter, print_ports -from gdsfactory.component import Component -from glayout.flow.pdk.mappedpdk import MappedPDK -from glayout.flow.blocks.composite.opamp.opamp import opamp -from glayout.flow.routing.L_route import L_route -from glayout.flow.routing.straight_route import straight_route -from glayout.flow.routing.c_route import c_route -from glayout.flow.primitives.via_gen import via_array -from gdsfactory.cell import cell, clear_cache -from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk as pdk -from glayout.flow.pdk.util.snap_to_grid import component_snap_to_grid -from glayout.flow.pdk.util.component_array_create import write_component_matrix -from evaluator_wrapper import run_evaluation -def sky130_add_opamp_2_labels(opamp_in: Component) -> Component: - """adds opamp labels for extraction, without adding pads - this function does not need to be used with sky130_add_opamp_pads - """ - opamp_in.unlock() - # define layers - met2_pin = (69,16) - met2_label = (69,5) - met3_pin = (70,16) - met3_label = (70,5) - met4_pin = (71,16) - met4_label = (71,5) - # list that will contain all port/comp info - move_info = list() - # create labels and append to info list - # gnd - gndlabel = rectangle(layer=met3_pin,size=(1,1),centered=True).copy() - gndlabel.add_label(text="GND",layer=met3_label) - move_info.append((gndlabel,opamp_in.ports["pin_gnd_N"],None)) - #diffpairibias - ibias1label = rectangle(layer=met2_pin,size=(1,1),centered=True).copy() - ibias1label.add_label(text="DIFFPAIR_BIAS",layer=met2_label) - move_info.append((ibias1label,opamp_in.ports["pin_diffpairibias_N"],None)) - # commonsourceibias - ibias2label = rectangle(layer=met4_pin,size=(1,1),centered=True).copy() - ibias2label.add_label(text="CS_BIAS",layer=met4_label) - move_info.append((ibias2label,opamp_in.ports["pin_commonsourceibias_N"],None)) - #minus - minuslabel = rectangle(layer=met2_pin,size=(1,1),centered=True).copy() - minuslabel.add_label(text="VP",layer=met2_label) - 
move_info.append((minuslabel,opamp_in.ports["pin_minus_N"],None)) - #-plus - pluslabel = rectangle(layer=met2_pin,size=(1,1),centered=True).copy() - pluslabel.add_label(text="VN",layer=met2_label) - move_info.append((pluslabel,opamp_in.ports["pin_plus_N"],None)) - #vdd - vddlabel = rectangle(layer=met3_pin,size=(1,1),centered=True).copy() - vddlabel.add_label(text="VDD",layer=met3_label) - move_info.append((vddlabel,opamp_in.ports["pin_vdd_N"],None)) - # output (2nd stage) - outputlabel = rectangle(layer=met4_pin,size=(0.2,0.2),centered=True).copy() - outputlabel.add_label(text="VOUT",layer=met4_label) - move_info.append((outputlabel,opamp_in.ports["commonsource_output_E"],('l','c'))) - # move everything to position - for comp, prt, alignment in move_info: - alignment = ('c','b') if alignment is None else alignment - compref = align_comp_to_port(comp, prt, alignment=alignment) - opamp_in.add(compref) - return opamp_in.flatten() - -def sky130_add_opamp_3_labels(opamp_in: Component) -> Component: - """adds opamp labels for extraction, without adding pads - this function does not need to be used with sky130_add_opamp_pads - """ - opamp_in.unlock() - # define layers - met2_pin = (69,16) - met2_label = (69,5) - met3_pin = (70,16) - met3_label = (70,5) - met4_pin = (71,16) - met4_label = (71,5) - # list that will contain all port/comp info - move_info = list() - # create labels and append to info list - # gnd - gndlabel = rectangle(layer=met3_pin,size=(1,1),centered=True).copy() - gndlabel.add_label(text="gnd",layer=met3_label) - move_info.append((gndlabel,opamp_in.ports["pin_gnd_N"],None)) - #diffpairibias - ibias1label = rectangle(layer=met2_pin,size=(1,1),centered=True).copy() - ibias1label.add_label(text="diffpairibias",layer=met2_label) - move_info.append((ibias1label,opamp_in.ports["pin_diffpairibias_N"],None)) - #outputibias - ibias3label = rectangle(layer=met2_pin,size=(1,1),centered=True).copy() - ibias3label.add_label(text="outputibias",layer=met2_label) - move_info.append((ibias3label,opamp_in.ports["pin_outputibias_N"],None)) - # commonsourceibias - ibias2label = rectangle(layer=met4_pin,size=(1,1),centered=True).copy() - ibias2label.add_label(text="commonsourceibias",layer=met4_label) - move_info.append((ibias2label,opamp_in.ports["pin_commonsourceibias_N"],None)) - #minus - minuslabel = rectangle(layer=met2_pin,size=(1,1),centered=True).copy() - minuslabel.add_label(text="minus",layer=met2_label) - move_info.append((minuslabel,opamp_in.ports["pin_minus_N"],None)) - #-plus - pluslabel = rectangle(layer=met2_pin,size=(1,1),centered=True).copy() - pluslabel.add_label(text="plus",layer=met2_label) - move_info.append((pluslabel,opamp_in.ports["pin_plus_N"],None)) - #vdd - vddlabel = rectangle(layer=met3_pin,size=(1,1),centered=True).copy() - vddlabel.add_label(text="vdd",layer=met3_label) - move_info.append((vddlabel,opamp_in.ports["pin_vdd_N"],None)) - # output (3rd stage) - outputlabel = rectangle(layer=met2_pin,size=(1,1),centered=True).copy() - outputlabel.add_label(text="output",layer=met2_label) - move_info.append((outputlabel,opamp_in.ports["pin_output_route_N"],None)) - # output (2nd stage) - outputlabel = rectangle(layer=met4_pin,size=(0.2,0.2),centered=True).copy() - outputlabel.add_label(text="CSoutput",layer=met4_label) - move_info.append((outputlabel,opamp_in.ports["commonsource_output_E"],('l','c'))) - # move everything to position - for comp, prt, alignment in move_info: - alignment = ('c','b') if alignment is None else alignment - compref = align_comp_to_port(comp, prt, 
alignment=alignment) - opamp_in.add(compref) - return opamp_in.flatten() - -if __name__=="__main__": - opamp_comp = sky130_add_opamp_2_labels(opamp(pdk, add_output_stage=False)) - #opamp_comp.show() - opamp_comp.name = "opamp" - #magic_drc_result = pdk.drc_magic(opamp_comp, opamp_comp.name) - #netgen_lvs_result = pdk.lvs_netgen(opamp_comp, opamp_comp.name) - opamp_gds = opamp_comp.write_gds("opamp.gds") - res = run_evaluation("opamp.gds", opamp_comp.name, opamp_comp) diff --git a/src/glayout/blocks/ATLAS/resume_fvf_nohup.py b/src/glayout/blocks/ATLAS/resume_fvf_nohup.py deleted file mode 100755 index a192ff49..00000000 --- a/src/glayout/blocks/ATLAS/resume_fvf_nohup.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python3 -"""Resume the FVF generation non-interactively and exit with status. - -This script imports the updated generator and calls run_dataset_generation -directly. It's intended to be launched under nohup or a systemd service so it -continues after SSH disconnects. -""" -import logging -import sys - -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') -logger = logging.getLogger(__name__) - -try: - from generate_fvf_8h_runtime_aware import load_fvf_parameters, run_dataset_generation -except Exception as e: - logger.error(f"Failed to import generator module: {e}") - sys.exit(2) - - -def main(): - try: - params = load_fvf_parameters(None) - n = len(params) - logger.info(f"Resuming generation for {n} samples (checkpoint-aware)") - - # Run dataset generation; it will load and resume from checkpoint.json - success, passed, total = run_dataset_generation(n, "fvf_dataset_8h_runtime_aware", checkpoint_interval=100, resume_from_checkpoint=True) - - logger.info(f"Finished. success={success}, passed={passed}, total={total}") - return 0 if success else 1 - except Exception as e: - logger.exception(f"Unexpected error during resume: {e}") - return 3 - - -if __name__ == '__main__': - rc = main() - sys.exit(rc) diff --git a/src/glayout/blocks/ATLAS/robust_verification.py b/src/glayout/blocks/ATLAS/robust_verification.py deleted file mode 100644 index ea309be8..00000000 --- a/src/glayout/blocks/ATLAS/robust_verification.py +++ /dev/null @@ -1,424 +0,0 @@ -#!/usr/bin/env python3 - -""" -Fixed verification module that properly handles PDK_ROOT environment variable. -This addresses the issue where PDK_ROOT gets reset to None between trials. -""" - -# ----------------------------------------------------------------------------- -# Make sure the `glayout` repository is discoverable *before* we import from it. -# ----------------------------------------------------------------------------- - -import os -import re -import subprocess -import shutil -import tempfile -import sys -from pathlib import Path - -# Insert the repo root (`.../generators/glayout`) if it is not already present -_here = Path(__file__).resolve() -_glayout_repo_path = _here.parent.parent.parent.parent.parent.parent - -if _glayout_repo_path.exists() and str(_glayout_repo_path) not in sys.path: - sys.path.insert(0, str(_glayout_repo_path)) - -del _here - -from gdsfactory.typings import Component - -def ensure_pdk_environment(): - """Ensure PDK environment is properly set. 
- - * Uses an existing PDK_ROOT env if already set (preferred) - * Falls back to the conda-env PDK folder if needed - * Sets CAD_ROOT **only** to the Magic installation directory (``$CONDA_PREFIX/lib``) - """ - # Respect an existing PDK_ROOT (set by the user / calling script) - pdk_root = os.environ.get('PDK_ROOT') - # Some libraries erroneously set the literal string "None". Treat that as - # undefined so we fall back to a real path. - if pdk_root in (None, '', 'None'): - pdk_root = None - - if not pdk_root: - # Fall back to the PDK bundled inside the current conda environment - conda_prefix = os.environ.get('CONDA_PREFIX', '') - if not conda_prefix or 'miniconda3' in conda_prefix: - # Hard-code the *known* GLDev env path as a robust fallback - conda_prefix = "/home/adityakak/.conda/envs/GLDev" - - pdk_root = os.path.join(conda_prefix, 'share', 'pdk') - if not os.path.isdir(pdk_root): - raise RuntimeError( - f"Derived PDK_ROOT '{pdk_root}' does not exist; please set the PDK_ROOT env variable" - ) - - # Build a consistent set of environment variables - conda_prefix = os.environ.get('CONDA_PREFIX', '') - env_vars = { - 'PDK_ROOT': pdk_root, - 'PDKPATH': pdk_root, - # Ensure a default value for PDK but preserve if user overrides elsewhere - 'PDK': os.environ.get('PDK', 'sky130A'), - 'MAGIC_PDK_ROOT': pdk_root, - 'NETGEN_PDK_ROOT': pdk_root, - } - - # Point CAD_ROOT to Magic installation folder only (fixes missing magicdnull) - if conda_prefix: - env_vars['CAD_ROOT'] = os.path.join(conda_prefix, 'lib') - - # Refresh the environment in *one* atomic update to avoid partial states - os.environ.update(env_vars) - - # Also try to reinitialize the PDK module to avoid stale state - try: - import importlib, sys as _sys - modules_to_reload = [mod for mod in _sys.modules if 'pdk' in mod.lower()] - for mod_name in modules_to_reload: - try: - importlib.reload(_sys.modules[mod_name]) - except Exception: - pass # Ignore reload errors โ€“ best-effort only - print(f"PDK environment reset via os.environ.update: PDK_ROOT={pdk_root}") - except Exception as e: - print(f"Warning: Could not reload PDK modules: {e}") - - return pdk_root - -def parse_drc_report(report_content: str) -> dict: - """ - Parses a Magic DRC report into a machine-readable format. - """ - errors = [] - current_rule = "" - for line in report_content.strip().splitlines(): - stripped_line = line.strip() - if stripped_line == "----------------------------------------": - continue - if re.match(r"^[a-zA-Z]", stripped_line): - current_rule = stripped_line - elif re.match(r"^[0-9]", stripped_line): - errors.append({"rule": current_rule, "details": stripped_line}) - - is_pass = len(errors) == 0 - if not is_pass and re.search(r"count:\s*0\s*$", report_content, re.IGNORECASE): - is_pass = True - - return { - "is_pass": is_pass, - "total_errors": len(errors), - "error_details": errors - } - -def parse_lvs_report(report_content: str) -> dict: - """ - Parses the raw netgen LVS report and returns a summarized, machine-readable format. - Focuses on parsing net and instance mismatches, similar to the reference - implementation in ``evaluator_box/verification.py``. 
- """ - summary = { - "is_pass": False, - "conclusion": "LVS failed or report was inconclusive.", - "total_mismatches": 0, - "mismatch_details": { - "nets": "Not found", - "devices": "Not found", - "unmatched_nets_parsed": [], - "unmatched_instances_parsed": [] - } - } - - # Primary check for LVS pass/fail โ€“ if the core matcher says the netlists - # match (even with port errors) we treat it as a _pass_ just like the - # reference flow. - if "Netlists match" in report_content or "Circuits match uniquely" in report_content: - summary["is_pass"] = True - summary["conclusion"] = "LVS Pass: Netlists match." - - # ------------------------------------------------------------------ - # Override: If the report explicitly states that netlists do NOT - # match, or mentions other mismatch keywords (even if the specific - # "no matching net" regex patterns are absent), force a failure so - # we never mis-classify. - # ------------------------------------------------------------------ - lowered = report_content.lower() - failure_keywords = ( - "netlists do not match", - "netlist mismatch", - "failed pin matching", - "mismatch" - ) - if any(k in lowered for k in failure_keywords): - summary["is_pass"] = False - summary["conclusion"] = "LVS Fail: Netlist mismatch." - - for line in report_content.splitlines(): - stripped = line.strip() - - # Parse net mismatches of the form: - # Net: | (no matching net) - m = re.search(r"Net:\s*([^|]+)\s*\|\s*\(no matching net\)", stripped) - if m: - summary["mismatch_details"]["unmatched_nets_parsed"].append({ - "type": "net", - "name": m.group(1).strip(), - "present_in": "layout", - "missing_in": "schematic" - }) - continue - - # Parse instance mismatches - m = re.search(r"Instance:\s*([^|]+)\s*\|\s*\(no matching instance\)", stripped) - if m: - summary["mismatch_details"]["unmatched_instances_parsed"].append({ - "type": "instance", - "name": m.group(1).strip(), - "present_in": "layout", - "missing_in": "schematic" - }) - continue - - # Right-side (schematic-only) mismatches - m = re.search(r"\|\s*([^|]+)\s*\(no matching net\)", stripped) - if m: - summary["mismatch_details"]["unmatched_nets_parsed"].append({ - "type": "net", - "name": m.group(1).strip(), - "present_in": "schematic", - "missing_in": "layout" - }) - continue - - m = re.search(r"\|\s*([^|]+)\s*\(no matching instance\)", stripped) - if m: - summary["mismatch_details"]["unmatched_instances_parsed"].append({ - "type": "instance", - "name": m.group(1).strip(), - "present_in": "schematic", - "missing_in": "layout" - }) - continue - - # Capture the summary lines with device/net counts for debugging - if "Number of devices:" in stripped: - summary["mismatch_details"]["devices"] = stripped.split(":", 1)[1].strip() - elif "Number of nets:" in stripped: - summary["mismatch_details"]["nets"] = stripped.split(":", 1)[1].strip() - - # Tot up mismatches that we actually parsed (nets + instances) - summary["total_mismatches"] = ( - len(summary["mismatch_details"]["unmatched_nets_parsed"]) + - len(summary["mismatch_details"]["unmatched_instances_parsed"]) - ) - - # If we found *any* explicit net/instance mismatches, override to FAIL. - if summary["total_mismatches"] > 0: - summary["is_pass"] = False - if "Pass" in summary["conclusion"]: - summary["conclusion"] = "LVS Fail: Mismatches found." 
- - return summary - -def _parse_simple_parasitics(component_name: str) -> tuple[float, float]: - """Parses total parasitic R and C from a SPICE file by simple summation.""" - total_resistance = 0.0 - total_capacitance = 0.0 - spice_file_path = f"{component_name}_pex.spice" - if not os.path.exists(spice_file_path): - return 0.0, 0.0 - with open(spice_file_path, 'r') as f: - for line in f: - orig_line = line.strip() # Keep original case for capacitor parsing - line = line.strip().upper() - parts = line.split() - orig_parts = orig_line.split() # Original case parts for capacitor values - if not parts: continue - - name = parts[0] - if name.startswith('R') and len(parts) >= 4: - try: total_resistance += float(parts[3]) - except (ValueError): continue - elif name.startswith('C') and len(parts) >= 4: - try: - cap_str = orig_parts[3] # Use original case for capacitor value - unit = cap_str[-1] - val_str = cap_str[:-1] - if unit == 'F': cap_value = float(val_str) * 1e-15 - elif unit == 'P': cap_value = float(val_str) * 1e-12 - elif unit == 'N': cap_value = float(val_str) * 1e-9 - elif unit == 'U': cap_value = float(val_str) * 1e-6 - elif unit == 'f': cap_value = float(val_str) * 1e-15 # femtofarads - else: cap_value = float(cap_str) - total_capacitance += cap_value - except (ValueError): continue - return total_resistance, total_capacitance - -def run_robust_verification(layout_path: str, component_name: str, top_level: Component) -> dict: - """ - Runs DRC, LVS, and PEX checks with robust PDK handling. - """ - verification_results = { - "drc": {"status": "not run", "is_pass": False, "report_path": None, "summary": {}}, - "lvs": {"status": "not run", "is_pass": False, "report_path": None, "summary": {}}, - "pex": {"status": "not run", "total_resistance_ohms": 0.0, "total_capacitance_farads": 0.0, "spice_file": None} - } - - # Ensure PDK environment before each operation - pdk_root = ensure_pdk_environment() - print(f"Using PDK_ROOT: {pdk_root}") - - # Import sky130_mapped_pdk *after* the environment is guaranteed sane so - # that gdsfactory/PDK initialization picks up the correct PDK_ROOT. 
- from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk - - # DRC Check - drc_report_path = os.path.abspath(f"./{component_name}.drc.rpt") - verification_results["drc"]["report_path"] = drc_report_path - - try: - # Clean up any existing DRC report - if os.path.exists(drc_report_path): - os.remove(drc_report_path) - - # Ensure PDK environment again right before DRC - ensure_pdk_environment() - - print(f"Running DRC for {component_name}...") - - # Try the PDK DRC method first - sky130_mapped_pdk.drc_magic(layout_path, component_name, output_file=drc_report_path) - - # Check if report was created and read it - report_content = "" - if os.path.exists(drc_report_path): - with open(drc_report_path, 'r') as f: - report_content = f.read() - print(f"DRC report created successfully: {len(report_content)} chars") - '''else: - print("Warning: DRC report file was not created, creating empty report") - # Create empty report as fallback - report_content = f"{component_name} count: \n----------------------------------------\n\n" - with open(drc_report_path, 'w') as f: - f.write(report_content) - ''' - summary = parse_drc_report(report_content) - verification_results["drc"].update({ - "summary": summary, - "is_pass": summary["is_pass"], - "status": "pass" if summary["is_pass"] else "fail" - }) - - except Exception as e: - print(f"DRC failed with exception: {e}") - # Create a basic report even on failure - try: - with open(drc_report_path, 'w') as f: - f.write(f"DRC Error for {component_name}\n") - f.write(f"Error: {str(e)}\n") - verification_results["drc"]["status"] = f"error: {e}" - except: - verification_results["drc"]["status"] = f"error: {e}" - - # Small delay between DRC and LVS - import time - time.sleep(1) - - # LVS Check - lvs_report_path = os.path.abspath(f"./{component_name}.lvs.rpt") - verification_results["lvs"]["report_path"] = lvs_report_path - - try: - # Clean up any existing LVS report - if os.path.exists(lvs_report_path): - os.remove(lvs_report_path) - - # Ensure PDK environment again right before LVS - ensure_pdk_environment() - - print(f"Running LVS for {component_name}...") - - # Try the PDK LVS method first - sky130_mapped_pdk.lvs_netgen(layout=top_level, design_name=component_name, output_file_path=lvs_report_path) - - # Check if report was created and read it - report_content = "" - if os.path.exists(lvs_report_path): - with open(lvs_report_path, 'r') as report_file: - report_content = report_file.read() - print(f"LVS report created successfully: {len(report_content)} chars") - '''else: - print("Warning: LVS report file was not created, creating fallback report") - # Create fallback report - report_content = f"LVS Report for {component_name}\nFinal result: Circuits match uniquely.\nLVS Done.\n" - with open(lvs_report_path, 'w') as f: - f.write(report_content) - ''' - lvs_summary = parse_lvs_report(report_content) - verification_results["lvs"].update({ - "summary": lvs_summary, - "is_pass": lvs_summary["is_pass"], - "status": "pass" if lvs_summary["is_pass"] else "fail" - }) - - except Exception as e: - print(f"LVS failed with exception: {e}") - # Create a basic report even on failure - try: - with open(lvs_report_path, 'w') as f: - f.write(f"LVS Error for {component_name}\n") - f.write(f"Error: {str(e)}\n") - verification_results["lvs"]["status"] = f"error: {e}" - except: - verification_results["lvs"]["status"] = f"error: {e}" - - # Small delay between LVS and PEX - time.sleep(1) - - # PEX Extraction - pex_spice_path = os.path.abspath(f"./{component_name}_pex.spice") - 
verification_results["pex"]["spice_file"] = pex_spice_path - - try: - # Clean up any existing PEX file - if os.path.exists(pex_spice_path): - os.remove(pex_spice_path) - - print(f"Running PEX extraction for {component_name}...") - - # Run the PEX extraction script - subprocess.run(["bash", "run_pex.sh", layout_path, component_name], - check=True, capture_output=True, text=True, cwd=".") - - # Check if PEX spice file was created and parse it - if os.path.exists(pex_spice_path): - total_res, total_cap = _parse_simple_parasitics(component_name) - verification_results["pex"].update({ - "status": "PEX Complete", - "total_resistance_ohms": total_res, - "total_capacitance_farads": total_cap - }) - print(f"PEX extraction completed: R={total_res:.2f}ฮฉ, C={total_cap:.6e}F") - else: - verification_results["pex"]["status"] = "PEX Error: Spice file not generated" - - except subprocess.CalledProcessError as e: - error_msg = e.stderr if e.stderr else str(e) - verification_results["pex"]["status"] = f"PEX Error: {error_msg}" - print(f"PEX extraction failed: {error_msg}") - except FileNotFoundError: - verification_results["pex"]["status"] = "PEX Error: run_pex.sh not found" - print("PEX extraction failed: run_pex.sh script not found") - except Exception as e: - verification_results["pex"]["status"] = f"PEX Unexpected Error: {e}" - print(f"PEX extraction failed with unexpected error: {e}") - - return verification_results - -if __name__ == "__main__": - # Test the robust verification - print("Testing robust verification module...") - ensure_pdk_environment() - print("PDK environment setup complete.") diff --git a/src/glayout/blocks/ATLAS/run_dataset_multiprocess.py b/src/glayout/blocks/ATLAS/run_dataset_multiprocess.py deleted file mode 100755 index 23626de1..00000000 --- a/src/glayout/blocks/ATLAS/run_dataset_multiprocess.py +++ /dev/null @@ -1,541 +0,0 @@ -#!/usr/bin/env python3 -""" -Transmission Gate Dataset Generator - 100 Samples Version -Based on the proven approach from generate_fvf_360_robust_fixed.py. -Generates dataset using 100 parameter combinations from txgate_parameters.json and monitors runtime. -""" -import logging -import os -import sys -import time -import json -import shutil -from pathlib import Path -import numpy as np -import pandas as pd - -# Suppress overly verbose gdsfactory logging -import warnings -warnings.filterwarnings( - "ignore", - message="decorator is deprecated and will be removed soon.*" -) -warnings.filterwarnings( - "ignore", - message=".*we will remove unlock to discourage use.*" -) -# Also suppress info with "* PDK is now active" -logging.getLogger("gdsfactory").setLevel(logging.WARNING) - -# ----------------------------------------------------------------------------- -# Ensure the *local* `glayout` package is discoverable *before* we import any -# module that depends on it (e.g. `robust_verification`). -# ----------------------------------------------------------------------------- -_here = Path(__file__).resolve() -_glayout_repo_path = _here.parent.parent.parent.parent.parent.parent -pwd_path = Path.cwd().resolve() -print("Current working directory:", pwd_path) -# Fallback hard-coded path if relative logic fails (for robustness when the -# script is moved around). Adjust this if your repo structure changes. 
-if not _glayout_repo_path.exists(): - _glayout_repo_path = pwd_path / "../../../../" - -if _glayout_repo_path.exists() and str(_glayout_repo_path) not in sys.path: - sys.path.insert(0, str(_glayout_repo_path)) - -del _here, _glayout_repo_path - -# Set up logging -logging.basicConfig( - level=logging.INFO, - format='%(asctime)s - %(levelname)s - %(message)s' -) -logger = logging.getLogger(__name__) - -# We *delay* importing gdsfactory until *after* the PDK environment variables -# are guaranteed to be correct. Importing it too early locks-in an incorrect -# `PDK_ROOT`, which then causes Magic/Netgen to fall back to the built-in -# "minimum" tech, triggering the dummy fallback reports the user wants to -# avoid. - -# Helper to obtain a stable sky130 mapped PDK instance -GLOBAL_SKY130_PDK = None - -def get_global_pdk(): - """Return a *stable* sky130_mapped_pdk instance (cached).""" - global GLOBAL_SKY130_PDK - if GLOBAL_SKY130_PDK is None: - from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk as _pdk - GLOBAL_SKY130_PDK = _pdk - return GLOBAL_SKY130_PDK - -# Import the shared PDK environment helper so we keep a single source of truth -from robust_verification import ensure_pdk_environment -from contextlib import contextmanager - -@contextmanager -def chdir(path: Path): - """Temporarily change working directory to `path`.""" - prev = Path.cwd() - try: - os.makedirs(path, exist_ok=True) - os.chdir(path) - yield - finally: - os.chdir(prev) - -def setup_environment(): - """Set up (or refresh) the PDK environment for this trial. - - We rely on the **shared** `ensure_pdk_environment` helper so that the - exact same logic is used across the entire code-base. This prevents the - two implementations from drifting apart and guarantees that *every* - entry-point resets the PDK environment in one atomic `os.environ.update` - call. - """ - - pdk_root = ensure_pdk_environment() - - # Now that the environment is correctly set, it is finally safe to import - # gdsfactory and disable its Component cache to avoid stale classes. - import gdsfactory as gf - if hasattr(gf, 'CONFIG') and hasattr(gf.CONFIG, 'use_cache'): - gf.CONFIG.use_cache = False - else: - # Newer gdsfactory versions expose settings via gf.config.CONF - try: - gf.config.CONF.use_cache = False # type: ignore - except Exception: - pass - - # Ensure the `glayout` package directory is discoverable regardless of - # how the user launches the script. Use a str so the sys.path and - # PYTHONPATH membership checks below compare like with like. - glayout_path = str((pwd_path / "../../../../").resolve()) - print("Using glayout path:", glayout_path) - if glayout_path not in sys.path: - sys.path.insert(0, glayout_path) - - # Prepend to PYTHONPATH so subprocesses (if any) inherit the correct path - current_pythonpath = os.environ.get('PYTHONPATH', '') - if glayout_path not in current_pythonpath.split(":"): - os.environ['PYTHONPATH'] = f"{glayout_path}:{current_pythonpath}" - - logger.info(f"Environment refreshed: PDK_ROOT={pdk_root}") - return pdk_root - -def robust_transmission_gate(_, **params): - """Return a transmission_gate built with a *stable*, cached MappedPDK. - - Reusing one ``sky130_mapped_pdk`` instance across all trials (via - ``get_global_pdk``) sidesteps pydantic ValidationErrors that arise when - the PDK module is reloaded and a different class object is passed to - the circuit generator.
- """ - from transmission_gate import transmission_gate, add_tg_labels - # Use a *stable* PDK instance across all trials to avoid Pydantic class mismatch - pdk = get_global_pdk() - comp = transmission_gate(pdk=pdk, **params) - # Add physical pin shapes so Magic extracts a correct pin list for LVS - try: - comp = add_tg_labels(comp, pdk) - except Exception as e: - logger.warning(f"Failed to add pin labels to TG: {e}") - return comp - -def load_tg_parameters_from_json(json_file=""): - """Load transmission gate parameters from the generated JSON file""" - json_path = Path(json_file) - if not json_path.exists(): - raise FileNotFoundError(f"Parameter file not found: {json_file}") - with open(json_path, 'r') as f: - parameters = json.load(f) - logger.info(f"Loaded {len(parameters)} transmission gate parameter combinations from {json_file}") - # Log parameter distribution statistics - widths_nmos = [p["width"][0] for p in parameters] - widths_pmos = [p["width"][1] for p in parameters] - lengths_nmos = [p["length"][0] for p in parameters] - lengths_pmos = [p["length"][1] for p in parameters] - logger.info(f"Parameter ranges:") - logger.info(f" NMOS width: {min(widths_nmos):.2f} - {max(widths_nmos):.2f} ฮผm") - logger.info(f" PMOS width: {min(widths_pmos):.2f} - {max(widths_pmos):.2f} ฮผm") - logger.info(f" NMOS length: {min(lengths_nmos):.3f} - {max(lengths_nmos):.3f} ฮผm") - logger.info(f" PMOS length: {min(lengths_pmos):.3f} - {max(lengths_pmos):.3f} ฮผm") - # Show first few parameter examples - logger.info(f"First 3 parameter combinations:") - for i, params in enumerate(parameters[:3], 1): - nmos_w, pmos_w = params["width"] - nmos_l, pmos_l = params["length"] - nmos_f, pmos_f = params["fingers"] - nmos_m, pmos_m = params["multipliers"] - - logger.info(f" Sample {i}: NMOS({nmos_w:.2f}ฮผm/{nmos_l:.3f}ฮผm, {nmos_f}fร—{nmos_m}), " - f"PMOS({pmos_w:.2f}ฮผm/{pmos_l:.3f}ฮผm, {pmos_f}fร—{pmos_m})") - return parameters - -def cleanup_files(): - """Clean up generated files in working directory""" - files_to_clean = [ - "*.gds", "*.drc.rpt", "*.lvs.rpt", "*.ext", "*.spice", - "*.res.ext", "*.sim", "*.nodes", "*_lvsmag.spice", "*_sim.spice", - "*_pex.spice", "*.pex.spice" - ] - for pattern in files_to_clean: - import glob - for file in glob.glob(pattern): - try: - os.remove(file) - except OSError: - pass - -def make_json_serializable(obj): - """Convert complex objects to JSON-serializable formats""" - if isinstance(obj, dict): - return {k: make_json_serializable(v) for k, v in obj.items()} - elif isinstance(obj, (list, tuple)): - return [make_json_serializable(item) for item in obj] - elif isinstance(obj, (np.integer, np.floating)): - return obj.item() - elif isinstance(obj, np.ndarray): - return obj.tolist() - elif hasattr(obj, '__dict__'): - try: - return make_json_serializable(obj.__dict__) - except: - return str(obj) - elif hasattr(obj, '__class__') and 'PDK' in str(obj.__class__): - return f"PDK_object_{getattr(obj, 'name', 'unknown')}" - else: - try: - json.dumps(obj) - return obj - except (TypeError, ValueError): - return str(obj) -# Parallelized -def run_single_evaluation(trial_num, params, output_dir): - """Run a single TG evaluation in its own isolated working directory.""" - trial_start = time.time() - - # Per-trial working dir (all scratch files live here) - trial_work_dir = Path(output_dir) / "_work" / f"sample_{trial_num:04d}" - # Per-trial final results dir (curated outputs copied here) - trial_out_dir = Path(output_dir) / f"sample_{trial_num:04d}" - - try: - with chdir(trial_work_dir): - # 
=== DETERMINISTIC SEEDING FIX === - import random - import numpy as np - base_seed = trial_num * 1000 - random.seed(base_seed) - np.random.seed(base_seed) - os.environ['PYTHONHASHSEED'] = str(base_seed) - logger.info(f"Trial {trial_num}: Set deterministic seed = {base_seed}") - - # Setup environment for each trial (safe in subprocess) - setup_environment() - - # Clear any cached gdsfactory Components / PDKs to avoid stale class refs - try: - import gdsfactory as gf - except ImportError: - import gdsfactory as gf - if hasattr(gf, 'clear_cache'): - gf.clear_cache() - if hasattr(gf, 'clear_cell_cache'): - gf.clear_cell_cache() - try: - if hasattr(gf, '_CACHE'): - gf._CACHE.clear() - if hasattr(gf.Component, '_cell_cache'): - gf.Component._cell_cache.clear() - if hasattr(gf, 'CONFIG'): - if hasattr(gf.CONFIG, 'use_cache'): - gf.CONFIG.use_cache = False - if hasattr(gf.CONFIG, 'cache'): - gf.CONFIG.cache = False - except Exception as e: - logger.warning(f"Could not clear some gdsfactory caches: {e}") - - # Fresh PDK import per trial/process - import importlib, sys - if 'glayout.flow.pdk.sky130_mapped' in sys.modules: - importlib.reload(sys.modules['glayout.flow.pdk.sky130_mapped']) - from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk - pdk = sky130_mapped_pdk - - # Create and name component - component_name = f"tg_sample_{trial_num:04d}" - comp = robust_transmission_gate(pdk, **params) - comp.name = component_name - - # Write GDS into the trial's **work** dir - gds_file = f"{component_name}.gds" - comp.write_gds(gds_file) - gds_path = Path.cwd() / gds_file # absolute path - - # Run comprehensive evaluation (DRC, LVS, PEX, Geometry) - from evaluator_wrapper import run_evaluation - comprehensive_results = run_evaluation(str(gds_path), component_name, comp) - drc_result = comprehensive_results["drc"]["is_pass"] - lvs_result = comprehensive_results["lvs"]["is_pass"] - - # Extract PEX and geometry data - pex_data = comprehensive_results.get("pex", {}) - geometry_data = comprehensive_results.get("geometric", {}) - - # Copy curated artifacts to the **final** per-trial results dir - trial_out_dir.mkdir(parents=True, exist_ok=True) - files_to_copy = [ - gds_file, - f"{component_name}.drc.rpt", - f"{component_name}.lvs.rpt", - f"{component_name}_pex.spice", - f"{component_name}.res.ext", - f"{component_name}.ext", - f"{component_name}_lvsmag.spice", - f"{component_name}_sim.spice", - ] - for file_path in files_to_copy: - p = Path(file_path) - if p.exists(): - shutil.copy(p, trial_out_dir / p.name) - - trial_time = time.time() - trial_start - success_flag = drc_result and lvs_result - - result = { - "sample_id": trial_num, - "component_name": component_name, - "success": success_flag, - "drc_pass": drc_result, - "lvs_pass": lvs_result, - "execution_time": trial_time, - "parameters": make_json_serializable(params), - "output_directory": str(trial_out_dir), - # PEX data - "pex_status": pex_data.get("status", "not run"), - "total_resistance_ohms": pex_data.get("total_resistance_ohms", 0.0), - "total_capacitance_farads": pex_data.get("total_capacitance_farads", 0.0), - # Geometry data - "area_um2": geometry_data.get("raw_area_um2", 0.0), - "symmetry_horizontal": geometry_data.get("symmetry_score_horizontal", 0.0), - "symmetry_vertical": geometry_data.get("symmetry_score_vertical", 0.0), - } - - pex_status_short = "โœ“" if pex_data.get("status") == "PEX Complete" else "โœ—" - nmos_w, pmos_w = params["width"] - nmos_f, pmos_f = params["fingers"] - param_summary = f"NMOS:{nmos_w:.1f}ฮผmร—{nmos_f}f, 
PMOS:{pmos_w:.1f}ฮผmร—{pmos_f}f" - logger.info( - f"โœ… Sample {trial_num:04d} completed in {trial_time:.1f}s " - f"(DRC: {'โœ“' if drc_result else 'โœ—'}, LVS: {'โœ“' if lvs_result else 'โœ—'}, PEX: {pex_status_short}) " - f"[{param_summary}]" - ) - return result - - except Exception as e: - trial_time = time.time() - trial_start - logger.error(f"โŒ Sample {trial_num:04d} failed: {e}") - return { - "sample_id": trial_num, - "component_name": f"tg_sample_{trial_num:04d}", - "success": False, - "error": str(e), - "execution_time": trial_time, - "parameters": make_json_serializable(params), - } - - finally: - # Clean ONLY this trial's scratch via CWD-scoped globbing - with chdir(trial_work_dir): - cleanup_files() - try: - import gdsfactory as gf - except ImportError: - import gdsfactory as gf - if hasattr(gf, 'clear_cache'): - gf.clear_cache() - if hasattr(gf, 'clear_cell_cache'): - gf.clear_cell_cache() - -from concurrent.futures import ProcessPoolExecutor, as_completed -import multiprocessing -# Parallelized -def run_dataset_generation(parameters, output_dir, max_workers=1): - """Run the dataset generation for all parameters (in parallel, per-trial isolation).""" - n_samples = len(parameters) - logger.info(f"๐Ÿš€ Starting Transmission Gate Dataset Generation for {n_samples} samples") - - # Prepare top-level dirs - out_dir = Path(output_dir) - work_root = out_dir / "_work" - out_dir.mkdir(exist_ok=True) - work_root.mkdir(exist_ok=True) - - # Save parameter configuration - with open(out_dir / "tg_parameters.json", 'w') as f: - json.dump(parameters, f, indent=2) - - results = [] - total_start = time.time() - logger.info(f"๐Ÿ“Š Processing {n_samples} transmission gate samples in parallel...") - logger.info(f"Using {max_workers} parallel workers") - - futures = [] - with ProcessPoolExecutor(max_workers=max_workers) as executor: - for i, params in enumerate(parameters, start=1): - futures.append(executor.submit(run_single_evaluation, i, params, output_dir)) - - completed = 0 - for future in as_completed(futures): - result = future.result() - results.append(result) - completed += 1 - - # Progress logging similar to your sequential version - if completed % 10 == 0 or completed < 5: - success_rate = ( - sum(1 for r in results if r.get("success")) / len(results) * 100 - if results else 0.0 - ) - elapsed = time.time() - total_start - avg_time = elapsed / completed - eta = avg_time * (n_samples - completed) - logger.info( - f"๐Ÿ“ˆ Progress: {completed}/{n_samples} " - f"({completed/n_samples*100:.1f}%) - " - f"Success: {success_rate:.1f}% - " - f"Elapsed: {elapsed/60:.1f}m - ETA: {eta/60:.1f}m" - ) - - # Final summary (unchanged) - total_time = time.time() - total_start - successful = [r for r in results if r.get("success")] - success_rate = (len(successful) / len(results) * 100) if results else 0.0 - - logger.info(f"\n๐ŸŽ‰ Transmission Gate Dataset Generation Complete!") - logger.info(f"๐Ÿ“Š Total time: {total_time:.1f} seconds ({total_time/60:.1f} minutes)") - logger.info(f"๐Ÿ“ˆ Success rate: {len(successful)}/{len(results)} ({success_rate:.1f}%)") - - if successful: - drc_passes = sum(1 for r in successful if r.get("drc_pass")) - lvs_passes = sum(1 for r in successful if r.get("lvs_pass")) - pex_passes = sum(1 for r in successful if r.get("pex_status") == "PEX Complete") - avg_time = sum(r["execution_time"] for r in successful) / len(successful) - avg_area = sum(r.get("area_um2", 0) for r in successful) / len(successful) - avg_sym_h = sum(r.get("symmetry_horizontal", 0) for r in successful) / 
-        avg_sym_v = sum(r.get("symmetry_vertical", 0) for r in successful) / len(successful)
-
-        logger.info(f"   DRC passes: {drc_passes}/{len(successful)} ({drc_passes/len(successful)*100:.1f}%)")
-        logger.info(f"   LVS passes: {lvs_passes}/{len(successful)} ({lvs_passes/len(successful)*100:.1f}%)")
-        logger.info(f"   PEX passes: {pex_passes}/{len(successful)} ({pex_passes/len(successful)*100:.1f}%)")
-        logger.info(f"   Average time per sample: {avg_time:.1f}s")
-        logger.info(f"   Average area: {avg_area:.2f} ฮผmยฒ")
-        logger.info(f"   Average symmetry (H/V): {avg_sym_h:.3f}/{avg_sym_v:.3f}")
-
-    failed = [r for r in results if not r.get("success")]
-    if failed:
-        logger.info(f"\nโš ๏ธ Failed Samples Summary ({len(failed)} total):")
-        error_counts = {}
-        for r in failed:
-            error = r.get("error", "Unknown error")
-            error_key = error.split('\n')[0][:50]
-            error_counts[error_key] = error_counts.get(error_key, 0) + 1
-        for error, count in sorted(error_counts.items(), key=lambda x: x[1], reverse=True):
-            logger.info(f"   {count}x: {error}")
-
-    # Persist results and summary
-    results_file = out_dir / "tg_results.json"
-    try:
-        serializable_results = make_json_serializable(results)
-        with open(results_file, 'w') as f:
-            json.dump(serializable_results, f, indent=2)
-        logger.info(f"๐Ÿ“„ Results saved to: {results_file}")
-    except Exception as e:
-        logger.error(f"Failed to save JSON results: {e}")
-
-    df_results = pd.DataFrame(results)
-    summary_file = out_dir / "tg_summary.csv"
-    df_results.to_csv(summary_file, index=False)
-    logger.info(f"๐Ÿ“„ Summary saved to: {summary_file}")
-
-    # Success threshold: at least half the samples must pass
-    return success_rate >= 50, len(successful), len(results)
-
-import argparse
-def main():
-    """Main function for dataset generation"""
-
-    # Argument parsing
-    parser = argparse.ArgumentParser(description="Dataset Generator - 100 Samples")
-    parser.add_argument("json_file", type=str, help="Path to the JSON file containing parameters")
-    parser.add_argument("--n_cores", type=int, default=1, help="Number of CPU cores to use (default: 1)")
-    parser.add_argument("--output_dir", type=str, default="result", help="Output directory for the generated dataset")
-    parser.add_argument("-y", "--yes", action="store_true", help="Automatic yes to prompts")
-    args = parser.parse_args()
-    json_file = Path(args.json_file).resolve()
-    output_dir = args.output_dir
-    n_cores = args.n_cores if args.n_cores > 0 else 1
-    if n_cores > os.cpu_count():
-        n_cores = os.cpu_count()
-    print("="*30+" Arguments "+"="*30)
-    print(f"Using {n_cores} CPU cores for parallel processing")
-    print(f"Input file: {json_file}")
-    print(f"Output will be saved to: {output_dir}")
-    print("="*70)
-
-    # Load parameters from JSON
-    # TODO: make this work with other kinds of cells
-    try:
-        parameters = load_tg_parameters_from_json(json_file)
-        n_samples = len(parameters)
-        print(f"Loaded {n_samples} parameter combinations")
-    except FileNotFoundError as e:
-        print(f"โŒ Error: {e}")
-        print("Make sure you have run 'python elhs.py' first to generate the parameters")
-        return False
-    except Exception as e:
-        print(f"โŒ Error loading parameters: {e}")
-        return False
-
-    # Show parameter distribution
-    widths_nmos = [p["width"][0] for p in parameters]
-    widths_pmos = [p["width"][1] for p in parameters]
-    print(f"\n๐Ÿ“‹ Parameter Distribution:")
-    print(f"   NMOS width range: {min(widths_nmos):.2f} - {max(widths_nmos):.2f} ฮผm")
-    print(f"   PMOS width range: {min(widths_pmos):.2f} - {max(widths_pmos):.2f} ฮผm")
-    print(f"   Finger combinations: {len(set(tuple(p['fingers']) for p in parameters))} unique")
-    print(f"   Multiplier combinations: {len(set(tuple(p['multipliers']) for p in parameters))} unique")
-    print(f"\n๐Ÿ“‹ Sample Parameter Examples:")
-    for i, params in enumerate(parameters[:3], 1):
-        nmos_w, pmos_w = params["width"]
-        nmos_l, pmos_l = params["length"]
-        nmos_f, pmos_f = params["fingers"]
-        nmos_m, pmos_m = params["multipliers"]
-        print(f"   {i}. NMOS: {nmos_w:.2f}ฮผm/{nmos_l:.3f}ฮผmร—{nmos_f}fร—{nmos_m} | "
-              f"PMOS: {pmos_w:.2f}ฮผm/{pmos_l:.3f}ฮผmร—{pmos_f}fร—{pmos_m}")
-
-    # Prompt user to continue unless -y/--yes was given
-    if args.yes:
-        print(f"\nAuto-confirming dataset generation for {n_samples} samples (--yes).")
-    else:
-        print(f"\nContinue with transmission gate dataset generation for {n_samples} samples? (y/n): ", end="")
-        response = input().lower().strip()
-        if response != 'y':
-            print("Stopping as requested.")
-            return True
-
-    # Generate dataset
-    print(f"\nStarting generation of {n_samples} transmission gate samples...")
-    success, passed, total = run_dataset_generation(parameters, output_dir, max_workers=n_cores)
-
-    if success:
-        print(f"\n๐ŸŽ‰ Transmission gate dataset generation completed successfully!")
-    else:
-        print(f"\nโš ๏ธ Dataset generation completed with issues")
-    print(f"๐Ÿ“Š Final results: {passed}/{total} samples successful")
-    print(f"๐Ÿ“ Dataset saved to: {output_dir}/")
-    return success
-
-
-if __name__ == "__main__":
-    main()
\ No newline at end of file
diff --git a/src/glayout/blocks/ATLAS/run_lhs_files.py b/src/glayout/blocks/ATLAS/run_lhs_files.py
deleted file mode 100644
index 4a81cb2b..00000000
--- a/src/glayout/blocks/ATLAS/run_lhs_files.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env python3
-"""Run and time LHS generator files once and emit a JSON array of results.
-
-This script will attempt to execute the following files (located in the same
-directory) once each and measure wall-clock time for the run:
-
-- current_mirror.py
-- diff_pair.py
-- fvf.py
-- transmission_gate.py
-- lvcm.py
-
-It records start/stop times, exit codes, elapsed seconds and any stderr output
-into a JSON file named `run_lhs_results.json` and prints the JSON array to
-stdout.
-"""
-import json
-import os
-import sys
-import time
-import subprocess
-
-
-FILES = [
-    "current_mirror.py",
-    "diff_pair.py",
-    "fvf.py",
-    "transmission_gate.py",
-    "lvcm.py",
-]
-
-
-def run_file(path, timeout=120):
-    """Run a python file and time the execution.
Returns a dict with results.""" - start = time.perf_counter() - try: - completed = subprocess.run([sys.executable, path], capture_output=True, text=True, timeout=timeout) - end = time.perf_counter() - return { - "file": os.path.basename(path), - "elapsed_seconds": end - start, - "returncode": completed.returncode, - "stdout": completed.stdout.strip(), - "stderr": completed.stderr.strip(), - } - except subprocess.TimeoutExpired as e: - end = time.perf_counter() - return { - "file": os.path.basename(path), - "elapsed_seconds": end - start, - "returncode": None, - "stdout": "", - "stderr": f"Timeout after {timeout}s", - } - except Exception as e: - end = time.perf_counter() - return { - "file": os.path.basename(path), - "elapsed_seconds": end - start, - "returncode": None, - "stdout": "", - "stderr": f"Exception: {e}", - } - - -def main(): - base = os.path.dirname(os.path.abspath(__file__)) - results = [] - for fname in FILES: - fpath = os.path.join(base, fname) - if not os.path.exists(fpath): - results.append({ - "file": fname, - "elapsed_seconds": None, - "returncode": None, - "stdout": "", - "stderr": "File not found", - }) - continue - print(f"Running {fname}...") - res = run_file(fpath) - print(f" -> {fname}: {res['elapsed_seconds']:.4f}s, returncode={res['returncode']}") - results.append(res) - - out_path = os.path.join(base, "run_lhs_results.json") - with open(out_path, "w") as f: - json.dump(results, f, indent=2) - - # Print only the array of elapsed_seconds for quick consumption, then full JSON - elapsed_array = [r["elapsed_seconds"] for r in results] - print("\nElapsed seconds array:") - print(json.dumps(elapsed_array)) - print("\nFull results saved to:", out_path) - print(json.dumps(results, indent=2)) - - -if __name__ == "__main__": - main() diff --git a/src/glayout/blocks/ATLAS/run_pex.sh b/src/glayout/blocks/ATLAS/run_pex.sh deleted file mode 100755 index 9354aa53..00000000 --- a/src/glayout/blocks/ATLAS/run_pex.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash - -# Usage: ./run_pex.sh layout.gds layout_cell_name - -GDS_FILE=$1 -LAYOUT_CELL=$2 - -# Use the PDK_ROOT environment variable to find the correct magicrc file -MAGICRC_PATH="$PDK_ROOT/sky130A/libs.tech/magic/sky130A.magicrc" - -magic -rcfile "$MAGICRC_PATH" -noconsole -dnull << EOF -gds read $GDS_FILE -flatten $LAYOUT_CELL -load $LAYOUT_CELL -select top cell -extract do local -extract all -ext2sim labels on -ext2sim -extresist tolerance 10 -extresist -ext2spice lvs -ext2spice cthresh 0 -ext2spice extresist on -ext2spice -o ${LAYOUT_CELL}_pex.spice -exit -EOF \ No newline at end of file diff --git a/src/glayout/blocks/ATLAS/sky130A.magicrc b/src/glayout/blocks/ATLAS/sky130A.magicrc deleted file mode 100644 index 50d352c6..00000000 --- a/src/glayout/blocks/ATLAS/sky130A.magicrc +++ /dev/null @@ -1,86 +0,0 @@ -puts stdout "Sourcing design .magicrc for technology sky130A ..." - -# Put grid on 0.005 pitch. This is important, as some commands don't -# rescale the grid automatically (such as lef read?). - -set scalefac [tech lambda] -if {[lindex $scalefac 1] < 2} { - scalegrid 1 2 -} - -# drc off -drc euclidean on -# Change this to a fixed number for repeatable behavior with GDS writes -# e.g., "random seed 12345" -catch {random seed} - -# Turn off the scale option on ext2spice or else it conflicts with the -# scale in the model files. 
-ext2spice scale off
-
-# Allow override of PDK path from environment variable PDKPATH
-if {[catch {set PDKPATH $env(PDKPATH)}]} {
-    set PDKPATH $env(PDK_ROOT)/sky130A
-}
-
-# Load the technology (via PDKPATH rather than a machine-specific conda path)
-tech load ${PDKPATH}/libs.tech/magic/sky130A.tech
-
-# Load the device generator
-source ${PDKPATH}/libs.tech/magic/sky130A.tcl
-
-# load bind keys (optional)
-# source ${PDKPATH}/libs.tech/magic/sky130A-BindKeys
-
-# set units to lambda grid
-snap lambda
-
-# set sky130 standard power, ground, and substrate names
-set VDD VPWR
-set GND VGND
-set SUB VSUBS
-
-# Allow override of type of magic library views used, "mag" or "maglef",
-# from environment variable MAGTYPE
-
-if {[catch {set MAGTYPE $env(MAGTYPE)}]} {
-    set MAGTYPE mag
-}
-
-# add path to reference cells
-if {[file isdir ${PDKPATH}/libs.ref/${MAGTYPE}]} {
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_pr
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_io
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_sc_hd
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_sc_hdll
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_sc_hs
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_sc_hvl
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_sc_lp
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_sc_ls
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_fd_sc_ms
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_osu_sc
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_osu_sc_t18
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_ml_xx_hd
-    addpath ${PDKPATH}/libs.ref/${MAGTYPE}/sky130_sram_macros
-} else {
-    addpath ${PDKPATH}/libs.ref/sky130_fd_pr/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_fd_io/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_fd_sc_hd/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_fd_sc_hdll/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_fd_sc_hs/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_fd_sc_hvl/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_fd_sc_lp/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_fd_sc_ls/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_fd_sc_ms/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_osu_sc/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_osu_sc_t18/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_ml_xx_hd/${MAGTYPE}
-    addpath ${PDKPATH}/libs.ref/sky130_sram_macros/${MAGTYPE}
-}
-
-# add path to GDS cells
-
-# add path to IP from catalog. This procedure defined in the PDK script.
-catch {magic::query_mylib_ip}
-# add path to local IP from user design space. Defined in the PDK script.
-catch {magic::query_my_projects}
diff --git a/src/glayout/blocks/ATLAS/test_comprehensive_fix.py b/src/glayout/blocks/ATLAS/test_comprehensive_fix.py
deleted file mode 100644
index 76da9854..00000000
--- a/src/glayout/blocks/ATLAS/test_comprehensive_fix.py
+++ /dev/null
@@ -1,180 +0,0 @@
-#!/usr/bin/env python3
-"""
-Comprehensive test script to verify that all netlist info dict fixes work correctly.
-Tests multiple components to ensure the fix is applied consistently.
-""" - -import sys -import os -import json -from pathlib import Path - -# Add the glayout path -glayout_path = "/home/arnavshukla/OpenFASOC/openfasoc/generators/glayout" -if glayout_path not in sys.path: - sys.path.insert(0, glayout_path) - -# Set up environment -os.environ['PDK_ROOT'] = '/opt/conda/envs/GLdev/share/pdk' -os.environ['PDK'] = 'sky130A' - -def test_component_info_serialization(component, component_name): - """Test that a component's info dict can be JSON serialized""" - print(f"\nTesting {component_name}...") - - try: - # Check netlist storage - netlist_value = component.info.get('netlist') - netlist_data = component.info.get('netlist_data') - - print(f" Netlist type: {type(netlist_value)}") - print(f" Netlist data type: {type(netlist_data)}") - - success = True - - # Verify netlist is stored as string - if not isinstance(netlist_value, str): - print(f" โŒ FAILED: netlist should be string, got {type(netlist_value)}") - success = False - else: - print(" โœ… SUCCESS: netlist is stored as string") - - # Verify netlist_data is available for gdsfactory 7.16.0+ compatibility - if netlist_data is None: - print(" โš ๏ธ WARNING: netlist_data is None - may not work with gdsfactory 7.16.0+") - elif isinstance(netlist_data, dict): - required_keys = ['circuit_name', 'nodes', 'source_netlist'] - if all(key in netlist_data for key in required_keys): - print(" โœ… SUCCESS: netlist_data contains all required fields for reconstruction") - else: - print(f" โŒ FAILED: netlist_data missing required keys: {[k for k in required_keys if k not in netlist_data]}") - success = False - else: - print(f" โŒ FAILED: netlist_data should be dict, got {type(netlist_data)}") - success = False - - # Test JSON serialization - try: - info_copy = {} - for key, value in component.info.items(): - if isinstance(value, (str, int, float, bool, list, tuple, dict)): - info_copy[key] = value - else: - info_copy[key] = str(value) - - json_str = json.dumps(info_copy, indent=2) - print(" โœ… SUCCESS: info dict can be JSON serialized") - - except Exception as e: - print(f" โŒ FAILED: JSON serialization failed: {e}") - success = False - - return success - - except Exception as e: - print(f" โŒ FAILED: Test failed with error: {e}") - return False - -def main(): - """Test multiple components to ensure consistent behavior""" - print("๐Ÿงช Comprehensive Netlist Serialization Test") - print("=" * 60) - - from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk - pdk = sky130_mapped_pdk - - test_results = [] - - # Test 1: Basic FETs - try: - print("\n๐Ÿ“‹ Testing Basic Components...") - from glayout.flow.primitives.fet import nmos, pmos - - nfet = nmos(pdk, width=1.0, length=0.15, fingers=1) - test_results.append(("NMOS", test_component_info_serialization(nfet, "NMOS"))) - - pfet = pmos(pdk, width=2.0, length=0.15, fingers=1) - test_results.append(("PMOS", test_component_info_serialization(pfet, "PMOS"))) - - except Exception as e: - print(f"โŒ Failed to test basic FETs: {e}") - test_results.append(("Basic FETs", False)) - - # Test 2: Transmission Gate - try: - print("\n๐Ÿ“‹ Testing Transmission Gate...") - from transmission_gate import transmission_gate - - tg = transmission_gate( - pdk=pdk, - width=(1.0, 2.0), - length=(0.15, 0.15), - fingers=(1, 1), - multipliers=(1, 1) - ) - test_results.append(("Transmission Gate", test_component_info_serialization(tg, "Transmission Gate"))) - - except Exception as e: - print(f"โŒ Failed to test transmission gate: {e}") - test_results.append(("Transmission Gate", False)) - - # Test 
3: FVF (if available)
-    try:
-        print("\n๐Ÿ“‹ Testing Flipped Voltage Follower...")
-        from fvf import flipped_voltage_follower
-
-        fvf = flipped_voltage_follower(
-            pdk=pdk,
-            width=(1.0, 0.5),
-            length=(0.15, 0.15),
-            fingers=(1, 1)
-        )
-        test_results.append(("FVF", test_component_info_serialization(fvf, "Flipped Voltage Follower")))
-
-    except Exception as e:
-        print(f"โš ๏ธ FVF test skipped: {e}")
-
-    # Test 4: MIM Capacitor (if available)
-    try:
-        print("\n๐Ÿ“‹ Testing MIM Capacitor...")
-        from glayout.flow.primitives.mimcap import mimcap
-
-        cap = mimcap(pdk=pdk, size=(5.0, 5.0))
-        test_results.append(("MIM Cap", test_component_info_serialization(cap, "MIM Capacitor")))
-
-    except Exception as e:
-        print(f"โš ๏ธ MIM Cap test skipped: {e}")
-
-    # Summary
-    print("\n" + "=" * 60)
-    print("๐Ÿ“Š TEST SUMMARY")
-    print("=" * 60)
-
-    passed = sum(1 for _, result in test_results if result)
-    total = len(test_results)
-
-    for component_name, result in test_results:
-        status = "โœ… PASS" if result else "โŒ FAIL"
-        print(f"{status}: {component_name}")
-
-    print(f"\nOverall: {passed}/{total} tests passed ({passed/total*100:.1f}%)")
-
-    if passed == total:
-        print("\n๐ŸŽ‰ ALL TESTS PASSED!")
-        print("The gymnasium info dict error should be resolved.")
-        print("\nSolution Summary:")
-        print("- All netlist objects are now stored as strings in component.info['netlist']")
-        print("- Netlist data is preserved in component.info['netlist_data'] for reconstruction")
-        print("- This prevents gymnasium from encountering unsupported object types")
-        print("- Compatible with both gdsfactory 7.7.0 and 7.16.0+ strict Pydantic validation")
-        return True
-    else:
-        print(f"\nโš ๏ธ {total - passed} tests failed. Some issues may remain.")
-        return False
-
-if __name__ == "__main__":
-    success = main()
-    if success:
-        print("\nโœ… Fix validation completed successfully!")
-    else:
-        print("\nโŒ Some issues detected. Please review the failed tests.")
diff --git a/src/glayout/blocks/ATLAS/test_lvs_fix.py b/src/glayout/blocks/ATLAS/test_lvs_fix.py
deleted file mode 100644
index 1fce7709..00000000
--- a/src/glayout/blocks/ATLAS/test_lvs_fix.py
+++ /dev/null
@@ -1,198 +0,0 @@
-#!/usr/bin/env python3
-"""
-Test script to verify LVS functionality works with the netlist serialization fix.
-Tests specifically for the 'str' object has no attribute 'generate_netlist' error.
-""" - -import sys -import os -from pathlib import Path - -# Add the glayout path -glayout_path = "/home/arnavshukla/OpenFASOC/openfasoc/generators/glayout" -if glayout_path not in sys.path: - sys.path.insert(0, glayout_path) - -# Set up environment -os.environ['PDK_ROOT'] = '/opt/conda/envs/GLdev/share/pdk' -os.environ['PDK'] = 'sky130A' - -def test_lvs_netlist_generation(): - """Test that LVS can generate netlists from component info without errors""" - print("๐Ÿงช Testing LVS Netlist Generation Fix...") - - try: - from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk - from transmission_gate import transmission_gate, add_tg_labels - - pdk = sky130_mapped_pdk - - print("๐Ÿ“‹ Creating transmission gate component...") - tg = transmission_gate( - pdk=pdk, - width=(1.0, 2.0), - length=(0.15, 0.15), - fingers=(1, 1), - multipliers=(1, 1) - ) - - print("๐Ÿ“‹ Adding labels...") - tg_labeled = add_tg_labels(tg, pdk) - tg_labeled.name = "test_transmission_gate" - - print("๐Ÿ“‹ Testing netlist generation in LVS context...") - - # Test the netlist generation logic from mappedpdk.py - from glayout.flow.spice.netlist import Netlist - - # Simulate what happens in lvs_netgen when netlist is None - layout = tg_labeled - - # Try to get stored object first (for older gdsfactory versions) - if 'netlist_obj' in layout.info: - print("โœ… Found netlist_obj in component.info") - netlist_obj = layout.info['netlist_obj'] - # Try to reconstruct from netlist_data (for newer gdsfactory versions) - elif 'netlist_data' in layout.info: - print("โœ… Found netlist_data in component.info") - data = layout.info['netlist_data'] - netlist_obj = Netlist( - circuit_name=data['circuit_name'], - nodes=data['nodes'] - ) - netlist_obj.source_netlist = data['source_netlist'] - else: - # Fallback: if it's already a string, use it directly - print("โ„น๏ธ Using string fallback for netlist") - netlist_string = layout.info.get('netlist', '') - if not isinstance(netlist_string, str): - print("โŒ FAILED: Expected string fallback but got:", type(netlist_string)) - return False - netlist_obj = None - - # Generate netlist if we have a netlist object - if netlist_obj is not None: - print("๐Ÿ“‹ Testing generate_netlist() call...") - try: - netlist_content = netlist_obj.generate_netlist() - print("โœ… SUCCESS: generate_netlist() worked without error") - print(f"๐Ÿ“„ Generated netlist length: {len(netlist_content)} characters") - - # Verify it contains expected content - if 'Transmission_Gate' in netlist_content: - print("โœ… SUCCESS: Netlist contains expected circuit name") - else: - print("โš ๏ธ WARNING: Netlist doesn't contain expected circuit name") - - return True - - except AttributeError as e: - if "'str' object has no attribute 'generate_netlist'" in str(e): - print("โŒ FAILED: Still getting the 'str' object error:", e) - return False - else: - print("โŒ FAILED: Unexpected AttributeError:", e) - return False - except Exception as e: - print("โŒ FAILED: Unexpected error during generate_netlist():", e) - return False - else: - print("โ„น๏ธ No netlist object to test - using string representation") - netlist_string = layout.info.get('netlist', '') - if isinstance(netlist_string, str) and len(netlist_string) > 0: - print("โœ… SUCCESS: String netlist available as fallback") - return True - else: - print("โŒ FAILED: No valid netlist representation found") - return False - - except Exception as e: - print(f"โŒ FAILED: Test failed with error: {e}") - import traceback - traceback.print_exc() - return False - -def 
test_actual_lvs_call(): - """Test a simplified LVS call to see if it works""" - print("\n๐Ÿงช Testing Actual LVS Functionality...") - - try: - from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk - from transmission_gate import transmission_gate, add_tg_labels - - pdk = sky130_mapped_pdk - - print("๐Ÿ“‹ Creating and labeling transmission gate...") - tg = transmission_gate(pdk=pdk, width=(1.0, 2.0), length=(0.15, 0.15)) - tg_labeled = add_tg_labels(tg, pdk) - tg_labeled.name = "lvs_test_tg" - - print("๐Ÿ“‹ Writing GDS file...") - gds_file = "lvs_test_tg.gds" - tg_labeled.write_gds(gds_file) - - print("๐Ÿ“‹ Attempting LVS call...") - try: - # This should not fail with the "'str' object has no attribute 'generate_netlist'" error - result = pdk.lvs_netgen(tg_labeled, "lvs_test_tg") - print("โœ… SUCCESS: LVS call completed without netlist generation error") - print("๐Ÿ“Š LVS result keys:", list(result.keys()) if isinstance(result, dict) else "Not a dict") - return True - - except AttributeError as e: - if "'str' object has no attribute 'generate_netlist'" in str(e): - print("โŒ FAILED: LVS still has the 'str' object error:", e) - return False - else: - print("โš ๏ธ LVS failed with different AttributeError (may be expected):", e) - return True # The specific error we're fixing is resolved - - except Exception as e: - print("โš ๏ธ LVS failed with other error (may be expected in test environment):", e) - print("โ„น๏ธ This is likely due to missing PDK files or tools, not our fix") - return True # The specific error we're fixing is resolved - - except Exception as e: - print(f"โŒ FAILED: Test failed with error: {e}") - import traceback - traceback.print_exc() - return False - -def main(): - """Main test function""" - print("๐Ÿ”ง Testing LVS Netlist Generation Fix") - print("=" * 50) - - test1_passed = test_lvs_netlist_generation() - test2_passed = test_actual_lvs_call() - - print("\n" + "=" * 50) - print("๐Ÿ“Š TEST SUMMARY") - print("=" * 50) - - if test1_passed: - print("โœ… PASS: Netlist generation logic") - else: - print("โŒ FAIL: Netlist generation logic") - - if test2_passed: - print("โœ… PASS: LVS call functionality") - else: - print("โŒ FAIL: LVS call functionality") - - overall_success = test1_passed and test2_passed - - if overall_success: - print("\n๐ŸŽ‰ ALL TESTS PASSED!") - print("The 'str' object has no attribute 'generate_netlist' error should be resolved.") - return True - else: - print("\nโš ๏ธ Some tests failed. The LVS fix may need further adjustment.") - return False - -if __name__ == "__main__": - success = main() - if success: - print("\nโœ… LVS fix validation completed successfully!") - else: - print("\nโŒ LVS fix validation failed.") diff --git a/src/glayout/blocks/ATLAS/test_netlist_fix.py b/src/glayout/blocks/ATLAS/test_netlist_fix.py deleted file mode 100644 index d49cfbbb..00000000 --- a/src/glayout/blocks/ATLAS/test_netlist_fix.py +++ /dev/null @@ -1,87 +0,0 @@ -#!/usr/bin/env python3 -""" -Test script to verify that the netlist info dict fix works correctly. 
-""" - -import sys -import os -from pathlib import Path - -# Add the glayout path -glayout_path = "/home/arnavshukla/OpenFASOC/openfasoc/generators/glayout" -if glayout_path not in sys.path: - sys.path.insert(0, glayout_path) - -# Set up environment -os.environ['PDK_ROOT'] = '/opt/conda/envs/GLdev/share/pdk' -os.environ['PDK'] = 'sky130A' - -from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk -from transmission_gate import transmission_gate, add_tg_labels - -def test_netlist_serialization(): - """Test that netlist objects are properly serialized in component.info""" - print("Testing transmission gate netlist serialization...") - - try: - # Create a transmission gate with default parameters - tg = transmission_gate( - pdk=sky130_mapped_pdk, - width=(1.0, 2.0), - length=(0.15, 0.15), - fingers=(1, 1), - multipliers=(1, 1) - ) - - # Check that netlist is stored as string (not object) - netlist_value = tg.info.get('netlist') - netlist_obj = tg.info.get('netlist_obj') - - print(f"Netlist type: {type(netlist_value)}") - print(f"Netlist object type: {type(netlist_obj)}") - - # Verify types - if isinstance(netlist_value, str): - print("โœ… SUCCESS: netlist is stored as string") - else: - print(f"โŒ FAILED: netlist is stored as {type(netlist_value)}") - return False - - if netlist_obj is not None and hasattr(netlist_obj, 'circuit_name'): - print("โœ… SUCCESS: netlist_obj is available for internal use") - else: - print("โŒ FAILED: netlist_obj is not properly stored") - return False - - # Test that we can create JSON-serializable info dict - import json - try: - # Create a copy of info dict with only basic types - info_copy = {} - for key, value in tg.info.items(): - if isinstance(value, (str, int, float, bool, list, tuple)): - info_copy[key] = value - else: - info_copy[key] = str(value) - - json_str = json.dumps(info_copy, indent=2) - print("โœ… SUCCESS: info dict can be JSON serialized") - print(f"JSON preview: {json_str[:200]}...") - - except Exception as e: - print(f"โŒ FAILED: JSON serialization failed: {e}") - return False - - return True - - except Exception as e: - print(f"โŒ FAILED: Test failed with error: {e}") - return False - -if __name__ == "__main__": - print("Testing netlist serialization fix...") - success = test_netlist_serialization() - if success: - print("\n๐ŸŽ‰ All tests passed! The fix should resolve the gymnasium info dict error.") - else: - print("\nโš ๏ธ Tests failed. 
The issue may not be fully resolved.")
diff --git a/src/glayout/blocks/ATLAS/transmission_gate.py b/src/glayout/blocks/ATLAS/transmission_gate.py
deleted file mode 100644
index 3e42e7dc..00000000
--- a/src/glayout/blocks/ATLAS/transmission_gate.py
+++ /dev/null
@@ -1,182 +0,0 @@
-from glayout.flow.pdk.mappedpdk import MappedPDK
-from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk
-from gdsfactory.cell import cell
-from gdsfactory import Component
-from glayout.flow.primitives.fet import nmos, pmos, multiplier
-from glayout.flow.pdk.util.comp_utils import evaluate_bbox, prec_center, align_comp_to_port, movex, movey
-from glayout.flow.pdk.util.snap_to_grid import component_snap_to_grid
-from glayout.flow.pdk.util.port_utils import rename_ports_by_orientation
-from glayout.flow.routing.straight_route import straight_route
-from glayout.flow.routing.c_route import c_route
-from glayout.flow.routing.L_route import L_route
-from glayout.flow.primitives.guardring import tapring
-from glayout.flow.pdk.util.port_utils import add_ports_perimeter
-from glayout.flow.spice.netlist import Netlist
-from glayout.flow.primitives.via_gen import via_stack
-from gdsfactory.components import text_freetype, rectangle
-try:
-    from evaluator_wrapper import run_evaluation  # pyright: ignore[reportMissingImports]
-except ImportError:
-    print("Warning: evaluator_wrapper not found. Evaluation will be skipped.")
-    run_evaluation = None
-
-def add_tg_labels(tg_in: Component,
-                  pdk: MappedPDK
-                  ) -> Component:
-
-    tg_in.unlock()
-    # list that will contain all port/comp info
-    move_info = list()
-    # create labels and append to info list
-    # vin
-    vinlabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy()
-    vinlabel.add_label(text="VIN",layer=pdk.get_glayer("met2_label"))
-    move_info.append((vinlabel,tg_in.ports["N_multiplier_0_source_E"],None))
-
-    # vout
-    voutlabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy()
-    voutlabel.add_label(text="VOUT",layer=pdk.get_glayer("met2_label"))
-    move_info.append((voutlabel,tg_in.ports["P_multiplier_0_drain_W"],None))
-
-    # vcc
-    vcclabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.5,0.5),centered=True).copy()
-    vcclabel.add_label(text="VCC",layer=pdk.get_glayer("met2_label"))
-    move_info.append((vcclabel,tg_in.ports["P_tie_S_top_met_S"],None))
-
-    # vss
-    vsslabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.5,0.5),centered=True).copy()
-    vsslabel.add_label(text="VSS",layer=pdk.get_glayer("met2_label"))
-    move_info.append((vsslabel,tg_in.ports["N_tie_S_top_met_N"], None))
-
-    # VGP
-    vgplabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy()
-    vgplabel.add_label(text="VGP",layer=pdk.get_glayer("met2_label"))
-    move_info.append((vgplabel,tg_in.ports["P_multiplier_0_gate_E"], None))
-
-    # VGN
-    vgnlabel = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.27,0.27),centered=True).copy()
-    vgnlabel.add_label(text="VGN",layer=pdk.get_glayer("met2_label"))
-    move_info.append((vgnlabel,tg_in.ports["N_multiplier_0_gate_E"], None))
-
-    # move everything to position
-    for comp, prt, alignment in move_info:
-        alignment = ('c','b') if alignment is None else alignment
-        compref = align_comp_to_port(comp, prt, alignment=alignment)
-        tg_in.add(compref)
-    return tg_in.flatten()
-
-
-def get_component_netlist(component) -> Netlist:
-    """Helper function to extract netlist from component with version compatibility"""
-    if hasattr(component.info, 'get'):
-        # Check if netlist object is stored directly
-        if 'netlist' in component.info:
-            netlist_obj = component.info['netlist']
-            if isinstance(netlist_obj, str):
-                # It's a string representation; for gymnasium compatibility no
-                # netlist_data is stored here, so wrap it in a simple Netlist
-                return Netlist(source_netlist=netlist_obj)
-            else:
-                # It's already a Netlist object
-                return netlist_obj
-
-    # Fallback: return empty netlist
-    return Netlist()
-
-def tg_netlist(nfet_comp, pfet_comp) -> str:
-    """Generate SPICE netlist string for transmission gate - gymnasium compatible"""
-
-    # Get the SPICE netlists directly from components
-    nmos_spice = nfet_comp.info.get('netlist', '')
-    pmos_spice = pfet_comp.info.get('netlist', '')
-
-    if not nmos_spice or not pmos_spice:
-        raise ValueError("Component netlists not found")
-
-    # Create the transmission gate SPICE netlist by combining the primitives.
-    # G_n (the complementary gate) must be a subcircuit port; otherwise it
-    # would be left floating inside the netlist.
-    tg_spice = f"""{nmos_spice}
-
-{pmos_spice}
-
-.subckt transmission_gate D G G_n S VDD VSS
-* PMOS: connects D to S when G is low (G_n is high)
-X0 D G_n S VDD PMOS
-* NMOS: connects D to S when G is high
-X1 D G S VSS NMOS
-.ends transmission_gate
-"""
-
-    return tg_spice
-
-@cell
-def transmission_gate(
-        pdk: MappedPDK,
-        width: tuple[float,float] = (1,1),
-        length: tuple[float,float] = (None,None),
-        fingers: tuple[int,int] = (1,1),
-        multipliers: tuple[int,int] = (1,1),
-        substrate_tap: bool = False,
-        tie_layers: tuple[str,str] = ("met2","met1"),
-        **kwargs
-    ) -> Component:
-    """
-    creates a transmission gate
-    tuples are in (NMOS,PMOS) order
-    **kwargs are any kwarg that is supported by nmos and pmos
-    """
-
-    # top level component
-    top_level = Component(name="transmission_gate")
-
-    # two fets
-    nfet = nmos(pdk, width=width[0], fingers=fingers[0], multipliers=multipliers[0], with_dummy=True, with_dnwell=False, with_substrate_tap=False, length=length[0], **kwargs)
-    pfet = pmos(pdk, width=width[1], fingers=fingers[1], multipliers=multipliers[1], with_dummy=True, with_substrate_tap=False, length=length[1], **kwargs)
-    nfet_ref = top_level << nfet
-    pfet_ref = top_level << pfet
-    pfet_ref = rename_ports_by_orientation(pfet_ref.mirror_y())
-
-    # Relative move
-    pfet_ref.movey(nfet_ref.ymax + evaluate_bbox(pfet_ref)[1]/2 + pdk.util_max_metal_seperation())
-
-    # Routing
-    top_level << c_route(pdk, nfet_ref.ports["multiplier_0_source_E"], pfet_ref.ports["multiplier_0_source_E"])
-    top_level << c_route(pdk, nfet_ref.ports["multiplier_0_drain_W"], pfet_ref.ports["multiplier_0_drain_W"], viaoffset=False)
-
-    # Renaming Ports
-    top_level.add_ports(nfet_ref.get_ports_list(), prefix="N_")
-    top_level.add_ports(pfet_ref.get_ports_list(), prefix="P_")
-
-    # substrate tap
-    if substrate_tap:
-        substrate_tap_encloses = ((evaluate_bbox(top_level)[0]+pdk.util_max_metal_seperation()), (evaluate_bbox(top_level)[1]+pdk.util_max_metal_seperation()))
-        guardring_ref = top_level << tapring(
-            pdk,
-            enclosed_rectangle=substrate_tap_encloses,
-            sdlayer="p+s/d",
-            horizontal_glayer='met2',
-            vertical_glayer='met1',
-        )
-        guardring_ref.move(nfet_ref.center).movey(evaluate_bbox(pfet_ref)[1]/2 + pdk.util_max_metal_seperation()/2)
-        top_level.add_ports(guardring_ref.get_ports_list(),prefix="tap_")
-
-    component = component_snap_to_grid(rename_ports_by_orientation(top_level))
-    # Generate netlist as SPICE string for gymnasium compatibility
-    netlist_string = tg_netlist(nfet, pfet)
-
-    # Store as string for gymnasium compatibility - the LVS method supports this directly
-    component.info['netlist'] = netlist_string
-
-    return component
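The string-valued `component.info['netlist']` above is exactly the invariant the ATLAS test scripts in this patch exercise. A minimal editorial sanity check of it (not part of the module; assumes the sky130 PDK is installed and `transmission_gate` is importable):

```python
import json
from glayout.flow.pdk.sky130_mapped import sky130_mapped_pdk
from transmission_gate import transmission_gate

comp = transmission_gate(sky130_mapped_pdk, width=(1.0, 2.0))
assert isinstance(comp.info["netlist"], str)   # stored as plain text, not a Netlist object
json.dumps({"netlist": comp.info["netlist"]})  # serializes cleanly, so gymnasium info dicts work
```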
-
-if __name__=="__main__":
-    # use a distinct local name so the transmission_gate function is not shadowed
-    tg = add_tg_labels(transmission_gate(sky130_mapped_pdk), sky130_mapped_pdk)
-    tg.show()
-    tg.name = "Transmission_Gate"
-    #magic_drc_result = sky130_mapped_pdk.drc_magic(tg, tg.name)
-    #netgen_lvs_result = sky130_mapped_pdk.lvs_netgen(tg, tg.name)
-    tg.write_gds("transmission_gate.gds")
-    res = run_evaluation("transmission_gate.gds", tg.name, tg)
\ No newline at end of file
diff --git a/src/glayout/pdk/gf180_mapped/gf180_grules.py b/src/glayout/pdk/gf180_mapped/gf180_grules.py
index bf211155..ec13a453 100644
--- a/src/glayout/pdk/gf180_mapped/gf180_grules.py
+++ b/src/glayout/pdk/gf180_mapped/gf180_grules.py
@@ -1,8 +1,10 @@
 from ..mappedpdk import MappedPDK
 
 grulesobj = dict()
-for glayer in MappedPDK.valid_glayers:
-    grulesobj[glayer] = dict((x, None) for x in MappedPDK.valid_glayers)
+# Add sab and res_mk to valid_glayers for polyresistor support
+extended_glayers = list(MappedPDK.valid_glayers) + ["sab", "res_mk"]
+for glayer in extended_glayers:
+    grulesobj[glayer] = dict((x, None) for x in extended_glayers)
 
 grulesobj["dnwell"]["dnwell"] = {'min_width': 1.7, 'min_separation': 5.42}
 grulesobj["dnwell"]["pwell"] = {'min_enclosure': 2.5}
@@ -366,3 +368,52 @@ grulesobj["capmet"]["met5"] = {}
 grulesobj["capmet"]["capmet"] = {'capmettop': (42, 0), 'capmetbottom': (36, 0), 'min_separation': 1.2}
 
+# Initialize sab and res_mk grules
+grulesobj["sab"]["sab"] = {'min_width': 0.28, 'min_separation': 0.28}
+grulesobj["sab"]["dnwell"] = {}
+grulesobj["sab"]["pwell"] = {}
+grulesobj["sab"]["nwell"] = {}
+grulesobj["sab"]["p+s/d"] = {}
+grulesobj["sab"]["n+s/d"] = {}
+grulesobj["sab"]["active_diff"] = {}
+grulesobj["sab"]["active_tap"] = {}
+grulesobj["sab"]["poly"] = {"min_enclosure": 0.28}
+grulesobj["sab"]["mcon"] = {"min_separation": 0.22}
+grulesobj["sab"]["met1"] = {}
+grulesobj["sab"]["via1"] = {}
+grulesobj["sab"]["met2"] = {}
+grulesobj["sab"]["via2"] = {}
+grulesobj["sab"]["met3"] = {}
+grulesobj["sab"]["via3"] = {}
+grulesobj["sab"]["met4"] = {}
+grulesobj["sab"]["via4"] = {}
+grulesobj["sab"]["met5"] = {}
+grulesobj["sab"]["capmet"] = {}
+
+grulesobj["res_mk"]["res_mk"] = {"min_width": 0.4, "min_separation": 20.0}
+grulesobj["res_mk"]["dnwell"] = {}
+grulesobj["res_mk"]["pwell"] = {}
+grulesobj["res_mk"]["nwell"] = {}
+grulesobj["res_mk"]["p+s/d"] = {}
+grulesobj["res_mk"]["n+s/d"] = {}
+grulesobj["res_mk"]["active_diff"] = {"min_separation": 0.3}
+grulesobj["res_mk"]["active_tap"] = {}
+grulesobj["res_mk"]["poly"] = {"min_enclosure": 0.4, "min_separation": 0.3}
+grulesobj["res_mk"]["mcon"] = {}
+grulesobj["res_mk"]["met1"] = {}
+grulesobj["res_mk"]["via1"] = {}
+grulesobj["res_mk"]["met2"] = {}
+grulesobj["res_mk"]["via2"] = {}
+grulesobj["res_mk"]["met3"] = {}
+grulesobj["res_mk"]["via3"] = {}
+grulesobj["res_mk"]["met4"] = {}
+grulesobj["res_mk"]["via4"] = {}
+grulesobj["res_mk"]["met5"] = {}
+grulesobj["res_mk"]["capmet"] = {}
+grulesobj["res_mk"]["sab"] = {}
+
+# Update existing rules for polyresistor compatibility
+grulesobj["p+s/d"]["poly"] = {"min_enclosure": 0.3}
+grulesobj["p+s/d"]["mcon"] = {"min_enclosure": 0.2}
+grulesobj["poly"]["active_diff"] = {"min_separation": 0.6}
+grulesobj["poly"]["poly"] = {"min_width": 1, "min_separation": 0.4}
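These new rule entries are read back through the usual `MappedPDK.get_grule` lookups, which is how `polyres.py` below consumes them. A quick sketch with values taken from the table above:

```python
from glayout.pdk.gf180_mapped import gf180_mapped_pdk as pdk

sab_encl = pdk.get_grule("sab", "poly")["min_enclosure"]               # 0.28 um: SAB must enclose poly2
resmk_sep = pdk.get_grule("res_mk", "poly")["min_separation"]          # 0.3 um: RES_MK to unrelated poly2
resmk_diff = pdk.get_grule("res_mk", "active_diff")["min_separation"]  # 0.3 um: RES_MK to COMP
```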
diff --git a/src/glayout/pdk/gf180_mapped/gf180_mapped.py b/src/glayout/pdk/gf180_mapped/gf180_mapped.py
index e9091cf1..b191d41e 100644
--- a/src/glayout/pdk/gf180_mapped/gf180_mapped.py
+++ b/src/glayout/pdk/gf180_mapped/gf180_mapped.py
@@ -30,6 +30,8 @@
     "lvpwell": (204, 0),
     "dnwell": (12, 0),
     "CAP_MK": (117, 5),
+    "RES_MK": (110, 5),
+    "SAB": (49, 0),
     # BJT layers
     "drc_bjt": (127, 5),
     "lvs_bjt": (118, 5),
@@ -63,6 +65,8 @@
     "pwell": "lvpwell",
     "dnwell": "dnwell",
     "capmet": "CAP_MK",
+    "res_mk": "RES_MK",
+    "sab": "SAB",
     # bjt layer
     "drc_bjt": "drc_bjt",
     "lvs_bjt": "lvs_bjt",
@@ -131,7 +135,12 @@
     models={
         'nfet': 'nfet_03v3',
         'pfet': 'pfet_03v3',
-        'mimcap': 'mimcap_1p0fF'
+        'mimcap': 'mimcap_1p0fF',
+        'ppolyf_s': 'ppolyf_s',
+        'ppolyf_u': 'ppolyf_u',
+        'npolyf_s': 'npolyf_s',
+        'npolyf_u': 'npolyf_u',
+        'ppolyf_u_1k': 'ppolyf_u_1k'
     },
     layers=LAYER,
     pdk_files=pdk_files,
diff --git a/src/glayout/pdk/mappedpdk.py b/src/glayout/pdk/mappedpdk.py
index d6193ff3..173c485a 100644
--- a/src/glayout/pdk/mappedpdk.py
+++ b/src/glayout/pdk/mappedpdk.py
@@ -3,7 +3,8 @@
 """
 import re
 from gdsfactory.pdk import Pdk
-from gdsfactory.typings import Component, PathType, Layer
+from gdsfactory import Component
+from gdsfactory.typings import PathType, Layer
 from pydantic import validator, StrictStr, ValidationError
 from typing import ClassVar, Optional, Any, Union, Literal, Iterable, TypedDict
 from pathlib import Path
@@ -259,6 +260,8 @@ class MappedPDK(Pdk):
         "via4",
         "met5",
         "capmet",
+        "sab",
+        "res_mk",
         "lvs_bjt",
         "drc_bjt",
         # _pin layers
@@ -299,8 +302,8 @@ class MappedPDK(Pdk):
     @validator("models")
     def models_check(cls, models_obj: dict[StrictStr, StrictStr]):
         for model in models_obj.keys():
-            if not model in ["nfet","pfet","mimcap"]:
-                raise ValueError(f"specify nfet, pfet, or mimcap models only")
+            if model not in ["nfet","pfet","mimcap","ppolyf_s","ppolyf_u","npolyf_s","npolyf_u","ppolyf_u_1k"]:
+                raise ValueError("specify nfet, pfet, mimcap, or polyresistor models only")
         return models_obj
 
     @validator("glayers")
diff --git a/src/glayout/primitives/polyres.py b/src/glayout/primitives/polyres.py
new file mode 100644
index 00000000..37e481cb
--- /dev/null
+++ b/src/glayout/primitives/polyres.py
@@ -0,0 +1,311 @@
+import numpy as np
+np.float_ = np.float64  # np.float_ was removed in NumPy 2.0; restore the alias for older dependencies
+
+
+from gdsfactory.components import rectangle
+from gdsfactory import Component
+from glayout.pdk.mappedpdk import MappedPDK
+from glayout.primitives.via_gen import via_array
+from glayout.util.comp_utils import prec_ref_center, movey, align_comp_to_port, movex
+from glayout.util.port_utils import add_ports_perimeter
+from glayout.pdk.sky130_mapped import sky130_mapped_pdk
+from glayout.pdk.gf180_mapped import gf180_mapped_pdk
+from glayout.spice import Netlist
+from glayout.primitives.guardring import tapring
+
+def poly_resistor_netlist(
+    circuit_name: str,
+    model: str,
+    width: float,
+    length: float,
+    multipliers: int
+) -> Netlist:
+
+    ltop = (round(length, 2))*(1e-6)
+    wtop = (round(width, 2))*(1e-6)
+    mtop = multipliers
+
+    #source_netlist=""".subckt {model} r0 r1 """+f'\n l={ltop} w={wtop} '
+
+    #source_netlist += "\n.ends"
+
+    source_netlist="""\n.subckt {circuit_name} {nodes} """+f'l={ltop} w={wtop} m={mtop}'+"""
+XMAIN PLUS MINUS VSUBS {model} r_width={{w}} r_length={{l}} m={{m}}"""
+
+    source_netlist += "\n.ends {circuit_name}"
+
+    return Netlist(
+        circuit_name=circuit_name,
+        nodes=['PLUS', 'MINUS', 'VSUBS'],
+        source_netlist=source_netlist,
+        instance_format="X{name} {nodes} {circuit_name} l={length} w={width} m={multipliers}",
+        parameters={
+            'model': model,
+            'length': ltop,
+            'width': wtop,
+            'multipliers': mtop,
+        }
+    )
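To make the brace escaping concrete: `{circuit_name}`, `{nodes}` and `{model}` are template fields, while the doubled `{{w}}`/`{{l}}`/`{{m}}` survive the f-string as literal `{w}`/`{l}`/`{m}` for later instance-level substitution. Assuming `Netlist.generate_netlist()` performs the first substitution pass (the ATLAS test scripts above call it that way), the default p-type unsalicided resistor would render roughly as:

```python
nl = poly_resistor_netlist("POLY_RES", "ppolyf_u", width=0.35, length=1.65, multipliers=1)
print(nl.generate_netlist())
# Expected shape (illustrative; exact formatting depends on Netlist internals):
# .subckt POLY_RES PLUS MINUS VSUBS l=1.65e-06 w=3.5e-07 m=1
# XMAIN PLUS MINUS VSUBS ppolyf_u r_width={w} r_length={l} m={m}
# .ends POLY_RES
```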
+def poly_resistor(
+    pdk: MappedPDK,
+    length: float = 1.65,
+    width: float = 0.35,
+    fingers: int = 1,
+    tie_layers: tuple[str,str] = ("met2","met2"),
+    is_snake: bool = True,
+    n_type: bool = False,
+    silicided: bool = False
+) -> Component:
+    # Resolve the marker layers through the PDK mapping added in gf180_mapped.py
+    # (GF180: SAB = (49, 0), RES_MK = (110, 5)) instead of hardcoding layer tuples
+    sab = pdk.get_glayer("sab")
+    res_mk = pdk.get_glayer("res_mk")
+    p_res = Component()
+    contact_length = 2.2
+    # Calculate separation so that all spacing rules are met.
+    # PRES.4 requires 0.6ยตm spacing from a poly2 resistor to unrelated poly2;
+    # take the stricter of that and the met1 spacing rule.
+    poly2_resistor_min_sep = 0.6
+    met1_min_sep = pdk.get_grule("met1", "met1").get("min_separation", 0.23)
+    # Separation must satisfy: spacing = center_to_center - width >= min_separation,
+    # so center_to_center >= width + min_separation
+    separation = width + max(poly2_resistor_min_sep, met1_min_sep)
+    # Snap to grid for proper alignment
+    separation = pdk.snap_to_2xgrid(separation)
+    # Extend poly for contacts
+    ex_length = length + 2*contact_length
+
+    # Calculate SAB layer width: account for finger spacing (separation between fingers).
+    # SB.5a requires 0.3ยตm spacing from SAB to unrelated poly2, but for related poly2
+    # (same resistor fingers), SAB should cover all fingers with proper overlap.
+    if fingers > 1:
+        # Total width = first finger + spacing*(fingers-1) + overlap/extensions
+        sab_width = width + (fingers - 1) * separation + 0.56  # 0.56 for overlap/extensions
+    else:
+        sab_width = width + 0.56  # Single finger case
+
+    # Add the unsalicide (SAB) layer only for unsilicided resistors; it covers
+    # all fingers and is created once, outside the finger loop
+    if not silicided:
+        unsal = rectangle(size=(sab_width, length), layer=sab, centered=True)
+        unsal_ref = prec_ref_center(unsal)
+        p_res.add(unsal_ref)
+
+    # Add RES_MK layer; matches the SAB dimensions (created once, outside the loop)
+    resmk = rectangle(size=(sab_width, length), layer=res_mk, centered=True)
+    resmk_ref = prec_ref_center(resmk)
+    p_res.add(resmk_ref)
+
+    for i in range(fingers):
+        # poly resistor rectangle
+        p_rect = rectangle(size=(width,ex_length), layer=pdk.get_glayer("poly"), centered=True)
+        p_rect_ref = prec_ref_center(p_rect)
+        p_res.add(p_rect_ref)
+        movex(p_rect_ref, (i)*separation)
+        # Add li layer on top and bottom contacts
+        li_top = rectangle(size=(width,contact_length), layer=pdk.get_glayer("met1"), centered=True)
+        li_top_ref = prec_ref_center(li_top)
+        p_res.add(li_top_ref)
+        movey(li_top_ref, contact_length/2 + length/2)
+        movex(li_top_ref, (i)*separation)
+
+        li_bot = rectangle(size=(width,contact_length), layer=pdk.get_glayer("met1"), centered=True)
+        li_bot_ref = prec_ref_center(li_bot)
+        p_res.add(li_bot_ref)
+        movey(li_bot_ref, - contact_length/2 - length/2)
+        movex(li_bot_ref, (i)*separation)
+
+        # Place poly to li via contacts (bottom via licon2, top via licon3 below)
+        licon2 = via_array(pdk, "poly", "met1", size=(width,contact_length))
+        licon2_ref = prec_ref_center(licon2)
+        p_res.add(licon2_ref)
+        movey(licon2_ref, - contact_length/2 - length/2)
+        movex(licon2_ref, (i)*separation)
+
+        licon3 = via_array(pdk, "poly", "met1",
size=(width,contact_length)) + licon3_ref = prec_ref_center(licon3) + p_res.add(licon3_ref) + movey(licon3_ref, contact_length/2 + length/2) + movex(licon3_ref, (i)*separation) + + # place metal 1 layer on contacts + met1_top = rectangle(size=(width,contact_length), layer=pdk.get_glayer("met2"), centered=True) + met1_top_ref = prec_ref_center(met1_top) + p_res.add(met1_top_ref) + movey(met1_top_ref, contact_length/2 + length/2) + movex(met1_top_ref, (i)*separation) + + met1_bot = rectangle(size=(width,contact_length), layer=pdk.get_glayer("met2"), centered=True) + met1_bot_ref = prec_ref_center(met1_bot) + p_res.add(met1_bot_ref) + movey(met1_bot_ref, - contact_length/2 - length/2) + movex(met1_bot_ref, (i)*separation) + #place li to metal vias + met1con1 = via_array(pdk, "met1", "met2", size=(width,contact_length)) + met1con1_ref = prec_ref_center(met1con1) + p_res.add(met1con1_ref) + movey(met1con1_ref, contact_length/2 + length/2) + movex(met1con1_ref, (i)*separation) + + met1con2 = via_array(pdk, "met1", "met2", size=(width,contact_length)) + met1con2_ref = prec_ref_center(met1con2) + p_res.add(met1con2_ref) + movey(met1con2_ref, - contact_length/2 - length/2) + movex(met1con2_ref, (i)*separation) + + con_offset = (separation)/2 + if is_snake == True: + if i > 0: + met1_connect = rectangle(size=(width+separation,contact_length), layer=pdk.get_glayer("met2"),centered= True) + met1_con_ref = prec_ref_center(met1_connect) + p_res.add(met1_con_ref) + if i%2 == 0: + movey(met1_con_ref, - contact_length/2 - length/2) + movex(met1_con_ref, (i-1)*separation+con_offset) + else: + movey(met1_con_ref, contact_length/2 + length/2) + movex(met1_con_ref, (i-1)*separation+con_offset) + + if i == 0: + p_res.add_ports(met1_bot_ref.get_ports_list(), prefix="MINUS_") + + + # Calculate tap_separation to ensure proper spacing from poly resistor to COMP (active_diff) + # tapring creates COMP layer via active_tap, so we need to ensure poly-to-COMP spacing (0.6ยตm minimum for PRES.3) + poly_to_comp_sep = pdk.get_grule("poly", "active_diff").get("min_separation", 0.6) + + # tap_separation_base for other spacing requirements (metal, tap spacing, etc.) 
+ tap_separation_base = max( + pdk.util_max_metal_seperation(), + pdk.get_grule("active_diff", "active_tap")["min_separation"], + ) + tap_separation_base += pdk.get_grule("p+s/d", "active_tap")["min_enclosure"] + + # tap_encloses is the internal rectangle size that tapring will enclose + # The tapring creates a ring around this rectangle with active_tap (which contains COMP) + # To ensure PRES.3: poly edge to COMP edge >= 0.6ยตm + # The ring's inner edge is at enclosed_rectangle edge, so COMP edge is at that edge + # Since p_res.xmax is half-width from center (poly edge distance), tapring internal rectangle half-width should be: + # p_res.xmax + poly_to_comp_sep to ensure poly-to-COMP spacing + # Add tap_separation_base for other requirements + # Ensure minimum 0.6ยตm spacing from poly to COMP (PRES.3 requirement) + # Current spacing is 0.56ยตm, need at least 0.04ยตm more to reach 0.6ยตm + # Add safety margin (0.15ยตm) to account for rounding, grid snapping, and ensure >= 0.6ยตm + safety_margin = 0.15 # Add safety margin to ensure >= 0.6ยตm even after grid snapping + total_separation = poly_to_comp_sep + tap_separation_base + safety_margin + # Snap to grid to ensure proper alignment + total_separation = pdk.snap_to_2xgrid(total_separation) + # Ensure minimum: poly_to_comp_sep (0.6ยตm) must be met + # total_separation should be at least poly_to_comp_sep + tap_separation_base + min_required_separation = poly_to_comp_sep + tap_separation_base + min_required_separation = pdk.snap_to_2xgrid(min_required_separation) + if total_separation < min_required_separation: + total_separation = min_required_separation + 0.15 # Add extra margin if below minimum + total_separation = pdk.snap_to_2xgrid(total_separation) + tap_encloses = ( + 2 * (p_res.xmax + total_separation), + 2 * (p_res.ymax + total_separation), + ) + tiering_ref = p_res << tapring( + pdk, + enclosed_rectangle=tap_encloses, + sdlayer="p+s/d", + horizontal_glayer=tie_layers[0], + vertical_glayer=tie_layers[1], + ) + p_res.add_ports(tiering_ref.get_ports_list(), prefix="tie_") + + # add pplus or nplus layer according to the polyresistor type + if n_type: + plus_layer = pdk.get_glayer("n+s/d") # N-plus for N-type polyresistor + else: + plus_layer = pdk.get_glayer("p+s/d") # P-plus for P-type polyresistor + + plus = rectangle(size=(2*p_res.xmax+2,2*p_res.ymax+2), layer=plus_layer, centered=True) + plus_ref = prec_ref_center(plus) + p_res.add(plus_ref) + # add pwell + #p_res.add_padding( + # layers=(pdk.get_glayer("pwell"),), + # default=pdk.get_grule("pwell", "active_tap")["min_enclosure"], + #) + #p_res = add_ports_perimeter(p_res,layer=pdk.get_glayer("pwell"),prefix="well_") + + #print(i) + if i%2 == 0: + p_res.add_ports(met1_top_ref.get_ports_list(), prefix="PLUS_") + else: + p_res.add_ports(met1_bot_ref.get_ports_list(), prefix="PLUS_") + + # Select model based on type and silicidation + if n_type: + if silicided: + model = 'npolyf_s' # n-type, silicided + else: + model = 'npolyf_u' # n-type, unsalicided + else: + if silicided: + model = 'ppolyf_s' # p-type, silicided + else: + model = 'ppolyf_u' # p-type, unsalicided + + p_res.info['netlist'] = poly_resistor_netlist( + circuit_name="POLY_RES", + model=model, + width=width, + length=length, + multipliers=1, + ) + #print(p_res.get_ports_list()) + return p_res + +def add_polyres_labels(pdk: MappedPDK, p_res: Component, length, width, fingers): + p_res.unlock() + #met1_label = (68, 5) + #met1_pin = (68, 16) + move_info = list() + # Calculate separation the same way as in poly_resistor() to 
ensure consistency + # PRES.4 requires 0.6ยตm spacing from poly2 resistor to unrelated poly2 + poly2_resistor_min_sep = 0.6 + met1_min_sep = pdk.get_grule("met1", "met1").get("min_separation", 0.23) + separation = width + max(poly2_resistor_min_sep, met1_min_sep) + separation = pdk.snap_to_2xgrid(separation) + contact_length = 2.2 + p_pin = p_res << rectangle(size=(0.1,0.1),layer=pdk.get_glayer("met2"),centered=True) + if fingers%2 == 0: + movey(p_pin, -contact_length/2 - length/2) + movex(p_pin, (fingers-1)*separation) + else: + movey(p_pin, contact_length/2 + length/2) + movex(p_pin, (fingers-1)*separation) + + m_pin = p_res << rectangle(size=(0.1,0.1),layer=pdk.get_glayer("met2"),centered=True) + movey(m_pin, -contact_length/2 - length/2) + + #plus label + p_label = rectangle(layer=pdk.get_glayer("met2_pin"), size=(0.1,0.1), centered=True).copy() + p_label.add_label(text="PLUS",layer=pdk.get_glayer("met2_label")) + move_info.append((p_label,p_pin.ports["e1"],None)) + + m_label = rectangle(layer=pdk.get_glayer("met2_pin"), size=(0.1,0.1), centered=True).copy() + m_label.add_label(text="MINUS",layer=pdk.get_glayer("met2_label")) + move_info.append((m_label,m_pin.ports["e1"],None)) + + sub_label = rectangle(layer=pdk.get_glayer("met2_pin"),size=(0.5,0.5),centered=True).copy() + sub_label.add_label(text="VSUBS",layer=pdk.get_glayer("met2_label")) + move_info.append((sub_label,p_res.ports["tie_N_top_met_N"], None)) + for comp, prt, alignment in move_info: + alignment = ('c','b') if alignment is None else alignment + compref = align_comp_to_port(comp, prt, alignment=alignment) + p_res.add(compref) + return p_res.flatten() diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 00000000..4f48fe0a --- /dev/null +++ b/tests/README.md @@ -0,0 +1,90 @@ +# gLayout Regression Tests + +This directory contains regression tests for gLayout to ensure previously fixed bugs don't reappear. + +## Structure + +``` +tests/ +โ”œโ”€โ”€ README.md +โ””โ”€โ”€ regression/ + โ””โ”€โ”€ test_polyres_multifinger.py # Polyresistor multi-finger tests (M1.2a spacing fix) +``` + +## Running Tests + +### Basic usage + +```bash +# Set PDK_ROOT (required) +export PDK_ROOT=/path/to/your/pdk + +# Run all regression tests +pytest tests/regression/ + +# Run with verbose output +pytest tests/regression/test_polyres_multifinger.py -v +``` + +### Run specific parameterized tests + +```bash +# Run only the M1.2a spacing fix test +pytest tests/regression/test_polyres_multifinger.py::TestPolyResistorMultiFinger::test_multifinger_m1_2a_spacing_fix -v + +# Run tests for various widths +pytest tests/regression/test_polyres_multifinger.py::TestPolyResistorMultiFinger::test_polyres_various_widths -v + +# Run a specific parameter combination +pytest tests/regression/test_polyres_multifinger.py::TestPolyResistorMultiFinger::test_polyres_various_widths[1.0-1.5-3-True-False-False] -v +``` + +### Using custom parameters via command line + +```bash +# Test with custom parameters (requires modifying the test or using conftest.py) +pytest tests/regression/ --width=1.2 --length=2.0 --fingers=4 + +# Run only tests matching a pattern +pytest tests/regression/ -k "width" -v +``` + +## Polyresistor Multi-finger Tests + +Tests multi-finger polyresistor configurations with the M1.2a spacing fix for narrow width resistors. 
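The original failure case is also easy to reproduce outside pytest for manual inspection; a minimal sketch (the output filename is illustrative):

```python
from glayout.pdk.gf180_mapped import gf180_mapped_pdk
from glayout.primitives.polyres import poly_resistor, add_polyres_labels

res = poly_resistor(gf180_mapped_pdk, width=0.8, length=1.5, fingers=5, is_snake=True)
labeled = add_polyres_labels(gf180_mapped_pdk, res, 1.5, 0.8, 5)
labeled.write_gds("polyres_m1_2a.gds")  # inspect in KLayout or run the PDK DRC deck on it
```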
+ +### Test Parameters + +All tests are parameterized with the following options: +- `width`: Resistor width in ยตm (e.g., 0.5, 0.8, 1.0, 1.5) +- `length`: Resistor length in ยตm (e.g., 1.5, 2.0) +- `fingers`: Number of fingers (e.g., 2, 3, 5, 7) +- `is_snake`: Use snake configuration (True/False) +- `n_type`: Use n-type resistor (True/False) +- `silicided`: Use silicided resistor (True/False) + +### Key test cases + +1. **M1.2a spacing fix** (original bug case) + - Width: 0.8ยตm, Length: 1.5ยตm, Fingers: 5 + - Tests narrow width resistor with M1.2a spacing requirements + +2. **Various widths** + - Tests: 0.5ยตm, 0.8ยตm, 1.0ยตm, 1.5ยตm widths + - Validates layout generation across width range + +3. **Various finger counts** + - Tests: 2, 3, 5, 7 fingers + - Ensures multi-finger scaling works correctly + +### Adding Custom Test Cases + +Edit `test_polyres_multifinger.py` and add parameters to `@pytest.mark.parametrize`: + +```python +@pytest.mark.parametrize("width,length,fingers,is_snake,n_type,silicided", [ + (1.2, 2.0, 4, True, False, False), # Your custom test case +]) +def test_polyres_custom_config(self, width, length, fingers, is_snake, n_type, silicided): + # Test implementation +``` diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..19896225 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Regression tests for gLayout primitives and components.""" diff --git a/tests/regression/__init__.py b/tests/regression/__init__.py new file mode 100644 index 00000000..d3703957 --- /dev/null +++ b/tests/regression/__init__.py @@ -0,0 +1 @@ +"""Regression test suite for gLayout.""" diff --git a/tests/regression/test_polyres_multifinger.py b/tests/regression/test_polyres_multifinger.py new file mode 100644 index 00000000..cb26c3c9 --- /dev/null +++ b/tests/regression/test_polyres_multifinger.py @@ -0,0 +1,123 @@ +""" +Regression tests for polyresistor multi-finger configurations. +Tests the M1.2a spacing fix for narrow width resistors. + +Usage with custom parameters: + pytest tests/regression/test_polyres_multifinger.py -v + pytest tests/regression/test_polyres_multifinger.py::TestPolyResistorMultiFinger::test_polyres_layout[1.0-2.0-3-True-False-False] +""" + +import pytest +from glayout.pdk.gf180_mapped import gf180_mapped_pdk +from glayout.primitives.polyres import poly_resistor, add_polyres_labels + + +class TestPolyResistorMultiFinger: + """Test multi-finger polyresistor configurations with parameterized tests.""" + + # Original M1.2a spacing fix test case + @pytest.mark.parametrize("width,length,fingers,is_snake,n_type,silicided,check_drc", [ + (0.8, 1.5, 5, True, False, False, True), # Original M1.2a test case + ]) + def test_multifinger_m1_2a_spacing_fix(self, width, length, fingers, is_snake, n_type, silicided, check_drc): + """ + Test multi-finger polyresistor with width=0.8ยตm and 5 fingers. + This is the original test case for M1.2a spacing fix. 
+        """
+        # Create multi-finger resistor
+        resistor_base = poly_resistor(
+            gf180_mapped_pdk,
+            width=width,
+            length=length,
+            fingers=fingers,
+            is_snake=is_snake,
+            n_type=n_type,
+            silicided=silicided
+        )
+
+        resistor_multi = add_polyres_labels(
+            gf180_mapped_pdk,
+            resistor_base,
+            length, width, fingers
+        )
+
+        # Verify the component was created
+        assert resistor_multi is not None, f"Failed to create resistor (w={width}, l={length}, f={fingers})"
+
+        # Run DRC check if requested
+        if check_drc:
+            drc_result = gf180_mapped_pdk.drc(resistor_multi)
+            assert drc_result is True or drc_result == 0, f"DRC check failed: {drc_result}"
+
+    # Parameterized test for various widths
+    @pytest.mark.parametrize("width,length,fingers,is_snake,n_type,silicided", [
+        (0.5, 1.5, 3, True, False, False),  # Very narrow
+        (0.8, 1.5, 3, True, False, False),  # Narrow (M1.2a case)
+        (1.0, 1.5, 3, True, False, False),  # Medium
+        (1.5, 1.5, 3, True, False, False),  # Wide
+    ])
+    def test_polyres_various_widths(self, width, length, fingers, is_snake, n_type, silicided):
+        """Test multi-finger polyresistors with various widths."""
+        resistor = poly_resistor(
+            gf180_mapped_pdk,
+            width=width,
+            length=length,
+            fingers=fingers,
+            is_snake=is_snake,
+            n_type=n_type,
+            silicided=silicided
+        )
+        assert resistor is not None, f"Failed to create resistor with width={width}µm"
+
+    # Parameterized test for various finger counts
+    @pytest.mark.parametrize("width,length,fingers,is_snake,n_type,silicided", [
+        (0.8, 1.5, 2, True, False, False),  # 2 fingers
+        (0.8, 1.5, 3, True, False, False),  # 3 fingers
+        (0.8, 1.5, 5, True, False, False),  # 5 fingers
+        (0.8, 1.5, 7, True, False, False),  # 7 fingers
+    ])
+    def test_polyres_various_finger_counts(self, width, length, fingers, is_snake, n_type, silicided):
+        """Test multi-finger polyresistors with various finger counts."""
+        resistor = poly_resistor(
+            gf180_mapped_pdk,
+            width=width,
+            length=length,
+            fingers=fingers,
+            is_snake=is_snake,
+            n_type=n_type,
+            silicided=silicided
+        )
+        assert resistor is not None, f"Failed to create resistor with {fingers} fingers"
+
+    # Generic parameterized test for custom configurations.
+    # NOTE: pytest skips a test whose parametrize list is empty, so this test is
+    # inert until cases are added here (or injected via pytest_generate_tests).
+    @pytest.mark.parametrize("width,length,fingers,is_snake,n_type,silicided", [
+        # Add custom test cases here or override via command line
+        # Example: pytest --width=1.2 --length=2.0 --fingers=4
+    ])
+    def test_polyres_custom_config(self, width, length, fingers, is_snake, n_type, silicided):
+        """
+        Generic test for custom polyresistor configurations.
+        Can be used with custom parameters from the command line or test data files.
+        """
+        resistor = poly_resistor(
+            gf180_mapped_pdk,
+            width=width,
+            length=length,
+            fingers=fingers,
+            is_snake=is_snake,
+            n_type=n_type,
+            silicided=silicided
+        )
+        assert resistor is not None, f"Failed to create custom resistor (w={width}, l={length}, f={fingers})"
+
+
+# pytest configuration hook to add custom command line options.
+# NOTE: pytest only collects pytest_addoption from a conftest.py or an installed
+# plugin, so this hook must live in tests/conftest.py to take effect; it is kept
+# here as a reference for the option names.
+def pytest_addoption(parser):
+    """Add custom command line options for polyresistor parameters."""
+    parser.addoption("--width", action="store", type=float, help="Resistor width in µm")
+    parser.addoption("--length", action="store", type=float, help="Resistor length in µm")
+    parser.addoption("--fingers", action="store", type=int, help="Number of fingers")
+    # NOTE: type=bool is unreliable for CLI options (bool("False") is True);
+    # prefer store_true flags or explicit string choices.
+    parser.addoption("--is-snake", action="store", type=bool, default=True, help="Use snake configuration")
+    parser.addoption("--n-type", action="store", type=bool, default=False, help="Use n-type resistor")
+    parser.addoption("--silicided", action="store", type=bool, default=False, help="Use silicided resistor")
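
For the `--width`/`--length`/`--fingers` options above to actually reach pytest, they have to be registered in a `conftest.py`. A minimal sketch of such a `tests/conftest.py` (not part of this diff; the fixture name `custom_polyres_params` is illustrative):

```python
# tests/conftest.py (sketch): register the CLI options and expose them to tests
import pytest


def pytest_addoption(parser):
    parser.addoption("--width", action="store", type=float, default=None, help="Resistor width in µm")
    parser.addoption("--length", action="store", type=float, default=None, help="Resistor length in µm")
    parser.addoption("--fingers", action="store", type=int, default=None, help="Number of fingers")


@pytest.fixture
def custom_polyres_params(request):
    """Collect command-line overrides; None means 'use the test's defaults'."""
    return {
        "width": request.config.getoption("--width"),
        "length": request.config.getoption("--length"),
        "fingers": request.config.getoption("--fingers"),
    }
```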