Skip to content

Instantly share code, notes, and snippets.

@mdmitry1
Last active December 9, 2025 09:00
Show Gist options
  • Select an option

  • Save mdmitry1/eadd1de72d8cfe43c436b7b9b057aeae to your computer and use it in GitHub Desktop.

Select an option

Save mdmitry1/eadd1de72d8cfe43c436b7b9b057aeae to your computer and use it in GitHub Desktop.

General Performance Comparison

VF2 advantages

VF2 tends to perform better on graphs that are sparse (few connections), small, or regularly structured (e.g., 2D meshes or graphs of low, bounded valence). In some cases, Nauty cannot find a solution for these regular graphs once they exceed a certain size (a few dozen nodes).

Nauty advantages

Nauty (via pynauty) is generally the better algorithm for dense or large randomly connected graphs.

#!/usr/bin/python3.12
"""
Graph Automorphisms: Complete Explanation with Examples
An AUTOMORPHISM is an isomorphism from a graph to itself.
It's a way to relabel vertices that preserves all edges.
Think of it as "symmetries" of the graph.
"""
import networkx as nx
import itertools
from collections import defaultdict
import matplotlib.pyplot as plt
import os
# Create output directory for images.
# exist_ok avoids the race between the existence check and the creation
# that the previous if-not-exists pattern allowed.
OUTPUT_DIR = "automorphism_illustrations"
os.makedirs(OUTPUT_DIR, exist_ok=True)
def save_graph_visualization(G, filename, title, pos=None, highlight_nodes=None, node_labels=None):
    """
    Save a graph visualization as a PNG under OUTPUT_DIR.

    Args:
        G: NetworkX graph to draw.
        filename: Output filename (without extension).
        title: Plot title.
        pos: Node positions dict (if None, a seeded spring layout is used).
        highlight_nodes: Dict of {node: color} to highlight specific nodes.
        node_labels: Custom node labels (if None, uses str(node)).

    Returns:
        Path of the saved PNG file.
    """
    plt.figure(figsize=(10, 8))
    # Calculate layout if not provided; seeding keeps output deterministic.
    if pos is None:
        pos = nx.spring_layout(G, seed=42)
    # Default node colors, overridden per-node by highlight_nodes.
    node_colors = ['lightblue'] * len(G.nodes())
    if highlight_nodes:
        node_list = list(G.nodes())
        for i, node in enumerate(node_list):
            if node in highlight_nodes:
                node_colors[i] = highlight_nodes[node]
    # Draw graph
    nx.draw_networkx_edges(G, pos, width=2, alpha=0.6)
    nx.draw_networkx_nodes(G, pos, node_color=node_colors,
                           node_size=1500, alpha=0.9, edgecolors='black', linewidths=2)
    # Labels
    labels = node_labels if node_labels else {node: str(node) for node in G.nodes()}
    nx.draw_networkx_labels(G, pos, labels, font_size=16, font_weight='bold')
    plt.title(title, fontsize=18, fontweight='bold', pad=20)
    plt.axis('off')
    plt.tight_layout()
    # BUG FIX: the filename argument was previously ignored (a literal
    # placeholder string was used), so every call overwrote the same file.
    filepath = os.path.join(OUTPUT_DIR, f"{filename}.png")
    plt.savefig(filepath, dpi=150, bbox_inches='tight', facecolor='white')
    plt.close()
    return filepath
def visualize_automorphism(G, original_pos, automorphism, filename, title):
    """
    Visualize an automorphism as a side-by-side before/after figure.

    Args:
        G: NetworkX graph.
        original_pos: Node positions for the original drawing.
        automorphism: Dict mapping each node to its image under the map.
        filename: Output filename (without extension).
        title: Figure title.

    Returns:
        Path of the saved PNG file.
    """
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(16, 7))
    # Left: Original graph
    plt.sca(ax1)
    nx.draw_networkx_edges(G, original_pos, width=2, alpha=0.6)
    nx.draw_networkx_nodes(G, original_pos, node_color='lightblue',
                           node_size=1500, alpha=0.9, edgecolors='black', linewidths=2)
    labels = {node: str(node) for node in G.nodes()}
    nx.draw_networkx_labels(G, original_pos, labels, font_size=16, font_weight='bold')
    ax1.set_title("Original", fontsize=16, fontweight='bold')
    ax1.axis('off')
    # Right: After automorphism
    plt.sca(ax2)
    # Each node's image takes over the node's old screen position, so the
    # picture shows where every label "moved" under the mapping.
    new_pos = {automorphism[node]: original_pos[node] for node in G.nodes()}
    nx.draw_networkx_edges(G, new_pos, width=2, alpha=0.6)
    nx.draw_networkx_nodes(G, new_pos, node_color='lightgreen',
                           node_size=1500, alpha=0.9, edgecolors='black', linewidths=2)
    new_labels = {node: str(node) for node in G.nodes()}
    nx.draw_networkx_labels(G, new_pos, new_labels, font_size=16, font_weight='bold')
    ax2.set_title("After Automorphism", fontsize=16, fontweight='bold')
    ax2.axis('off')
    # Add mapping text (first four pairs only, to keep the title short)
    mapping_text = " → ".join([f"{k}→{v}" for k, v in list(automorphism.items())[:4]])
    if len(automorphism) > 4:
        mapping_text += "..."
    fig.suptitle(f"{title}\nMapping: {mapping_text}", fontsize=14, fontweight='bold')
    plt.tight_layout()
    # BUG FIX: the filename argument was previously ignored (a literal
    # placeholder string was used), so every call overwrote the same file.
    filepath = os.path.join(OUTPUT_DIR, f"{filename}.png")
    plt.savefig(filepath, dpi=150, bbox_inches='tight', facecolor='white')
    plt.close()
    return filepath
class AutomorphismAnalyzer:
    """
    Analyze and find automorphisms of graphs.

    An automorphism is an isomorphism from a graph to itself: a
    permutation of the vertices that maps edges onto edges.
    """

    def find_all_automorphisms_bruteforce(self, G):
        """
        Find all automorphisms by trying all vertex permutations.
        Only works for small graphs (< 10 nodes).

        Because a permutation is a bijection, verifying that every existing
        edge maps onto an edge is sufficient: the image has exactly |E|
        edges, so none can be created or destroyed.  (The previous version
        re-counted the mapped edges afterwards, but that check could never
        fail once all edges had already passed has_edge.)
        """
        vertices = list(G.nodes())
        n = len(vertices)
        automorphisms = []
        # Try all n! permutations
        for perm in itertools.permutations(vertices):
            # Create mapping: old vertex -> new vertex
            mapping = {vertices[i]: perm[i] for i in range(n)}
            # Keep the permutation iff every edge (u, v) maps to an edge.
            if all(G.has_edge(mapping[u], mapping[v]) for u, v in G.edges()):
                automorphisms.append(mapping)
        return automorphisms

    def count_automorphisms(self, G):
        """Count number of automorphisms (automorphism group size)."""
        autos = self.find_all_automorphisms_bruteforce(G)
        return len(autos)

    def describe_automorphisms(self, G):
        """
        Provide human-readable description of automorphisms.

        Returns:
            Dict with 'count', 'automorphisms', 'is_symmetric', and
            'symmetry_description' keys.
        """
        automorphisms = self.find_all_automorphisms_bruteforce(G)
        result = {
            'count': len(automorphisms),
            'automorphisms': automorphisms,
            'is_symmetric': len(automorphisms) > 1,
            'symmetry_description': self._describe_symmetry(G, automorphisms)
        }
        return result

    def _describe_symmetry(self, G, automorphisms):
        """Describe the type of symmetry."""
        n = len(automorphisms)
        if n == 1:
            return "Asymmetric (no non-trivial automorphisms)"
        elif n == 2:
            return "One non-trivial symmetry (e.g., reflection)"
        else:
            return f"Highly symmetric ({n} automorphisms = {n}! / |Aut(G)|)"

    def find_orbits(self, G):
        """
        Find orbits: sets of vertices that can be mapped to each other.
        Vertices in the same orbit are "equivalent" under symmetry.
        """
        automorphisms = self.find_all_automorphisms_bruteforce(G)
        vertices = list(G.nodes())
        # Union-find to group vertices that can be mapped to each other
        parent = {v: v for v in vertices}

        def find(v):
            # Path compression: find root and update all nodes on path
            root = v
            while parent[root] != root:
                root = parent[root]
            # Compress path
            while parent[v] != root:
                next_v = parent[v]
                parent[v] = root
                v = next_v
            return root

        def union(u, v):
            root_u = find(u)
            root_v = find(v)
            if root_u != root_v:
                parent[root_u] = root_v

        # For each automorphism, union vertices that map to each other
        for auto in automorphisms:
            for v in vertices:
                if v != auto[v]:  # identity pairs carry no information
                    union(v, auto[v])
        # Group vertices by their root (orbit representative)
        orbits_dict = defaultdict(list)
        for v in vertices:
            orbits_dict[find(v)].append(v)
        return list(orbits_dict.values())
def example_1_triangle():
    """
    Example 1: Equilateral Triangle
    High symmetry - all vertices are equivalent

    Prints the full automorphism group and orbits of the triangle and
    saves three illustrations (basic graph, one rotation, orbit coloring).
    """
    print("="*70)
    print("EXAMPLE 1: Equilateral Triangle")
    print("="*70)
    print("\nGraph structure:")
    print(" A")
    print(" / \\")
    print(" B---C")
    print("\nAll vertices look the same - complete symmetry!")
    G = nx.Graph()
    G.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'A')])
    # Create visualization with fixed positions
    pos = {'A': (0.5, 1), 'B': (0, 0), 'C': (1, 0)}
    # Save basic graph
    filepath = save_graph_visualization(G, "1_triangle_basic",
                                        "Triangle Graph - Complete Symmetry", pos)
    print(f"\n✓ Saved: {filepath}")
    analyzer = AutomorphismAnalyzer()
    result = analyzer.describe_automorphisms(G)
    print(f"\nNumber of automorphisms: {result['count']}")
    print("\nAll automorphisms:")
    for i, auto in enumerate(result['automorphisms'], 1):
        print(f" {i}. {auto}")
    # Visualize a rotation automorphism
    rotation_auto = {'A': 'B', 'B': 'C', 'C': 'A'}
    # Only draw the rotation if the brute-force search actually found it.
    if rotation_auto in result['automorphisms']:
        filepath = visualize_automorphism(G, pos, rotation_auto,
                                          "1_triangle_rotation",
                                          "Triangle: Rotation Automorphism (A→B→C→A)")
        print(f"✓ Saved: {filepath}")
    # Visualize orbits
    orbits = analyzer.find_orbits(G)
    print(f"\nOrbits (equivalent vertices):")
    for i, orbit in enumerate(orbits, 1):
        print(f" Orbit {i}: {orbit}")
    # Color nodes by orbit - all nodes in same orbit get same color
    orbit_colors = {}
    colors = ['yellow', 'lightcoral', 'lightgreen', 'lightblue', 'lavender']
    for orbit_idx, orbit in enumerate(orbits):
        color = colors[orbit_idx % len(colors)]
        for node in orbit:
            orbit_colors[node] = color
    filepath = save_graph_visualization(G, "1_triangle_orbits",
                                        "Triangle: All vertices in ONE orbit (all yellow = same equivalence class)",
                                        pos, highlight_nodes=orbit_colors)
    print(f"✓ Saved: {filepath}")
    print("\nInterpretation:")
    print(" - 6 automorphisms (3! = 6) because triangle has full symmetry")
    print(" - All 3 vertices in same orbit (can be mapped to each other)")
    print(" - Can rotate: A→B→C→A or reflect: A↔B, C stays")
def example_2_path():
    """
    Example 2: Path Graph
    Reflection symmetry only

    Prints the automorphism group of the 4-vertex path and saves the
    basic graph, the reflection automorphism, and the orbit coloring.
    """
    print("\n" + "="*70)
    print("EXAMPLE 2: Path (Chain)")
    print("="*70)
    print("\nGraph structure:")
    print(" A---B---C---D")
    print("\nEnds (A, D) are equivalent, middles (B, C) are equivalent")
    G = nx.Graph()
    G.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'D')])
    # Fixed positions for path
    pos = {'A': (0, 0), 'B': (1, 0), 'C': (2, 0), 'D': (3, 0)}
    filepath = save_graph_visualization(G, "2_path_basic",
                                        "Path Graph - Reflection Symmetry", pos)
    print(f"\n✓ Saved: {filepath}")
    analyzer = AutomorphismAnalyzer()
    result = analyzer.describe_automorphisms(G)
    print(f"\nNumber of automorphisms: {result['count']}")
    print("\nAll automorphisms:")
    for i, auto in enumerate(result['automorphisms'], 1):
        print(f" {i}. {auto}")
    # Visualize reflection
    reflection = {'A': 'D', 'B': 'C', 'C': 'B', 'D': 'A'}
    # Only draw the reflection if the search actually found it.
    if reflection in result['automorphisms']:
        filepath = visualize_automorphism(G, pos, reflection,
                                          "2_path_reflection",
                                          "Path: Reflection Automorphism")
        print(f"✓ Saved: {filepath}")
    orbits = analyzer.find_orbits(G)
    print(f"\nOrbits (equivalent vertices):")
    for i, orbit in enumerate(orbits, 1):
        print(f" Orbit {i}: {orbit}")
    # Color by orbit - each orbit gets one color
    orbit_colors = {}
    colors = ['lightcoral', 'lightgreen', 'lightblue', 'yellow']
    for orbit_idx, orbit in enumerate(orbits):
        color = colors[orbit_idx % len(colors)]
        for node in orbit:
            orbit_colors[node] = color
    filepath = save_graph_visualization(G, "2_path_orbits",
                                        "Path: Two orbits (one color per orbit)",
                                        pos, highlight_nodes=orbit_colors)
    print(f"✓ Saved: {filepath}")
    print("\nInterpretation:")
    print(" - 2 automorphisms: identity and reflection")
    print(" - Orbit {A, D}: the two endpoints are equivalent")
    print(" - Orbit {B, C}: the two middle vertices are equivalent")
def example_3_star():
    """
    Example 3: Star Graph
    Center vs periphery vertices

    The hub is fixed by every automorphism while the leaves permute
    freely, giving two orbits.
    """
    print("\n" + "="*70)
    print("EXAMPLE 3: Star Graph")
    print("="*70)
    print("\nGraph structure:")
    print(" B")
    print(" |")
    print(" C---A---D")
    print(" |")
    print(" E")
    print("\nA is the center (degree 4), others are leaves (degree 1)")
    G = nx.Graph()
    G.add_edges_from([('A', 'B'), ('A', 'C'), ('A', 'D'), ('A', 'E')])
    # Star layout
    pos = {
        'A': (0, 0),
        'B': (0, 1),
        'C': (-1, 0),
        'D': (1, 0),
        'E': (0, -1)
    }
    filepath = save_graph_visualization(G, "3_star_basic",
                                        "Star Graph - Center vs Periphery", pos)
    print(f"\n✓ Saved: {filepath}")
    analyzer = AutomorphismAnalyzer()
    result = analyzer.describe_automorphisms(G)
    print(f"\nNumber of automorphisms: {result['count']}")
    # There are 4! = 24 automorphisms; only show a few to keep output short.
    print(f"\nFirst 5 automorphisms (of {result['count']}):")
    for i, auto in enumerate(result['automorphisms'][:5], 1):
        print(f" {i}. {auto}")
    # Show one permutation of leaves
    if len(result['automorphisms']) > 1:
        # Index 0 is the identity, so index 1 is a non-trivial automorphism.
        perm_auto = result['automorphisms'][1]
        filepath = visualize_automorphism(G, pos, perm_auto,
                                          "3_star_permutation",
                                          "Star: Leaves can be permuted, center stays fixed")
        print(f"✓ Saved: {filepath}")
    orbits = analyzer.find_orbits(G)
    print(f"\nOrbits:")
    for i, orbit in enumerate(orbits, 1):
        print(f" Orbit {i}: {orbit}")
    # Highlight orbits: gold for the orbit containing the center, blue otherwise.
    orbit_colors = {}
    for orbit in orbits:
        if 'A' in orbit:
            for node in orbit:
                orbit_colors[node] = 'gold'
        else:
            for node in orbit:
                orbit_colors[node] = 'lightblue'
    filepath = save_graph_visualization(G, "3_star_orbits",
                                        "Star: Two orbits (gold=center, blue=leaves)",
                                        pos, highlight_nodes=orbit_colors)
    print(f"✓ Saved: {filepath}")
    print("\nInterpretation:")
    print(f" - {result['count']} automorphisms (4! = 24)")
    print(" - Center A must stay as A (unique degree 4)")
    print(" - Leaves {B,C,D,E} can be permuted in any way")
    print(" - Two orbits: {A} and {B,C,D,E}")
def example_4_asymmetric():
    """
    Example 4: Asymmetric Graph
    Only trivial automorphism (identity)

    Every vertex has a distinct structural fingerprint, so each vertex
    forms its own orbit.
    """
    print("\n" + "="*70)
    print("EXAMPLE 4: Asymmetric Graph")
    print("="*70)
    print("\nGraph structure:")
    print(" A---B---C---D")
    print(" |")
    print(" E")
    print("\nAll vertices have different 'views' - no symmetry")
    print("Degrees: A=1, B=3, C=2, D=1, E=1")
    print("B is unique (degree 3), C is unique (degree 2 in middle)")
    G = nx.Graph()
    G.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'D'), ('B', 'E')])
    # Fixed positions
    pos = {'A': (0, 1), 'B': (1, 1), 'C': (2, 1), 'D': (3, 1), 'E': (1, 0)}
    filepath = save_graph_visualization(G, "4_asymmetric_basic",
                                        "Asymmetric Graph - No Symmetry", pos)
    print(f"\n✓ Saved: {filepath}")
    analyzer = AutomorphismAnalyzer()
    result = analyzer.describe_automorphisms(G)
    print(f"\nNumber of automorphisms: {result['count']}")
    print("\nAutomorphisms:")
    for i, auto in enumerate(result['automorphisms'], 1):
        print(f" {i}. {auto}")
    orbits = analyzer.find_orbits(G)
    print(f"\nOrbits:")
    for i, orbit in enumerate(orbits, 1):
        print(f" Orbit {i}: {orbit}")
    # Color by orbit - each orbit gets its own color
    orbit_colors = {}
    colors = ['lightcoral', 'lightgreen', 'lightblue', 'yellow', 'lavender']
    for orbit_idx, orbit in enumerate(orbits):
        color = colors[orbit_idx % len(colors)]
        for node in orbit:
            orbit_colors[node] = color
    filepath = save_graph_visualization(G, "4_asymmetric_orbits",
                                        "Asymmetric: Each vertex in its own orbit (5 different colors)",
                                        pos, highlight_nodes=orbit_colors)
    print(f"✓ Saved: {filepath}")
    print("\nInterpretation:")
    print(" - Only 1 automorphism: identity (every vertex maps to itself)")
    print(" - Each vertex is in its own orbit (5 separate orbits)")
    print(" - Graph has NO symmetry - it's asymmetric")
    print(" - Why? Each vertex has a unique 'structural fingerprint':")
    print(" • A: degree 1, neighbor has degree 3")
    print(" • B: degree 3 (unique!)")
    print(" • C: degree 2, one neighbor degree 3, other degree 1")
    print(" • D: degree 1, neighbor has degree 2")
    print(" • E: degree 1, neighbor has degree 3")
    print(" - Even though A and E both have degree 1, their neighborhoods differ")
def example_5_square():
    """
    Example 5: Square (4-cycle)
    Rotations and reflections

    The 4-cycle's automorphism group is the dihedral group D4
    (4 rotations + 4 reflections = 8 automorphisms).
    """
    print("\n" + "="*70)
    print("EXAMPLE 5: Square (4-cycle)")
    print("="*70)
    print("\nGraph structure:")
    print(" A---B")
    print(" | |")
    print(" D---C")
    print("\nCan rotate 90°, 180°, 270° and reflect horizontally/vertically")
    G = nx.Graph()
    G.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'D'), ('D', 'A')])
    # Square positions
    pos = {'A': (0, 1), 'B': (1, 1), 'C': (1, 0), 'D': (0, 0)}
    filepath = save_graph_visualization(G, "5_square_basic",
                                        "Square Graph - Rotations & Reflections", pos)
    print(f"\n✓ Saved: {filepath}")
    analyzer = AutomorphismAnalyzer()
    result = analyzer.describe_automorphisms(G)
    print(f"\nNumber of automorphisms: {result['count']}")
    print("\nAll automorphisms:")
    for i, auto in enumerate(result['automorphisms'], 1):
        # Label the four rotations; the reflections print without a label.
        description = ""
        if auto == {'A': 'A', 'B': 'B', 'C': 'C', 'D': 'D'}:
            description = " (identity)"
        elif auto == {'A': 'B', 'B': 'C', 'C': 'D', 'D': 'A'}:
            description = " (rotate 90° clockwise)"
        elif auto == {'A': 'C', 'B': 'D', 'C': 'A', 'D': 'B'}:
            description = " (rotate 180°)"
        elif auto == {'A': 'D', 'B': 'A', 'C': 'B', 'D': 'C'}:
            description = " (rotate 270° clockwise)"
        print(f" {i}. {auto}{description}")
    # Visualize 90-degree rotation
    rotation_90 = {'A': 'B', 'B': 'C', 'C': 'D', 'D': 'A'}
    if rotation_90 in result['automorphisms']:
        filepath = visualize_automorphism(G, pos, rotation_90,
                                          "5_square_rotation",
                                          "Square: 90° Clockwise Rotation")
        print(f"✓ Saved: {filepath}")
    orbits = analyzer.find_orbits(G)
    print(f"\nOrbits:")
    for i, orbit in enumerate(orbits, 1):
        print(f" Orbit {i}: {sorted(orbit)}")
    # Color by orbit - all nodes in same orbit get same color
    orbit_colors = {}
    colors = ['lavender', 'lightcoral', 'lightgreen', 'lightblue']
    for orbit_idx, orbit in enumerate(orbits):
        color = colors[orbit_idx % len(colors)]
        for node in orbit:
            orbit_colors[node] = color
    filepath = save_graph_visualization(G, "5_square_orbits",
                                        "Square: All vertices in ONE orbit (all lavender = fully symmetric)",
                                        pos, highlight_nodes=orbit_colors)
    print(f"✓ Saved: {filepath}")
    print("\nInterpretation:")
    print(" - 8 automorphisms: 4 rotations + 4 reflections")
    print(" - All vertices in one orbit (completely symmetric)")
    print(" - This is the dihedral group D4")
def example_6_circuit_implications():
    """
    Example 6: Why automorphisms matter for circuits

    Models a NAND gate and a NOR gate (with ground) as graphs and shows
    how automorphisms identify interchangeable pins.
    """
    print("\n" + "="*70)
    print("EXAMPLE 6: Circuit Design Implications")
    print("="*70)
    print("\nScenario: Two circuit subcircuits")
    print("\nCircuit 1: NAND gate")
    print(" in1 ----\\")
    print(" NAND ---- out")
    print(" in2 ----/")
    C1 = nx.Graph([('in1', 'nand'), ('in2', 'nand'), ('nand', 'out')])
    # Circuit layout positions
    pos1 = {'in1': (0, 1), 'in2': (0, 0), 'nand': (1, 0.5), 'out': (2, 0.5)}
    filepath = save_graph_visualization(C1, "6_circuit_nand",
                                        "NAND Gate Circuit", pos1)
    print(f"\n✓ Saved: {filepath}")
    print("\nCircuit 2: NOR gate with ground")
    print(" in1 ----\\")
    print(" NOR ---- out")
    print(" in2 ----/")
    print(" |")
    print(" gnd")
    C2 = nx.Graph([('in1', 'nor'), ('in2', 'nor'), ('nor', 'out'), ('nor', 'gnd')])
    pos2 = {'in1': (0, 1.5), 'in2': (0, 0.5), 'nor': (1, 1), 'out': (2, 1), 'gnd': (1, -0.5)}
    filepath = save_graph_visualization(C2, "6_circuit_nor",
                                        "NOR Gate with Ground", pos2)
    print(f"✓ Saved: {filepath}")
    analyzer = AutomorphismAnalyzer()
    print("\n" + "-"*70)
    print("Circuit 1 (NAND) Analysis:")
    print("-"*70)
    result1 = analyzer.describe_automorphisms(C1)
    print(f"Automorphisms: {result1['count']}")
    orbits1 = analyzer.find_orbits(C1)
    print("Orbits:")
    for orbit in orbits1:
        print(f" {sorted(orbit)}")
    # Highlight input swap automorphism
    if result1['count'] > 1:
        # Index 0 is the identity, so index 1 is a non-trivial automorphism.
        swap_auto = result1['automorphisms'][1]
        filepath = visualize_automorphism(C1, pos1, swap_auto,
                                          "6_circuit_nand_swap",
                                          "NAND: Inputs are interchangeable (commutative)")
        print(f"✓ Saved: {filepath}")
    # Highlight orbits
    orbit_colors1 = {}
    colors = ['lightcoral', 'lightgreen', 'lightblue', 'yellow']
    for i, orbit in enumerate(orbits1):
        for node in orbit:
            orbit_colors1[node] = colors[i % len(colors)]
    filepath = save_graph_visualization(C1, "6_circuit_nand_orbits",
                                        "NAND Orbits: Inputs in same orbit (interchangeable)",
                                        pos1, highlight_nodes=orbit_colors1)
    print(f"✓ Saved: {filepath}")
    print("\nInterpretation:")
    print(" - 2 automorphisms: can swap in1 ↔ in2")
    print(" - Inputs are interchangeable (commutative)")
    print(" - {in1, in2} form one orbit")
    print(" - {nand} and {out} each in their own orbit")
    print("\n" + "-"*70)
    print("Circuit 2 (NOR with ground) Analysis:")
    print("-"*70)
    result2 = analyzer.describe_automorphisms(C2)
    print(f"Automorphisms: {result2['count']}")
    orbits2 = analyzer.find_orbits(C2)
    print("Orbits:")
    for orbit in orbits2:
        print(f" {sorted(orbit)}")
    # Highlight orbits (reuses the `colors` palette defined above)
    orbit_colors2 = {}
    for i, orbit in enumerate(orbits2):
        for node in orbit:
            orbit_colors2[node] = colors[i % len(colors)]
    filepath = save_graph_visualization(C2, "6_circuit_nor_orbits",
                                        "NOR Orbits: More structure, more separate orbits",
                                        pos2, highlight_nodes=orbit_colors2)
    print(f"✓ Saved: {filepath}")
    print("\nInterpretation:")
    print(" - 2 automorphisms: can swap in1 ↔ in2")
    print(" - Inputs still interchangeable")
    print(" - out and gnd are NOT interchangeable (different roles)")
    print("\n" + "="*70)
    print("KEY INSIGHTS FOR SEMICONDUCTOR DESIGN:")
    print("="*70)
    print("1. Automorphism count = measure of circuit symmetry")
    print("2. Orbits = functionally equivalent pins/nodes")
    print("3. High automorphism count → harder for isomorphism testing")
    print(" (VF2 must explore all symmetric variations)")
    print("4. Canonicalization exploits automorphisms to create unique form")
    print("5. Understanding symmetry helps optimize circuit layout")
def main():
    """Run all examples.

    Prints the introduction, runs the six example functions in order,
    then prints the closing summary.  Visualizations go to OUTPUT_DIR.
    """
    print("\n" + "█"*70)
    print("GRAPH AUTOMORPHISMS: COMPLETE GUIDE WITH VISUALIZATIONS")
    print("█"*70)
    print("\nDEFINITION:")
    print("An automorphism is a mapping of a graph to itself that preserves edges.")
    print("It's an 'isomorphism from a graph to itself' - a symmetry operation.")
    print("\nFormally: A permutation π of vertices where:")
    print(" (u,v) is an edge ⟺ (π(u), π(v)) is an edge")
    print("\nWHY IT MATTERS:")
    print("• Automorphism group size = measure of graph symmetry")
    print("• Orbits identify 'equivalent' vertices under symmetry")
    print("• Critical for canonical labeling algorithms (like Nauty)")
    print("• Affects isomorphism testing complexity")
    print("• Helps understand circuit symmetry in EDA applications")
    print(f"\nAll visualizations will be saved to: {OUTPUT_DIR}/")
    print("="*70)
    # Run examples
    example_1_triangle()
    example_2_path()
    example_3_star()
    example_4_asymmetric()
    example_5_square()
    example_6_circuit_implications()
    print("\n" + "="*70)
    print("SUMMARY")
    print("="*70)
    print("\nAutomorphism Count Interpretation:")
    print(" • Count = 1 → Asymmetric (no symmetry)")
    print(" • Count = 2 → One symmetry (e.g., reflection)")
    print(" • Count = n! → Completely symmetric (all vertices equivalent)")
    print(" • Count between → Partial symmetry")
    print("\nOrbits:")
    print(" • Vertices in same orbit are 'structurally equivalent'")
    print(" • Can be swapped without changing graph structure")
    print(" • Useful for: pin assignment, layout optimization, equivalence checking")
    print("\nFor Your Interview:")
    print(" • Automorphisms = graph symmetries")
    print(" • Used by Nauty for efficient canonical labeling")
    print(" • High symmetry → harder isomorphism testing (more cases)")
    print(f"\n✓ All visualizations saved to: {os.path.abspath(OUTPUT_DIR)}/")
    print("="*70)


# Entry point: run the full demo when executed as a script.
if __name__ == "__main__":
    main()
#!/usr/bin/python3.12
"""
Graph Canonicalization Using Various Approaches
Demonstrates Nauty-like canonicalization for circuit isomorphism
NOTE: This uses pynauty if available, otherwise falls back to
NetworkX's weisfeiler_lehman_graph_hash or custom implementation.
To install pynauty (optional):
pip install pynauty
"""
import networkx as nx
from collections import defaultdict
import hashlib
import itertools
class GraphCanonicalizer:
    """
    Provides multiple approaches to graph canonicalization.

    Methods range from exact (pynauty, brute force) to approximate
    (Weisfeiler-Leman hashing).  `canonicalize` picks the best available
    method automatically.
    """

    def __init__(self):
        # Feature-detect optional backends once, up front.
        self.has_pynauty = self._check_pynauty()
        self.has_networkx_canon = self._check_networkx_canon()

    def _check_pynauty(self):
        """Check if pynauty is available."""
        try:
            import pynauty  # noqa: F401
            return True
        except ImportError:
            return False

    def _check_networkx_canon(self):
        """Check if NetworkX exposes the Weisfeiler-Leman graph hash."""
        try:
            # BUG FIX: weisfeiler_lehman_graph_hash is a top-level networkx
            # function; the old check looked for it on the isomorphism
            # subpackage, so it always returned False and the slower custom
            # fallback was used unnecessarily.
            return hasattr(nx, 'weisfeiler_lehman_graph_hash')
        except NameError:
            # networkx itself is not importable in this environment.
            return False

    def canonicalize_with_pynauty(self, G: "nx.Graph"):
        """
        Use PyNauty for true canonical form (if available).
        This is the gold standard for canonicalization.

        Raises:
            ImportError: if pynauty is not installed.
        """
        if not self.has_pynauty:
            raise ImportError("pynauty not installed. Install with: pip install pynauty")
        import pynauty
        # Convert NetworkX graph to pynauty format.
        # pynauty uses integer vertex labels starting from 0.
        node_to_int = {node: i for i, node in enumerate(G.nodes())}
        n = len(node_to_int)
        # Build adjacency dictionary for pynauty (undirected: both directions).
        adjacency_dict = {i: [] for i in range(n)}
        for u, v in G.edges():
            u_int = node_to_int[u]
            v_int = node_to_int[v]
            adjacency_dict[u_int].append(v_int)
            adjacency_dict[v_int].append(u_int)
        # Create pynauty graph
        g = pynauty.Graph(
            number_of_vertices=n,
            directed=False,
            adjacency_dict=adjacency_dict
        )
        certificate = pynauty.certificate(g).hex()
        canonical_labeling = pynauty.canon_label(g)
        # Rebuild the graph under canonical labels, collecting the canonical
        # edge set as we go.  BUG FIX: the old code re-read adjacency via the
        # private _get_adjacency_dict() and then iterated the dict's KEYS
        # instead of each vertex's neighbor list, producing a bogus edge set.
        canonical_graph = pynauty.Graph(n)
        seen_edges = set()
        for u in range(n):
            for v in adjacency_dict[u]:
                if u < v:  # each undirected edge exactly once
                    cu = canonical_labeling[u]
                    cv = canonical_labeling[v]
                    canonical_graph.connect_vertex(cu, cv)
                    seen_edges.add((cu, cv) if cu < cv else (cv, cu))
        # Sanity check: relabeling by the canonical labeling must leave the
        # certificate unchanged.
        canonical_certificate = pynauty.certificate(canonical_graph).hex()
        print("Canonization suceeded? ", canonical_certificate == certificate)
        # autgrp returns (generators, grpsize1, grpsize2, orbits, numorbits)
        automorphism_result = pynauty.autgrp(g)
        automorphism_group_size = automorphism_result[1]
        orbits = len(set(automorphism_result[3]))
        # Create canonical string (sorted edge list with canonical labels)
        canonical_edges = sorted(seen_edges)
        canonical_string = str(canonical_edges)
        # Hash for efficient comparison
        canonical_hash = hashlib.sha256(canonical_string.encode()).hexdigest()
        return {
            'canonical_graph': canonical_graph,
            'canonical_string': canonical_string,
            'canonical_hash': canonical_hash,
            'automorphism_group_size': automorphism_group_size,
            'orbits': orbits,
            'canonical_labeling': tuple(canonical_labeling)
        }

    def canonicalize_with_wl_hash(self, G: "nx.Graph"):
        """
        Use Weisfeiler-Leman hash (fast approximation).
        Not perfect but very fast and works well in practice.
        """
        if self.has_networkx_canon:
            try:
                wl_hash = nx.weisfeiler_lehman_graph_hash(G)
                return {
                    'canonical_hash': wl_hash,
                    'method': 'networkx_wl',
                    'note': 'Approximate - may have false positives'
                }
            except Exception:
                # Fall through to the custom implementation on any failure.
                pass
        # Fallback: custom WL implementation
        return self._custom_wl_hash(G)

    def _custom_wl_hash(self, G: "nx.Graph", iterations=10):
        """
        Custom Weisfeiler-Leman hash implementation.

        Iteratively refines per-node colors from neighbor colors and hashes
        the final color multiset.  Approximate: non-isomorphic graphs can
        collide, but isomorphic graphs always agree.
        """
        # Initialize colors by degree
        colors = {node: G.degree(node) for node in G.nodes()}
        iteration = -1  # so 'iterations' reports 0 if the loop never runs
        for iteration in range(iterations):
            new_colors = {}
            for node in G.nodes():
                # Get sorted neighbor colors
                neighbor_colors = sorted([colors[neighbor]
                                          for neighbor in G.neighbors(node)])
                # Create new color from old color + neighbor colors
                signature = (colors[node], tuple(neighbor_colors))
                new_colors[node] = hash(signature)
            # Stop early once the refinement reaches a fixed point
            if new_colors == colors:
                break
            colors = new_colors
        # Create graph hash from color distribution
        color_multiset = tuple(sorted(colors.values()))
        graph_hash = hashlib.sha256(str(color_multiset).encode()).hexdigest()
        return {
            'canonical_hash': graph_hash,
            'color_distribution': color_multiset,
            'iterations': iteration + 1,
            'method': 'custom_wl',
            'note': 'Approximate - may have false positives'
        }

    def canonicalize_small_graph_exact(self, G: "nx.Graph"):
        """
        Brute force exact canonicalization for small graphs (< 10 nodes).
        Tries all permutations - only for demonstration!

        Raises:
            ValueError: if the graph has more than 10 nodes.
        """
        if G.number_of_nodes() > 10:
            raise ValueError("Graph too large for brute force (>10 nodes)")
        vertices = list(G.nodes())
        n = len(vertices)
        min_sequence = None
        min_labeling = None
        # Try all n! permutations
        for perm in itertools.permutations(range(n)):
            # perm[i] is the new label of vertices[i]; invert it so we can
            # look up which original vertex carries each new label.
            # BUG FIX: the adjacency matrix must be built in the RELABELED
            # order.  The old code indexed by the original vertex order, so
            # every permutation produced the same sequence and the result
            # was not actually canonical across different input orderings.
            inverse = [None] * n
            for i in range(n):
                inverse[perm[i]] = vertices[i]
            sequence = tuple(
                1 if G.has_edge(inverse[a], inverse[b]) else 0
                for a in range(n) for b in range(n)
            )
            # Keep lexicographically smallest
            if min_sequence is None or sequence < min_sequence:
                min_sequence = sequence
                min_labeling = perm
        # Create canonical graph under the winning labeling
        mapping = {vertices[i]: min_labeling[i] for i in range(n)}
        canonical_graph = nx.relabel_nodes(G, mapping)
        # Create canonical string
        canonical_edges = sorted([tuple(sorted([u, v]))
                                  for u, v in canonical_graph.edges()])
        canonical_string = str((min_sequence, canonical_edges))
        canonical_hash = hashlib.sha256(canonical_string.encode()).hexdigest()
        return {
            'canonical_graph': canonical_graph,
            'canonical_string': canonical_string,
            'canonical_hash': canonical_hash,
            'adjacency_sequence': min_sequence,
            'method': 'brute_force_exact'
        }

    def canonicalize(self, G: "nx.Graph", method='auto'):
        """
        Canonicalize a graph using the best available method.

        Args:
            G: NetworkX graph
            method: 'auto', 'pynauty', 'wl_hash', 'exact_small'
        Returns:
            Dictionary with canonical form information
        Raises:
            ValueError: if method is not one of the recognized names.
        """
        if method == 'auto':
            # Prefer exact methods when feasible; WL hash as last resort.
            if G.number_of_nodes() <= 10:
                method = 'exact_small'
            elif self.has_pynauty:
                method = 'pynauty'
            else:
                method = 'wl_hash'
        if method == 'pynauty':
            return self.canonicalize_with_pynauty(G)
        elif method == 'wl_hash':
            return self.canonicalize_with_wl_hash(G)
        elif method == 'exact_small':
            return self.canonicalize_small_graph_exact(G)
        else:
            raise ValueError(f"Unknown method: {method}")
class CanonicalCircuitLibrary:
    """
    Circuit library that indexes circuits by canonical form so that
    isomorphic circuits can be looked up in (near-)constant time.
    """

    def __init__(self, canonicalizer=None):
        self.canonicalizer = canonicalizer or GraphCanonicalizer()
        # canonical_hash -> list of stored circuit entries
        self.library = {}
        self.method = 'auto'

    def add_circuit(self, circuit_id, graph, metadata=None):
        """Canonicalize a circuit graph and file it under its canonical hash."""
        result = self.canonicalizer.canonicalize(graph, method=self.method)
        # Keep only the plain fields from the canonicalization result,
        # dropping any graph object it may carry.
        safe_keys = ('canonical_string', 'canonical_hash', 'method',
                     'automorphism_group_size', 'orbits', 'canonical_labeling')
        safe_result = {key: result.get(key) for key in safe_keys}
        entry = {
            'circuit_id': circuit_id,
            'graph': graph,
            'metadata': metadata or {},
            'canonical_result': safe_result,
        }
        self.library.setdefault(result['canonical_hash'], []).append(entry)

    def find_matches(self, query_graph):
        """
        Return the stored entries whose circuits are isomorphic to the query
        (empty list when nothing matches).
        """
        result = self.canonicalizer.canonicalize(query_graph, method=self.method)
        candidates = self.library.get(result['canonical_hash'], [])
        if not candidates:
            return []
        if result.get('method') not in ('custom_wl', 'networkx_wl'):
            # Exact canonical form: hash equality already proves isomorphism.
            return candidates
        # WL hashes are approximate, so confirm every candidate with VF2.
        confirmed = []
        for candidate in candidates:
            matcher = nx.algorithms.isomorphism.GraphMatcher(
                query_graph, candidate['graph']
            )
            if matcher.is_isomorphic():
                confirmed.append(candidate)
        return confirmed
def demonstrate_canonicalization():
    """Demonstrate different canonicalization approaches.

    Runs three back-ends (brute force, WL hash, and PyNauty when installed)
    on isomorphic triangle graphs, then demonstrates hash-based isomorphism
    lookup with CanonicalCircuitLibrary. Prints to stdout; returns nothing.
    Relies on module-level GraphCanonicalizer and CanonicalCircuitLibrary.
    """
    print("="*70)
    print("GRAPH CANONICALIZATION DEMONSTRATION")
    print("="*70)
    canonicalizer = GraphCanonicalizer()
    print(f"\nAvailable methods:")
    print(f" PyNauty (exact): {'✓ Available' if canonicalizer.has_pynauty else '✗ Not installed'}")
    print(f" NetworkX WL hash: {'✓ Available' if canonicalizer.has_networkx_canon else '✗ Not available'}")
    print(f" Brute force (small graphs): ✓ Always available")
    # Create test graphs - isomorphic triangles with different labels
    print("\n" + "="*70)
    print("TEST: Isomorphic Triangles with Different Labels")
    print("="*70)
    G1 = nx.Graph()
    G1.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'A')])
    G2 = nx.Graph()
    G2.add_edges_from([('X', 'Y'), ('Y', 'Z'), ('Z', 'X')])
    G3 = nx.Graph()
    G3.add_edges_from([('P', 'Q'), ('Q', 'R'), ('R', 'P')])
    print(f"\nG1 edges: {list(G1.edges())}")
    print(f"G2 edges: {list(G2.edges())}")
    print(f"G3 edges: {list(G3.edges())}")
    # Test with brute force (always available)
    print("\n" + "-"*70)
    print("Method: Brute Force Exact Canonicalization")
    print("-"*70)
    canon1 = canonicalizer.canonicalize(G1, method='exact_small')
    canon2 = canonicalizer.canonicalize(G2, method='exact_small')
    canon3 = canonicalizer.canonicalize(G3, method='exact_small')
    print(f"\nG1 canonical hash: {canon1['canonical_hash'][:16]}...")
    print(f"G2 canonical hash: {canon2['canonical_hash'][:16]}...")
    print(f"G3 canonical hash: {canon3['canonical_hash'][:16]}...")
    print(f"\nG1 adjacency seq: {canon1['adjacency_sequence']}")
    print(f"G2 adjacency seq: {canon2['adjacency_sequence']}")
    print(f"G3 adjacency seq: {canon3['adjacency_sequence']}")
    # Identical canonical hashes across all three proves isomorphism.
    if canon1['canonical_hash'] == canon2['canonical_hash'] == canon3['canonical_hash']:
        print("\n✓ SUCCESS: All three triangles have IDENTICAL canonical forms!")
        print(" This proves they are isomorphic.")
    else:
        print("\n✗ FAILED: Canonical forms differ (shouldn't happen for triangles)")
    # Test with WL hash
    print("\n" + "-"*70)
    print("Method: Weisfeiler-Leman Hash (Fast Approximation)")
    print("-"*70)
    wl1 = canonicalizer.canonicalize(G1, method='wl_hash')
    wl2 = canonicalizer.canonicalize(G2, method='wl_hash')
    wl3 = canonicalizer.canonicalize(G3, method='wl_hash')
    print(f"\nG1 WL hash: {wl1['canonical_hash'][:16]}...")
    print(f"G2 WL hash: {wl2['canonical_hash'][:16]}...")
    print(f"G3 WL hash: {wl3['canonical_hash'][:16]}...")
    # WL hash equality is necessary but not sufficient for isomorphism.
    if wl1['canonical_hash'] == wl2['canonical_hash'] == wl3['canonical_hash']:
        print("\n✓ WL hashes match (graphs likely isomorphic)")
    else:
        print("\n✗ WL hashes differ")
    # Test with PyNauty if available
    if canonicalizer.has_pynauty:
        print("\n" + "-"*70)
        print("Method: PyNauty (Industry Standard)")
        print("-"*70)
        nauty1 = canonicalizer.canonicalize(G1, method='pynauty')
        nauty2 = canonicalizer.canonicalize(G2, method='pynauty')
        nauty3 = canonicalizer.canonicalize(G3, method='pynauty')
        print(f"\nG1 Nauty hash: {nauty1['canonical_hash'][:16]}...")
        print(f"G2 Nauty hash: {nauty2['canonical_hash'][:16]}...")
        print(f"G3 Nauty hash: {nauty3['canonical_hash'][:16]}...")
        print(f"\nAutomorphism group sizes:")
        print(f" G1: {nauty1['automorphism_group_size']}")
        print(f" G2: {nauty2['automorphism_group_size']}")
        print(f" G3: {nauty3['automorphism_group_size']}")
        if nauty1['canonical_hash'] == nauty2['canonical_hash'] == nauty3['canonical_hash']:
            print("\n✓ Nauty: All canonical forms match perfectly!")
    # Demonstrate circuit library
    print("\n" + "="*70)
    print("CIRCUIT LIBRARY DEMONSTRATION")
    print("="*70)
    library = CanonicalCircuitLibrary()
    # Add some circuits
    print("\nBuilding circuit library...")
    # NAND gate variations (all isomorphic)
    nand1 = nx.Graph([('in1', 'gate'), ('in2', 'gate'), ('gate', 'out')])
    nand2 = nx.Graph([('A', 'G'), ('B', 'G'), ('G', 'Z')])
    nand3 = nx.Graph([('X', 'Y'), ('W', 'Y'), ('Y', 'Q')])
    library.add_circuit('NAND_v1', nand1, {'type': 'logic_gate', 'function': 'NAND'})
    library.add_circuit('NAND_v2', nand2, {'type': 'logic_gate', 'function': 'NAND'})
    library.add_circuit('NAND_v3', nand3, {'type': 'logic_gate', 'function': 'NAND'})
    # Different gate (NOT isomorphic)
    nor = nx.Graph([('in1', 'gate'), ('in2', 'gate'), ('gate', 'out'), ('gate', 'gnd')])
    library.add_circuit('NOR_v1', nor, {'type': 'logic_gate', 'function': 'NOR'})
    print(f" Added 4 circuits to library")
    print(f" Unique canonical forms: {len(library.library)}")
    # Search for matches
    print("\n" + "-"*70)
    print("Searching for matches...")
    print("-"*70)
    # The query is another relabeled 3-edge star: it should hit the NAND group.
    query = nx.Graph([('p', 'q'), ('r', 'q'), ('q', 's')])
    print(f"\nQuery circuit edges: {list(query.edges())}")
    matches = library.find_matches(query)
    print(f"\nFound {len(matches)} matching circuits:")
    for match in matches:
        print(f" - {match['circuit_id']}: {match['metadata']}")
    print("\n✓ Canonical library allows O(1) lookup instead of comparing")
    print(" against all circuits with VF2!")
# Run the full demonstration only when executed as a script (not on import).
if __name__ == "__main__":
    demonstrate_canonicalization()
#!/usr/bin/python3.12
import pynauty
import networkx as nx
from matplotlib import pyplot as plt
def canonicalize_with_pynauty(G: nx.Graph) -> pynauty.Graph:
    """
    Use PyNauty for true canonical form (if available).
    This is the gold standard for canonicalization.

    Converts the NetworkX graph to pynauty's 0-based integer labeling,
    computes its certificate and canonical labeling, rebuilds the graph
    under the canonical labeling, and prints whether both versions yield
    the same certificate (a certificate must be labeling-invariant).

    Args:
        G: Undirected NetworkX graph.

    Returns:
        The pynauty.Graph built from G under the original labeling.
    """
    # pynauty requires integer vertex labels starting from 0.
    node_to_int = {node: i for i, node in enumerate(G.nodes())}
    n = len(node_to_int)
    # Symmetric adjacency dict (each edge stored in both directions).
    adjacency_dict = {i: [] for i in range(n)}
    for u, v in G.edges():
        u_int = node_to_int[u]
        v_int = node_to_int[v]
        adjacency_dict[u_int].append(v_int)
        adjacency_dict[v_int].append(u_int)
    g = pynauty.Graph(number_of_vertices=n, directed=False, adjacency_dict=adjacency_dict)
    certificate = pynauty.certificate(g).hex()
    canonical_labeling = pynauty.canon_label(g)
    # Rebuild the graph with vertices renamed through the canonical labeling.
    # NOTE(review): assumes canon_label(g)[v] is vertex v's canonical position;
    # confirm against the pynauty documentation.
    # Fixed: reuse the locally built adjacency_dict instead of calling the
    # private g._get_adjacency_dict() — they are identical by construction.
    canonical_graph = pynauty.Graph(n)
    for u in range(n):
        for v in adjacency_dict[u]:
            # adjacency_dict holds each undirected edge twice; u < v keeps one copy.
            if u < v:
                canonical_graph.connect_vertex(canonical_labeling[u], canonical_labeling[v])
    canonical_certificate = pynauty.certificate(canonical_graph).hex()
    # Fixed typo in the message ("Canonization suceeded").
    print("Canonicalization succeeded? ", canonical_certificate == certificate)
    return g
# Demo graph: a star with hub "B" connected to four leaves.
star = nx.Graph()
star.add_nodes_from(["A", "B", "C", "D", "E"])
star.add_edges_from([("B", "A"), ("B", "C"), ("B", "D"), ("B", "E")])

nauty_graph = canonicalize_with_pynauty(star)

# autgrp result: index 3 holds the per-vertex orbit assignment and index 1
# the automorphism group size, matching the two prints below.
aut = pynauty.autgrp(nauty_graph)
orbit_count = len(set(aut[3]))
print(f"Number of orbits = {orbit_count}")
print(f"Number of automorphisms = {int(aut[1])}")

# Draw the graph with matplotlib.
layout = nx.spring_layout(star)
nx.draw_networkx_nodes(star, layout, node_color='lightblue', node_size=1500)
nx.draw_networkx_edges(star, layout, width=3, alpha=0.7, edge_color='darkgreen')
nx.draw_networkx_labels(star, layout, font_size=16, font_color='blue', font_weight="bold")
plt.title("NetworkX Graph", fontsize=16)
plt.axis('off')  # Hide the axes
plt.gcf().canvas.manager.set_window_title('Graph Visualization Demo')
plt.show()
#!/usr/bin/python3.12
import networkx as nx
import matplotlib.pyplot as plt
# Example 1: Graph Isomorphism
# Check if two graphs are isomorphic (structurally identical)
print("=" * 50)
print("Example 1: Graph Isomorphism")
print("=" * 50)
# Create two graphs with same structure but different node labels
G1 = nx.Graph()
G1.add_edges_from([(0, 1), (1, 2), (2, 3), (3, 0)])
G2 = nx.Graph()
G2.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'D'), ('D', 'A')])
# Check if graphs are isomorphic
GM = nx.isomorphism.GraphMatcher(G1, G2)
is_isomorphic = GM.is_isomorphic()
print(f"\nAre G1 and G2 isomorphic? {is_isomorphic}")
# Get the mapping between nodes (GM.mapping is populated by is_isomorphic)
if is_isomorphic:
    mapping = GM.mapping
    print(f"Node mapping: {mapping}")
# Example 2: Subgraph Isomorphism
# Check if one graph is a subgraph of another
print("\n" + "=" * 50)
print("Example 2: Subgraph Isomorphism")
print("=" * 50)
# Create a larger graph
G_large = nx.Graph()
G_large.add_edges_from([
    (1, 2), (2, 3), (3, 4), (4, 5),
    (5, 1), (2, 6), (3, 7)
])
# Create a smaller pattern to find
G_pattern = nx.Graph()
G_pattern.add_edges_from([(1, 2), (2, 3), (3, 1)])  # Triangle
# Find subgraph isomorphism
GM = nx.isomorphism.GraphMatcher(G_large, G_pattern)
is_subgraph = GM.subgraph_is_isomorphic()
print(f"\nIs pattern a subgraph of large graph? {is_subgraph}")
# Get all subgraph matches (each node permutation counts as a separate match)
if is_subgraph:
    print("\nAll matches found:")
    for i, match in enumerate(GM.subgraph_isomorphisms_iter(), 1):
        print(f" Match {i}: {match}")
# Example 3: Isomorphism with Node Attributes
print("\n" + "=" * 50)
print("Example 3: Isomorphism with Node Attributes")
print("=" * 50)
# Create graphs with node attributes
G3 = nx.Graph()
G3.add_node(1, color='red')
G3.add_node(2, color='blue')
G3.add_node(3, color='red')
G3.add_edges_from([(1, 2), (2, 3)])
G4 = nx.Graph()
G4.add_node('A', color='blue')
G4.add_node('B', color='red')
G4.add_node('C', color='red')
G4.add_edges_from([('A', 'B'), ('B', 'C')])
# Match with node attribute constraints
def node_match(n1, n2):
    """VF2 node-match callback: two nodes agree iff their 'color' attributes are equal."""
    color_a = n1['color']
    color_b = n2['color']
    return color_a == color_b
# Run the attribute-aware matcher: structure AND node colors must line up.
GM = nx.isomorphism.GraphMatcher(G3, G4, node_match=node_match)
is_iso_attr = GM.is_isomorphic()
print(f"\nAre graphs isomorphic with color constraint? {is_iso_attr}")
if is_iso_attr:
    print(f"Mapping: {GM.mapping}")
# Example 4: Isomorphism with Edge Attributes
print("\n" + "=" * 50)
print("Example 4: Isomorphism with Edge Attributes")
print("=" * 50)
# Two 2-edge paths whose weights appear in opposite order along the path.
G5 = nx.Graph()
G5.add_edge(1, 2, weight=5)
G5.add_edge(2, 3, weight=10)
G6 = nx.Graph()
G6.add_edge('X', 'Y', weight=10)
G6.add_edge('Y', 'Z', weight=5)
def edge_match(e1, e2):
    """VF2 edge-match callback: edges correspond iff their 'weight' attributes are equal."""
    weight_a = e1['weight']
    weight_b = e2['weight']
    return weight_a == weight_b
# Run the edge-attribute-aware matcher: edge weights must correspond.
GM = nx.isomorphism.GraphMatcher(G5, G6, edge_match=edge_match)
is_iso_edge = GM.is_isomorphic()
print(f"\nAre graphs isomorphic with edge weight constraint? {is_iso_edge}")
if is_iso_edge:
    print(f"Mapping: {GM.mapping}")
# Example 5: Directed Graph Isomorphism
print("\n" + "=" * 50)
print("Example 5: Directed Graph Isomorphism")
print("=" * 50)
# Two 3-cycles with different labels; edge directions must also match.
DG1 = nx.DiGraph()
DG1.add_edges_from([(1, 2), (2, 3), (3, 1)])
DG2 = nx.DiGraph()
DG2.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'A')])
# Use DiGraphMatcher for directed graphs
DGM = nx.isomorphism.DiGraphMatcher(DG1, DG2)
is_di_iso = DGM.is_isomorphic()
print(f"\nAre directed graphs isomorphic? {is_di_iso}")
if is_di_iso:
    print(f"Mapping: {DGM.mapping}")
# Visualization
print("\n" + "=" * 50)
print("Visualizing Example Graphs")
print("=" * 50)
# 2x2 grid of plots: the isomorphic squares on top, subgraph demo below.
fig, axes = plt.subplots(2, 2, figsize=(12, 10))
# Plot G1 and G2
nx.draw(G1, ax=axes[0, 0], with_labels=True, node_color='lightblue',
        node_size=500, font_size=16)
axes[0, 0].set_title('G1 (Square)')
nx.draw(G2, ax=axes[0, 1], with_labels=True, node_color='lightgreen',
        node_size=500, font_size=16)
axes[0, 1].set_title('G2 (Square, isomorphic to G1)')
# Plot large graph and pattern
pos_large = nx.spring_layout(G_large)
nx.draw(G_large, pos_large, ax=axes[1, 0], with_labels=True,
        node_color='lightcoral', node_size=500, font_size=16)
axes[1, 0].set_title('Large Graph')
nx.draw(G_pattern, ax=axes[1, 1], with_labels=True,
        node_color='lightyellow', node_size=500, font_size=16)
axes[1, 1].set_title('Pattern (Triangle)')
plt.tight_layout()
plt.savefig('vf2_examples.png', dpi=150, bbox_inches='tight')
print("\nVisualization saved as 'vf2_examples.png'")
plt.show()
#!/usr/bin/python3.12
import networkx as nx
import matplotlib.pyplot as plt
print("=" * 60)
print("VF2 Subgraph Isomorphism - Finding Patterns in Graphs")
print("=" * 60)
# Example 1: Finding a Triangle Pattern
print("\nExample 1: Finding Triangle Patterns")
print("-" * 60)
# Create a larger graph (a social network)
G_social = nx.Graph()
G_social.add_edges_from([
(1, 2), (2, 3), (3, 1), # Triangle 1
(3, 4), (4, 5), (5, 6),
(6, 7), (7, 8), (8, 6), # Triangle 2
(4, 9), (9, 10)
])
# Pattern: A triangle (3 nodes forming a cycle)
pattern_triangle = nx.Graph()
pattern_triangle.add_edges_from([('A', 'B'), ('B', 'C'), ('C', 'A')])
# Find all occurrences of the triangle pattern
GM = nx.isomorphism.GraphMatcher(G_social, pattern_triangle)
print(f"Is triangle a subgraph? {GM.subgraph_is_isomorphic()}")
print("\nAll triangle patterns found:")
for i, match in enumerate(GM.subgraph_isomorphisms_iter(), 1):
print(f" Triangle {i}: {match}")
# Example 2: Finding a Path Pattern
print("\n" + "=" * 60)
print("Example 2: Finding Linear Path Patterns")
print("-" * 60)
# Pattern: A path of length 3 (4 nodes in a line)
pattern_path = nx.Graph()
pattern_path.add_edges_from([(1, 2), (2, 3), (3, 4)])
GM_path = nx.isomorphism.GraphMatcher(G_social, pattern_path)
print(f"\nIs path a subgraph? {GM_path.subgraph_is_isomorphic()}")
print("\nFirst 5 path patterns found:")
for i, match in enumerate(GM_path.subgraph_isomorphisms_iter(), 1):
if i > 5:
break
print(f" Path {i}: {match}")
# Example 3: Finding Star Pattern
print("\n" + "=" * 60)
print("Example 3: Finding Star Patterns (Hub and Spokes)")
print("-" * 60)
# Create a network with star patterns
G_network = nx.Graph()
G_network.add_edges_from([
# Star 1: node 1 is the hub
(1, 2), (1, 3), (1, 4),
# Additional connections
(2, 5), (3, 6),
# Star 2: node 7 is the hub
(7, 8), (7, 9), (7, 10),
(5, 7)
])
# Pattern: Star with 3 spokes
pattern_star = nx.Graph()
pattern_star.add_edges_from([('center', 'a'), ('center', 'b'), ('center', 'c')])
GM_star = nx.isomorphism.GraphMatcher(G_network, pattern_star)
print(f"\nIs star a subgraph? {GM_star.subgraph_is_isomorphic()}")
print("\nAll star patterns found:")
for i, match in enumerate(GM_star.subgraph_isomorphisms_iter(), 1):
print(f" Star {i}: {match}")
# Example 4: Chemical Structure - Finding Benzene Ring
print("\n" + "=" * 60)
print("Example 4: Chemical Structure - Finding Benzene Rings")
print("-" * 60)
# Larger molecule with multiple rings
G_molecule = nx.Graph()
# Benzene ring 1
G_molecule.add_edges_from([
(1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 1)
])
# Additional structure
G_molecule.add_edges_from([
(3, 7), (7, 8), (8, 9), (9, 10), (10, 11), (11, 8) # Another ring
])
# Pattern: Hexagonal ring (benzene-like)
pattern_hexagon = nx.Graph()
pattern_hexagon.add_edges_from([
('a', 'b'), ('b', 'c'), ('c', 'd'),
('d', 'e'), ('e', 'f'), ('f', 'a')
])
GM_chem = nx.isomorphism.GraphMatcher(G_molecule, pattern_hexagon)
print(f"\nIs hexagon a subgraph? {GM_chem.subgraph_is_isomorphic()}")
print("\nAll hexagonal rings found:")
for i, match in enumerate(GM_chem.subgraph_isomorphisms_iter(), 1):
print(f" Ring {i}: {match}")
# Example 5: Subgraph with Attributes
print("\n" + "=" * 60)
print("Example 5: Pattern Matching with Node Attributes")
print("-" * 60)
# Create a graph with colored nodes
G_colored = nx.Graph()
G_colored.add_node(1, color='red')
G_colored.add_node(2, color='blue')
G_colored.add_node(3, color='red')
G_colored.add_node(4, color='blue')
G_colored.add_node(5, color='red')
G_colored.add_node(6, color='green')
G_colored.add_edges_from([
(1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (1, 6)
])
# Pattern: red-blue-red sequence
pattern_colored = nx.Graph()
pattern_colored.add_node('X', color='red')
pattern_colored.add_node('Y', color='blue')
pattern_colored.add_node('Z', color='red')
pattern_colored.add_edges_from([('X', 'Y'), ('Y', 'Z')])
def node_match(n1, n2):
    """Attribute matcher for VF2: require identical 'color' on both nodes."""
    return (n1['color'],) == (n2['color'],)
# Attribute-constrained subgraph search: structure AND node colors must match.
GM_colored = nx.isomorphism.GraphMatcher(G_colored, pattern_colored,
                                         node_match=node_match)
print(f"\nIs colored pattern a subgraph? {GM_colored.subgraph_is_isomorphic()}")
print("\nAll colored patterns found:")
for i, match in enumerate(GM_colored.subgraph_isomorphisms_iter(), 1):
    print(f" Pattern {i}: {match}")
# Visualization
print("\n" + "=" * 60)
print("Creating Visualizations...")
print("=" * 60)
# 3x3 grid: each row shows (host graph, pattern, first match highlighted).
fig = plt.figure(figsize=(16, 12))
# Example 1: Social Network with Triangles
ax1 = plt.subplot(3, 3, 1)
pos1 = nx.spring_layout(G_social, seed=42)
nx.draw(G_social, pos1, with_labels=True, node_color='lightblue',
        node_size=600, font_size=12, font_weight='bold')
ax1.set_title('Social Network\n(Contains Triangles)', fontsize=12, fontweight='bold')
ax2 = plt.subplot(3, 3, 2)
pos2 = nx.circular_layout(pattern_triangle)
nx.draw(pattern_triangle, pos2, with_labels=True, node_color='yellow',
        node_size=800, font_size=12, font_weight='bold')
ax2.set_title('Pattern: Triangle', fontsize=12, fontweight='bold')
# Highlight found triangles in the social network
ax3 = plt.subplot(3, 3, 3)
nx.draw(G_social, pos1, with_labels=True, node_color='lightgray',
        node_size=600, font_size=12, alpha=0.3)
# Highlight first triangle found
# (calling subgraph_isomorphisms_iter() again restarts the search)
first_triangle = list(GM.subgraph_isomorphisms_iter())[0]
triangle_nodes = list(first_triangle.keys())
nx.draw_networkx_nodes(G_social, pos1, nodelist=triangle_nodes,
                       node_color='red', node_size=600)
nx.draw_networkx_labels(G_social, pos1, font_size=12, font_weight='bold')
ax3.set_title('First Triangle Found\n(Highlighted)', fontsize=12, fontweight='bold')
# Example 2: Star Pattern
ax4 = plt.subplot(3, 3, 4)
pos3 = nx.spring_layout(G_network, seed=42)
nx.draw(G_network, pos3, with_labels=True, node_color='lightgreen',
        node_size=600, font_size=12, font_weight='bold')
ax4.set_title('Network Graph\n(Contains Stars)', fontsize=12, fontweight='bold')
ax5 = plt.subplot(3, 3, 5)
pos4 = nx.spring_layout(pattern_star, seed=42)
nx.draw(pattern_star, pos4, with_labels=True, node_color='yellow',
        node_size=800, font_size=12, font_weight='bold')
ax5.set_title('Pattern: Star (3 spokes)', fontsize=12, fontweight='bold')
# Highlight found star
ax6 = plt.subplot(3, 3, 6)
nx.draw(G_network, pos3, with_labels=True, node_color='lightgray',
        node_size=600, font_size=12, alpha=0.3)
first_star = list(GM_star.subgraph_isomorphisms_iter())[0]
star_nodes = list(first_star.keys())
nx.draw_networkx_nodes(G_network, pos3, nodelist=star_nodes,
                       node_color='orange', node_size=600)
nx.draw_networkx_labels(G_network, pos3, font_size=12, font_weight='bold')
ax6.set_title('First Star Found\n(Highlighted)', fontsize=12, fontweight='bold')
# Example 3: Chemical Structure
ax7 = plt.subplot(3, 3, 7)
pos5 = nx.spring_layout(G_molecule, seed=42)
nx.draw(G_molecule, pos5, with_labels=True, node_color='lightcoral',
        node_size=600, font_size=12, font_weight='bold')
ax7.set_title('Molecule\n(Contains Hexagonal Rings)', fontsize=12, fontweight='bold')
ax8 = plt.subplot(3, 3, 8)
pos6 = nx.circular_layout(pattern_hexagon)
nx.draw(pattern_hexagon, pos6, with_labels=True, node_color='yellow',
        node_size=800, font_size=12, font_weight='bold')
ax8.set_title('Pattern: Hexagon', fontsize=12, fontweight='bold')
# Highlight found hexagon
ax9 = plt.subplot(3, 3, 9)
nx.draw(G_molecule, pos5, with_labels=True, node_color='lightgray',
        node_size=600, font_size=12, alpha=0.3)
first_hex = list(GM_chem.subgraph_isomorphisms_iter())[0]
hex_nodes = list(first_hex.keys())
nx.draw_networkx_nodes(G_molecule, pos5, nodelist=hex_nodes,
                       node_color='purple', node_size=600)
nx.draw_networkx_labels(G_molecule, pos5, font_size=12, font_weight='bold')
ax9.set_title('First Hexagon Found\n(Highlighted)', fontsize=12, fontweight='bold')
plt.tight_layout()
plt.savefig('vf2_subgraph_examples.png', dpi=150, bbox_inches='tight')
print("\nVisualization saved as 'vf2_subgraph_examples.png'")
plt.show()
print("\n" + "=" * 60)
print("Summary: VF2 successfully found patterns as subgraphs!")
print("=" * 60)
#!/usr/bin/python3.12
"""
Graph Isomorphism Detection Pipeline
Combines Weisfeiler-Leman (fast filtering) with VF2 (exact matching)
for efficient circuit topology comparison
"""
import networkx as nx
from collections import defaultdict, Counter
import time
from typing import List, Tuple, Optional, Dict, Set
class WeisfeilerLeman:
    """
    Weisfeiler-Leman algorithm for graph isomorphism testing.
    Provides fast polynomial-time filtering but is incomplete:
    matching signatures do NOT guarantee isomorphism.
    """

    def __init__(self, max_iterations: int = 100):
        self.max_iterations = max_iterations

    def compute_color_signature(self, G: nx.Graph) -> Tuple[tuple, List[Dict]]:
        """
        Compute WL color signature for a graph.
        Returns: (color_distribution, iteration_history)
        """
        # Initial coloring: each vertex is colored with its degree.
        colors = {v: G.degree(v) for v in G.nodes()}
        history = [dict(colors)]
        for _ in range(self.max_iterations):
            # Refine: new color = hash of (own color, sorted neighbor colors).
            refined = {
                v: hash((colors[v], tuple(sorted(colors[w] for w in G.neighbors(v)))))
                for v in G.nodes()
            }
            if refined == colors:
                break  # colors stabilized; further rounds change nothing
            colors = refined
            history.append(dict(colors))
        # Signature is the sorted multiset of color frequencies.
        distribution = tuple(sorted(Counter(colors.values()).items()))
        return distribution, history

    def test_isomorphism(self, G1: nx.Graph, G2: nx.Graph) -> Tuple[bool, str]:
        """
        Test if two graphs might be isomorphic.
        Returns: (possibly_isomorphic, reason)
        """
        # Cheap invariants first; each one rejects many non-isomorphic pairs.
        if G1.number_of_nodes() != G2.number_of_nodes():
            return False, "Different number of nodes"
        if G1.number_of_edges() != G2.number_of_edges():
            return False, "Different number of edges"
        if sorted(d for _, d in G1.degree()) != sorted(d for _, d in G2.degree()):
            return False, "Different degree sequences"
        if sum(nx.triangles(G1).values()) // 3 != sum(nx.triangles(G2).values()) // 3:
            return False, "Different triangle counts"
        # All fast checks passed: run the comparatively expensive WL refinement.
        sig1, history1 = self.compute_color_signature(G1)
        sig2, history2 = self.compute_color_signature(G2)
        if sig1 != sig2:
            return False, f"Different WL signatures after {len(history1)} iterations"
        return True, f"WL test passed (signatures match after {len(history1)} iterations)"
class VF2Matcher:
    """
    VF2 algorithm for complete graph isomorphism.
    Provides definitive answer but can be slow on large/symmetric graphs.
    """

    def __init__(self):
        # Recursive-call counter. Only custom_vf2 updates it; the NetworkX
        # path in find_isomorphism cannot observe recursion, so it stays 0.
        self.call_count = 0

    def find_isomorphism(self, G1: nx.Graph, G2: nx.Graph) -> Tuple[bool, Optional[Dict], int]:
        """
        Find an isomorphism between two graphs using VF2.
        Returns: (is_isomorphic, mapping, num_recursive_calls)

        Note: num_recursive_calls is always 0 on this path because
        NetworkX's GraphMatcher does not expose its internal call count.
        """
        self.call_count = 0
        # Use NetworkX's built-in VF2 implementation
        matcher = nx.algorithms.isomorphism.GraphMatcher(G1, G2)
        try:
            is_iso = matcher.is_isomorphic()
            mapping = matcher.mapping if is_iso else None
            return is_iso, mapping, self.call_count
        except Exception:
            # Fixed: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt. Best-effort behavior preserved: any matcher
            # failure is reported as "not isomorphic".
            return False, None, self.call_count

    def custom_vf2(self, G1: nx.Graph, G2: nx.Graph) -> Tuple[bool, Optional[Dict]]:
        """
        Simplified custom VF2 implementation for educational purposes.

        Returns: (found, mapping) where mapping is a G1-node -> G2-node dict,
        or None when no isomorphism exists. Increments self.call_count once
        per backtracking step.
        """
        self.call_count = 0

        def is_feasible(mapping: Dict, n1, n2) -> bool:
            """Check if adding (n1, n2) to mapping is feasible."""
            # Check if already mapped
            if n1 in mapping or n2 in mapping.values():
                return False
            # Semantic feasibility: degrees must match
            if G1.degree(n1) != G2.degree(n2):
                return False
            # Syntactic feasibility: mapped neighbors must correspond
            for neighbor1 in G1.neighbors(n1):
                if neighbor1 in mapping:
                    neighbor2 = mapping[neighbor1]
                    if neighbor2 not in G2.neighbors(n2):
                        return False
            return True

        def backtrack(mapping: Dict) -> bool:
            """Recursive backtracking search."""
            self.call_count += 1
            # Base case: all vertices mapped
            if len(mapping) == G1.number_of_nodes():
                return True
            # Choose next unmapped vertex from G1
            unmapped1 = [n for n in G1.nodes() if n not in mapping]
            if not unmapped1:
                return False
            n1 = unmapped1[0]
            # Try mapping to each unmapped vertex in G2
            unmapped2 = [n for n in G2.nodes() if n not in mapping.values()]
            for n2 in unmapped2:
                if is_feasible(mapping, n1, n2):
                    # Add to mapping
                    mapping[n1] = n2
                    # Recurse
                    if backtrack(mapping):
                        return True
                    # Backtrack
                    del mapping[n1]
            return False

        mapping = {}
        found = backtrack(mapping)
        return found, mapping if found else None
class IsomorphismPipeline:
    """
    Combined pipeline: Fast pre-checks + WL filtering + Exact VF2 matching

    Each comparison walks through progressively more expensive stages and
    stops at the first stage that proves the pair non-isomorphic. Cumulative
    outcome counters live in self.stats and per-stage wall-clock totals in
    self.timing.
    """

    def __init__(self):
        self.wl = WeisfeilerLeman()
        self.vf2 = VF2Matcher()
        # Per-stage outcome counters. Note: vf2_confirmations/vf2_rejections
        # are a breakdown of wl_passes, not independent comparisons.
        self.stats = {
            'basic_rejections': 0,     # Rejected by node/edge count
            'degree_rejections': 0,    # Rejected by degree sequence
            'triangle_rejections': 0,  # Rejected by triangle count
            'wl_rejections': 0,        # Rejected by WL
            'wl_passes': 0,            # Passed WL, need VF2
            'vf2_confirmations': 0,
            'vf2_rejections': 0
        }
        # Cumulative wall-clock seconds spent in each stage.
        self.timing = {
            'basic_time': 0,
            'degree_time': 0,
            'triangle_time': 0,
            'wl_time': 0,
            'vf2_time': 0
        }

    def test_isomorphism(self, G1: nx.Graph, G2: nx.Graph,
                         use_vf2: bool = True) -> Dict:
        """
        Test isomorphism using multi-tier approach with detailed timing.
        Args:
            G1, G2: Graphs to compare
            use_vf2: If True, run VF2 when all filters pass
        Returns:
            Dictionary with results and timing breakdown
            ('isomorphic' is None when use_vf2=False and all filters pass).
        """
        result = {
            'isomorphic': None,
            'passed_basic': False,
            'passed_degree': False,
            'passed_triangle': False,
            'passed_wl': False,
            'vf2_used': False,
            'basic_time': 0,
            'degree_time': 0,
            'triangle_time': 0,
            'wl_time': 0,
            'vf2_time': 0,
            'total_time': 0,
            'mapping': None,
            'reason': ''
        }
        start_time = time.time()
        # Stage 1: Basic checks (node/edge count) - O(1)
        t = time.time()
        if G1.number_of_nodes() != G2.number_of_nodes():
            result['basic_time'] = time.time() - t
            result['reason'] = "Basic check: Different number of nodes"
            result['isomorphic'] = False
            self.stats['basic_rejections'] += 1
            self.timing['basic_time'] += result['basic_time']
            result['total_time'] = time.time() - start_time
            return result
        if G1.number_of_edges() != G2.number_of_edges():
            result['basic_time'] = time.time() - t
            result['reason'] = "Basic check: Different number of edges"
            result['isomorphic'] = False
            self.stats['basic_rejections'] += 1
            self.timing['basic_time'] += result['basic_time']
            result['total_time'] = time.time() - start_time
            return result
        result['basic_time'] = time.time() - t
        result['passed_basic'] = True
        self.timing['basic_time'] += result['basic_time']
        # Stage 2: Degree sequence check - O(n log n)
        t = time.time()
        deg_seq1 = sorted([d for n, d in G1.degree()])
        deg_seq2 = sorted([d for n, d in G2.degree()])
        result['degree_time'] = time.time() - t
        self.timing['degree_time'] += result['degree_time']
        if deg_seq1 != deg_seq2:
            result['reason'] = "Degree sequence check: Different degree distributions"
            result['isomorphic'] = False
            self.stats['degree_rejections'] += 1
            result['total_time'] = time.time() - start_time
            return result
        result['passed_degree'] = True
        # Stage 3: Triangle count - O(n*d^2), fast for sparse graphs
        t = time.time()
        triangles1 = sum(nx.triangles(G1).values()) // 3
        triangles2 = sum(nx.triangles(G2).values()) // 3
        result['triangle_time'] = time.time() - t
        self.timing['triangle_time'] += result['triangle_time']
        if triangles1 != triangles2:
            result['reason'] = "Triangle count check: Different triangle counts"
            result['isomorphic'] = False
            self.stats['triangle_rejections'] += 1
            result['total_time'] = time.time() - start_time
            return result
        result['passed_triangle'] = True
        # Stage 4: WL color refinement - O(n log n) but higher constant
        t = time.time()
        sig1, history1 = self.wl.compute_color_signature(G1)
        sig2, history2 = self.wl.compute_color_signature(G2)
        result['wl_time'] = time.time() - t
        self.timing['wl_time'] += result['wl_time']
        if sig1 != sig2:
            result['reason'] = f"WL check: Different color signatures after {len(history1)} iterations"
            result['isomorphic'] = False
            self.stats['wl_rejections'] += 1
            result['total_time'] = time.time() - start_time
            return result
        result['passed_wl'] = True
        result['reason'] = f"All filters passed (WL: {len(history1)} iterations)"
        self.stats['wl_passes'] += 1
        # Stage 5: VF2 exact matching (if requested)
        if use_vf2:
            t = time.time()
            is_iso, mapping, calls = self.vf2.find_isomorphism(G1, G2)
            result['vf2_time'] = time.time() - t
            result['vf2_used'] = True
            result['isomorphic'] = is_iso
            result['mapping'] = mapping
            self.timing['vf2_time'] += result['vf2_time']
            if is_iso:
                result['reason'] += " → VF2 confirmed isomorphism"
                self.stats['vf2_confirmations'] += 1
            else:
                result['reason'] += " → VF2 rejected (false positive)"
                self.stats['vf2_rejections'] += 1
        else:
            result['isomorphic'] = None  # Unknown without VF2
        result['total_time'] = time.time() - start_time
        return result

    def print_stats(self):
        """Print pipeline statistics with detailed breakdown."""
        print("\n" + "="*60)
        print("PIPELINE STATISTICS")
        print("="*60)
        # Bug fix: the old total was sum(self.stats.values()), which
        # double-counted VF2 outcomes (vf2_confirmations/vf2_rejections are a
        # subset of wl_passes), inflating the total and skewing every
        # percentage below. Count exactly one terminal outcome per comparison.
        total = (self.stats['basic_rejections'] + self.stats['degree_rejections'] +
                 self.stats['triangle_rejections'] + self.stats['wl_rejections'] +
                 self.stats['wl_passes'])
        if total == 0:
            # Guard against ZeroDivisionError before any comparisons have run.
            print("No comparisons recorded.")
            return
        print(f"Total comparisons: {total}")
        print(f"\nREJECTIONS BY STAGE:")
        print(f" Basic checks (nodes/edges): {self.stats['basic_rejections']:>4} ({self.stats['basic_rejections']/total*100:>5.1f}%)")
        print(f" Degree sequence: {self.stats['degree_rejections']:>4} ({self.stats['degree_rejections']/total*100:>5.1f}%)")
        print(f" Triangle count: {self.stats['triangle_rejections']:>4} ({self.stats['triangle_rejections']/total*100:>5.1f}%)")
        print(f" WL color refinement: {self.stats['wl_rejections']:>4} ({self.stats['wl_rejections']/total*100:>5.1f}%)")
        print(f" Passed all filters (VF2 ran): {self.stats['wl_passes']:>4} ({self.stats['wl_passes']/total*100:>5.1f}%)")
        if self.stats['vf2_confirmations'] > 0 or self.stats['vf2_rejections'] > 0:
            print(f"\nVF2 RESULTS:")
            print(f" Confirmed isomorphic: {self.stats['vf2_confirmations']:>4}")
            print(f" Rejected (false positives): {self.stats['vf2_rejections']:>4}")
        fast_rejected = (self.stats['basic_rejections'] +
                         self.stats['degree_rejections'] +
                         self.stats['triangle_rejections'])
        print(f"\nFILTERING EFFICIENCY:")
        print(f" Fast rejected (no WL/VF2): {fast_rejected:>4} ({fast_rejected/total*100:>5.1f}%)")
        print(f" Required expensive checks: {self.stats['wl_rejections'] + self.stats['wl_passes']:>4} ({(self.stats['wl_rejections'] + self.stats['wl_passes'])/total*100:>5.1f}%)")
        print(f"\nCUMULATIVE TIMING:")
        print(f" Basic checks: {self.timing['basic_time']*1000:>8.2f} ms")
        print(f" Degree sequence: {self.timing['degree_time']*1000:>8.2f} ms")
        print(f" Triangle count: {self.timing['triangle_time']*1000:>8.2f} ms")
        print(f" WL refinement: {self.timing['wl_time']*1000:>8.2f} ms")
        print(f" VF2 matching: {self.timing['vf2_time']*1000:>8.2f} ms")
        total_filter_time = (self.timing['basic_time'] + self.timing['degree_time'] +
                             self.timing['triangle_time'] + self.timing['wl_time'])
        print(f" Total filtering: {total_filter_time*1000:>8.2f} ms")
        print(f" Total VF2: {self.timing['vf2_time']*1000:>8.2f} ms")
def create_example_graphs() -> List[Tuple[nx.Graph, nx.Graph, str]]:
    """Create example graph pairs for testing - scaled to show filter advantage.

    Returns:
        List of (G1, G2, description) tuples. Every generator call below is
        seeded explicitly, so the examples are reproducible run to run.
        (Removed dead code: a function-local `import random; random.seed(42)`
        that no generator used, since each one receives its own seed=.)
    """
    examples = []
    # Example 1: LARGE dense graph (VF2 struggles here)
    G1 = nx.gnm_random_graph(150, 1500, seed=42)  # Much larger and denser
    G2 = nx.gnm_random_graph(150, 1500, seed=43)
    examples.append((G1, G2, "Large dense graphs (150 nodes, 1500 edges) - filters win"))
    # Example 2: Regular graphs (highly symmetric - VF2's worst case)
    G3 = nx.random_regular_graph(4, 100, seed=42)  # All nodes degree 4
    G4 = nx.random_regular_graph(4, 100, seed=43)  # Different instance
    examples.append((G3, G4, "Regular graphs (100 nodes, all degree 4) - VF2 struggles"))
    # Example 3: Large scale-free graphs (realistic for circuits)
    G5 = nx.barabasi_albert_graph(200, 5, seed=100)
    G6 = nx.barabasi_albert_graph(200, 5, seed=101)
    examples.append((G5, G6, "Scale-free graphs (200 nodes) - filters fast, VF2 slow"))
    # Example 4: Grid graph variants (VF2's exponential behavior shows)
    G7 = nx.grid_2d_graph(12, 12)  # 144 nodes, highly symmetric
    G8 = nx.grid_2d_graph(12, 12)
    # Make them different by rewiring a few edges
    edges = list(G8.edges())
    G8.remove_edge(*edges[0])
    G8.add_edge(edges[0][0], edges[5][1])
    examples.append((G7, G8, "Large grid graphs (144 nodes) - VF2 explores many symmetries"))
    # Example 5: Complete bipartite (maximum symmetry)
    G9 = nx.complete_bipartite_graph(40, 40)   # 80 nodes, highly symmetric
    G10 = nx.complete_bipartite_graph(40, 41)  # Different size
    examples.append((G9, G10, "Complete bipartite - basic filter catches instantly"))
    return examples
def main():
    """Demonstrate the two-tier isomorphism detection pipeline.

    Part 1 runs the filter + VF2 pipeline on each example pair and prints
    a per-stage timing breakdown.  Part 2 benchmarks a simulated
    10,000-cell library screen twice on identical inputs — once with VF2
    alone, once with the cheap filters in front — and reports the speedup
    together with per-filter rejection statistics.
    """
    print("="*60)
    print("GRAPH ISOMORPHISM DETECTION PIPELINE")
    print("Weisfeiler-Leman (fast filtering) + VF2 (exact matching)")
    print("="*60)
    pipeline = IsomorphismPipeline()
    examples = create_example_graphs()
    for i, (G1, G2, description) in enumerate(examples, 1):
        print(f"\n{'='*60}")
        print(f"Example {i}: {description}")
        print(f"{'='*60}")
        print(f"G1: {G1.number_of_nodes()} nodes, {G1.number_of_edges()} edges")
        print(f"G2: {G2.number_of_nodes()} nodes, {G2.number_of_edges()} edges")
        # Run the full pipeline (filters first; VF2 only if all filters pass).
        result = pipeline.test_isomorphism(G1, G2, use_vf2=True)
        # Report outcome and the per-stage timing breakdown.  Later stages
        # only ran (and are only printed) if every earlier stage passed.
        print(f"\n{'─'*60}")
        print(f"Result: {'ISOMORPHIC' if result['isomorphic'] else 'NOT ISOMORPHIC'}")
        print(f"{'─'*60}")
        print(f"Reason: {result['reason']}")
        print(f"\nTiming breakdown:")
        print(f"  Basic checks:    {result['basic_time']*1000:>6.3f} ms")
        if result['passed_basic']:
            print(f"  Degree sequence: {result['degree_time']*1000:>6.3f} ms")
        if result['passed_degree']:
            print(f"  Triangle count:  {result['triangle_time']*1000:>6.3f} ms")
        if result['passed_triangle']:
            print(f"  WL refinement:   {result['wl_time']*1000:>6.3f} ms")
        if result['vf2_used']:
            print(f"  VF2 matching:    {result['vf2_time']*1000:>6.3f} ms")
        print(f"  Total:           {result['total_time']*1000:>6.3f} ms")
        if result['mapping'] and result['isomorphic']:
            print(f"\nMapping (first 5 pairs): {dict(list(result['mapping'].items())[:5])}")
    print("\n" + "="*60)
    print("Note: Small examples above show individual algorithm behavior.")
    print("See efficiency demo below for when filters actually win.")
    print("="*60)
    # ------------------------------------------------------------------
    # Part 2: large-scale library screen where the filters actually win.
    # ------------------------------------------------------------------
    print("\n" + "="*60)
    print("REAL-WORLD SCENARIO: When filters actually WIN")
    print("="*60)
    print("Scenario: Pre-screening 10,000 library cells with OBVIOUS mismatches")
    print("Key insight: Don't run expensive checks when cheap ones suffice!")
    # Reference circuit: moderate-size scale-free graph.
    reference = nx.barabasi_albert_graph(80, 3, seed=42)
    print(f"\nReference circuit: {reference.number_of_nodes()} nodes, {reference.number_of_edges()} edges")
    print("\nGenerating library of 10,000 diverse circuits...")
    print("  - 100 isomorphic (exact relabeling)")
    print("  - 3,000 wrong size (different node count) → O(1) rejection")
    print("  - 2,000 wrong density (different edge count) → O(1) rejection")
    print("  - 3,000 wrong degree distribution → O(n) rejection")
    print("  - 1,900 structurally different → need VF2")
    import random

    def generate_test_graph(i: int) -> nx.Graph:
        """Build library graph #i; index bands map to known mismatch classes.

        Output depends on the module-level random state: callers must
        random.seed(...) identically before iterating so both strategies
        see the exact same sequence of graphs.  (Previously this logic was
        duplicated verbatim in both benchmark loops.)
        """
        if i < 100:
            # Isomorphic: a randomly relabeled copy of the reference.
            nodes = list(reference.nodes())
            random.shuffle(nodes)
            mapping = {old: new for old, new in zip(reference.nodes(), nodes)}
            return nx.relabel_nodes(reference, mapping)
        if i < 3100:
            # WRONG SIZE - filters catch instantly, VF2 still needs to check.
            return nx.barabasi_albert_graph(random.choice([60, 70, 90, 100]), 3, seed=i)
        if i < 5100:
            # WRONG EDGE COUNT - filters catch instantly.
            return nx.gnm_random_graph(80, random.choice([150, 180, 280, 320]), seed=i)
        if i < 8100:
            # WRONG DEGREE SEQUENCE - filters catch with an O(n) check.
            return nx.random_regular_graph(random.choice([3, 4, 5, 6]), 80, seed=i)
        # Similar basic stats to the reference: only VF2 can decide.
        return nx.barabasi_albert_graph(80, 3, seed=i)

    print("\nRunning comparisons...")
    # Strategy 1: VF2 only, no pre-filtering.
    print(f"\n  Testing: VF2 only...")
    vf2_only_time = 0
    random.seed(42)  # Fix the stream so strategy 2 regenerates identical graphs.
    for i in range(10000):
        test_graph = generate_test_graph(i)
        start = time.time()
        matcher = nx.algorithms.isomorphism.GraphMatcher(reference, test_graph)
        _ = matcher.is_isomorphic()
        vf2_only_time += time.time() - start
        if (i + 1) % 2000 == 0:
            print(f"    {i+1}/10,000 completed...")
    # Strategy 2: cheap filters first, VF2 only when every filter passes.
    print(f"\n  Testing: With filters...")
    filter_pipeline = IsomorphismPipeline()
    filter_time = 0
    random.seed(42)  # Reset: regenerate the exact same library.
    for i in range(10000):
        test_graph = generate_test_graph(i)
        result = filter_pipeline.test_isomorphism(reference, test_graph, use_vf2=True)
        filter_time += result['total_time']
        if (i + 1) % 2000 == 0:
            print(f"    {i+1}/10,000 completed...")
    # Final comparison of the two strategies.
    print("\n" + "="*60)
    print("FINAL RESULTS")
    print("="*60)
    print(f"\nVF2 only (no filtering): {vf2_only_time*1000:>10.2f} ms ({vf2_only_time:.3f}s)")
    print(f"With filtering pipeline: {filter_time*1000:>10.2f} ms ({filter_time:.3f}s)")
    print(f"\n{'='*60}")
    if filter_time < vf2_only_time:
        speedup = vf2_only_time / filter_time
        print(f"✓ SUCCESS: {speedup:.2f}x FASTER with filtering!")
        print(f"  Time saved: {(vf2_only_time - filter_time)*1000:.2f} ms ({(vf2_only_time - filter_time):.3f}s)")
    else:
        slowdown = filter_time / vf2_only_time
        print(f"✗ FAILURE: {slowdown:.2f}x slower with filtering")
        print(f"  Time wasted: {(filter_time - vf2_only_time)*1000:.2f} ms")
    print(f"{'='*60}")
    # Per-filter statistics explain where the time went.
    filter_pipeline.print_stats()
    print(f"\n{'='*60}")
    print("WHY FILTERS WIN:")
    print(f"{'='*60}")
    cheap_rejections = filter_pipeline.stats['basic_rejections'] + filter_pipeline.stats['degree_rejections']
    # cheap_rejections/100 == cheap_rejections/10000*100, i.e. a percentage.
    print(f"Cheap O(1) and O(n) checks rejected: {cheap_rejections} graphs ({cheap_rejections/100:.1f}%)")
    print(f"These would have taken ~{cheap_rejections * (vf2_only_time/10000):.3f}s with VF2")
    print(f"But took only ~{(filter_pipeline.timing['basic_time'] + filter_pipeline.timing['degree_time']):.3f}s with filters")
    print(f"Savings: {cheap_rejections * (vf2_only_time/10000) - (filter_pipeline.timing['basic_time'] + filter_pipeline.timing['degree_time']):.3f}s")
    print(f"\nPer-comparison averages:")
    print(f"  VF2 only:     {vf2_only_time/10000*1000:.4f} ms per graph")
    print(f"  With filters: {filter_time/10000*1000:.4f} ms per graph")
    # The key takeaway.
    print(f"\n{'='*60}")
    print("KEY INSIGHT FOR YOUR INTERVIEW:")
    print("="*60)
    print("Filters win when the dataset has:")
    print("  1. Many OBVIOUS mismatches (wrong size, density)")
    print("  2. Large scale (10,000+ comparisons)")
    print("  3. Cheap O(1) checks that eliminate bulk of candidates")
    print("")
    print("VF2-only wins when:")
    print("  1. Most graphs need deep structural analysis anyway")
    print("  2. Small datasets (< 1000 comparisons)")
    print("  3. Graphs are similar in basic properties")
    print("")
    print("In semiconductor EDA:")
    print("  - Use filters for initial library screening")
    print("  - Use VF2 directly for refined candidate comparison")
    print("  - Architecture matters more than algorithm choice")
    print("="*60)
# Script entry point.  The guard was previously duplicated (main() ran
# twice per invocation) and trailing non-Python page residue followed it;
# both defects are fixed here.
if __name__ == "__main__":
    main()