71 changes: 24 additions & 47 deletions ngraph/blueprints.py
@@ -11,8 +11,7 @@

@dataclass
class Blueprint:
"""
Represents a reusable blueprint for hierarchical sub-topologies.
"""Represents a reusable blueprint for hierarchical sub-topologies.

A blueprint may contain multiple groups of nodes (each can have a node_count
and a name_template), plus adjacency rules describing how those groups connect.
@@ -35,8 +34,7 @@ class Blueprint:

@dataclass
class DSLExpansionContext:
"""
Carries the blueprint definitions and the final Network instance
"""Carries the blueprint definitions and the final Network instance
to be populated during DSL expansion.

Attributes:
@@ -49,8 +47,7 @@ class DSLExpansionContext:


def expand_network_dsl(data: Dict[str, Any]) -> Network:
"""
Expands a combined blueprint + network DSL into a complete Network object.
"""Expands a combined blueprint + network DSL into a complete Network object.

Overall flow:
1) Parse "blueprints" into Blueprint objects.
@@ -164,8 +161,7 @@ def _expand_group(
group_def: Dict[str, Any],
inherited_risk_groups: Set[str] | None = None,
) -> None:
"""
Expands a single group definition into either:
"""Expands a single group definition into either:
- Another blueprint's subgroups, or
- A direct node group (with node_count, etc.),
- Possibly replicating itself if group_name has bracket expansions.
@@ -319,8 +315,7 @@ def _expand_blueprint_adjacency(
adj_def: Dict[str, Any],
parent_path: str,
) -> None:
"""
Expands adjacency definitions from within a blueprint, using parent_path
"""Expands adjacency definitions from within a blueprint, using parent_path
as the local root. This also handles optional expand_vars for repeated adjacency.

Recognized adjacency keys:
@@ -352,8 +347,7 @@


def _expand_adjacency(ctx: DSLExpansionContext, adj_def: Dict[str, Any]) -> None:
"""
Expands a top-level adjacency definition from 'network.adjacency'. If 'expand_vars'
"""Expands a top-level adjacency definition from 'network.adjacency'. If 'expand_vars'
is provided, we expand the source/target as templates repeatedly.

Recognized adjacency keys:
@@ -388,8 +382,7 @@ def _expand_adjacency(ctx: DSLExpansionContext, adj_def: Dict[str, Any]) -> None
def _expand_adjacency_with_variables(
ctx: DSLExpansionContext, adj_def: Dict[str, Any], parent_path: str
) -> None:
"""
Handles adjacency expansions when 'expand_vars' is provided.
"""Handles adjacency expansions when 'expand_vars' is provided.
We substitute variables into the 'source' and 'target' templates to produce
multiple adjacency expansions. Then each expansion is passed to _expand_adjacency_pattern.

@@ -451,8 +444,7 @@ def _expand_adjacency_pattern(
link_params: Dict[str, Any],
link_count: int = 1,
) -> None:
"""
Generates Link objects for the chosen adjacency pattern among matched nodes.
"""Generates Link objects for the chosen adjacency pattern among matched nodes.

Supported Patterns:
* "mesh": Connect every source node to every target node
@@ -531,8 +523,7 @@ def _create_link(
link_params: Dict[str, Any],
link_count: int = 1,
) -> None:
"""
Creates and adds one or more Links to the network, applying capacity, cost,
"""Creates and adds one or more Links to the network, applying capacity, cost,
disabled, risk_groups, and attrs from link_params if present.

Args:
@@ -566,8 +557,7 @@


def _process_direct_nodes(net: Network, network_data: Dict[str, Any]) -> None:
"""
Processes direct node definitions (network_data["nodes"]) and adds them to the network
"""Processes direct node definitions (network_data["nodes"]) and adds them to the network
if they do not already exist. If the node name already exists, we do nothing.

Allowed top-level keys for each node: {"disabled", "attrs", "risk_groups"}.
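For illustration, a direct node entry restricted to the allowed keys listed above might look like the following (whether `network_data["nodes"]` is a name-keyed mapping or a list is not visible in this diff; a mapping is assumed, and the node name and values are hypothetical):

```python
nodes_section = {
    "dc1/spine-1": {                    # hypothetical node name
        "disabled": False,
        "attrs": {"role": "spine"},     # free-form attributes
        "risk_groups": ["rg-power-a"],  # container type assumed
    },
}
```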
@@ -609,8 +599,7 @@ def _process_direct_nodes(net: Network, network_data: Dict[str, Any]) -> None:


def _process_direct_links(net: Network, network_data: Dict[str, Any]) -> None:
"""
Processes direct link definitions (network_data["links"]) and adds them to the network.
"""Processes direct link definitions (network_data["links"]) and adds them to the network.

Each link dict must contain {"source", "target"} plus optionally
{"link_params", "link_count"}. No other top-level keys allowed.
@@ -653,8 +642,7 @@ def _process_direct_links(net: Network, network_data: Dict[str, Any]) -> None:


def _process_link_overrides(net: Network, network_data: Dict[str, Any]) -> None:
"""
Processes the 'link_overrides' section of the network DSL, updating
"""Processes the 'link_overrides' section of the network DSL, updating
existing links with new parameters. Overrides are applied in order if
multiple items match the same link.

@@ -691,8 +679,7 @@ def _process_link_overrides(net: Network, network_data: Dict[str, Any]) -> None:


def _process_node_overrides(net: Network, network_data: Dict[str, Any]) -> None:
"""
Processes the 'node_overrides' section of the network DSL, updating
"""Processes the 'node_overrides' section of the network DSL, updating
existing nodes with new attributes in bulk. Overrides are applied in order
if multiple items match the same node.

@@ -740,8 +727,7 @@ def _update_links(
link_params: Dict[str, Any],
any_direction: bool = True,
) -> None:
"""
Updates all Link objects between nodes matching 'source' and 'target' paths
"""Updates all Link objects between nodes matching 'source' and 'target' paths
with new parameters (capacity, cost, disabled, risk_groups, attrs).

If any_direction=True, both (source->target) and (target->source) links
@@ -802,8 +788,7 @@ def _update_nodes(
disabled_val: Any = None,
risk_groups_val: Any = None,
) -> None:
"""
Updates attributes on all nodes matching a given path pattern.
"""Updates attributes on all nodes matching a given path pattern.

- If 'disabled_val' is not None, sets node.disabled to that boolean value.
- If 'risk_groups_val' is not None, *replaces* the node's risk_groups with that new set.
@@ -833,8 +818,7 @@
def _apply_parameters(
subgroup_name: str, subgroup_def: Dict[str, Any], params_overrides: Dict[str, Any]
) -> Dict[str, Any]:
"""
Applies user-provided parameter overrides to a blueprint subgroup.
"""Applies user-provided parameter overrides to a blueprint subgroup.

Example:
If 'spine.node_count' = 6 is in params_overrides,
@@ -864,8 +848,7 @@ def _apply_parameters(
def _apply_nested_path(
node_def: Dict[str, Any], path_parts: List[str], value: Any
) -> None:
"""
Recursively applies a path like ["attrs", "role"] to set node_def["attrs"]["role"] = value.
"""Recursively applies a path like ["attrs", "role"] to set node_def["attrs"]["role"] = value.
Creates intermediate dicts as needed.

Args:
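The behavior described here is self-contained enough for a small sketch (illustrative only; the real `_apply_nested_path` body is only partially shown in this diff):

```python
from typing import Any, Dict, List

def apply_nested_path(node_def: Dict[str, Any], path_parts: List[str], value: Any) -> None:
    # Walk/create intermediate dicts, then set the final key.
    key = path_parts[0]
    if len(path_parts) == 1:
        node_def[key] = value
        return
    child = node_def.setdefault(key, {})
    apply_nested_path(child, path_parts[1:], value)

node_def: Dict[str, Any] = {}
apply_nested_path(node_def, ["attrs", "role"], "spine")
assert node_def == {"attrs": {"role": "spine"}}
```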
@@ -888,8 +871,7 @@


def _expand_name_patterns(name: str) -> List[str]:
"""
Parses and expands bracketed expressions in a group name. For example:
"""Parses and expands bracketed expressions in a group name. For example:

"fa[1-3]" -> ["fa1", "fa2", "fa3"]
"dc[1,3,5-6]" -> ["dc1", "dc3", "dc5", "dc6"]
@@ -930,8 +912,7 @@ def _expand_name_patterns(name: str) -> List[str]:


def _parse_range_expr(expr: str) -> List[str]:
"""
Parses a bracket expression that might have commas, single values, and dash ranges.
"""Parses a bracket expression that might have commas, single values, and dash ranges.
For example: "1-3,5,7-9" -> ["1", "2", "3", "5", "7", "8", "9"].

Args:
@@ -955,8 +936,7 @@


def _join_paths(parent_path: str, rel_path: str) -> str:
"""
Joins two path segments according to NetGraph's DSL conventions:
"""Joins two path segments according to NetGraph's DSL conventions:

- If rel_path starts with '/', we strip the leading slash and treat it as
appended to parent_path if parent_path is not empty.
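Only the leading-slash rule is visible here, so the following is a hedged sketch of just that case (the '/' separator, the behavior for an empty parent_path, and the example paths are assumptions):

```python
def join_paths(parent_path: str, rel_path: str) -> str:
    # Illustrative only; covers just the rule quoted above.
    if rel_path.startswith("/"):
        rel_path = rel_path.lstrip("/")
    return f"{parent_path}/{rel_path}" if parent_path else rel_path

assert join_paths("dc1/plane1", "/spine") == "dc1/plane1/spine"  # hypothetical paths
assert join_paths("", "spine") == "spine"
```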
@@ -983,8 +963,7 @@
def _check_no_extra_keys(
data_dict: Dict[str, Any], allowed: set[str], context: str
) -> None:
"""
Checks that data_dict only has keys in 'allowed'. Raises ValueError if not.
"""Checks that data_dict only has keys in 'allowed'. Raises ValueError if not.

Args:
data_dict (Dict[str, Any]): The dict to check.
@@ -1000,8 +979,7 @@


def _check_adjacency_keys(adj_def: Dict[str, Any], context: str) -> None:
"""
Ensures adjacency definitions only contain recognized keys.
"""Ensures adjacency definitions only contain recognized keys.

Recognized adjacency keys are:
{"source", "target", "pattern", "link_count", "link_params",
@@ -1025,9 +1003,8 @@ def _check_adjacency_keys(adj_def: Dict[str, Any], context: str) -> None:


def _check_link_params(link_params: Dict[str, Any], context: str) -> None:
"""
Checks that link_params only has recognized keys:
{"capacity", "cost", "disabled", "risk_groups", "attrs"}.
"""Checks that link_params only has recognized keys:
{"capacity", "cost", "disabled", "risk_groups", "attrs"}.
"""
recognized = {"capacity", "cost", "disabled", "risk_groups", "attrs"}
extra = set(link_params.keys()) - recognized
8 changes: 7 additions & 1 deletion ngraph/cli.py
@@ -10,6 +10,7 @@

def _run_scenario(path: Path, output: Optional[Path]) -> None:
"""Run a scenario file and store results as JSON."""

yaml_text = path.read_text()
scenario = Scenario.from_yaml(yaml_text)
scenario.run()
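The same flow can be driven programmatically; this is a hedged sketch based only on the calls visible in this hunk (the `Scenario` import path and the file name are assumptions, and the JSON serialization step is not shown here):

```python
from pathlib import Path

from ngraph.scenario import Scenario  # import path assumed, not shown in this diff

yaml_text = Path("scenario.yaml").read_text()  # hypothetical scenario file
scenario = Scenario.from_yaml(yaml_text)
scenario.run()
# Serializing the results to JSON happens further down in _run_scenario,
# outside the lines shown in this hunk.
```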
@@ -23,7 +24,12 @@ def _run_scenario(path: Path, output: Optional[Path]) -> None:


def main(argv: Optional[List[str]] = None) -> None:
"""Entry point for the ``ngraph`` command."""
"""Entry point for the ``ngraph`` command.

Args:
argv: Optional list of command-line arguments. If ``None``, ``sys.argv``
is used.
"""
parser = argparse.ArgumentParser(prog="ngraph")
subparsers = parser.add_subparsers(dest="command", required=True)

39 changes: 13 additions & 26 deletions ngraph/components.py
@@ -9,8 +9,7 @@

@dataclass
class Component:
"""
A generic component that can represent chassis, line cards, optics, etc.
"""A generic component that can represent chassis, line cards, optics, etc.
Components can have nested children, each with their own cost, power, etc.

Attributes:
@@ -44,8 +43,7 @@ class Component:
children: Dict[str, Component] = field(default_factory=dict)

def total_cost(self) -> float:
"""
Computes the total (recursive) cost of this component, including children,
"""Computes the total (recursive) cost of this component, including children,
multiplied by this component's count.

Returns:
@@ -57,8 +55,7 @@ def total_cost(self) -> float:
return single_instance_cost * self.count

def total_power(self) -> float:
"""
Computes the total *typical* (recursive) power usage of this component,
"""Computes the total *typical* (recursive) power usage of this component,
including children, multiplied by this component's count.

Returns:
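A worked illustration of the recursive totals these total_* docstrings describe, assuming a single instance's total is its own value plus its children's totals, then multiplied by `count` (hypothetical numbers, not the library's code):

```python
# Chassis (cost=1000, count=1) with one child entry of line cards
# (cost=250 each, count=4):
line_cards_total = 250 * 4                       # child.total_cost()
chassis_total = (1000 + line_cards_total) * 1    # (own cost + children) * count
assert chassis_total == 2000
```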
@@ -70,8 +67,7 @@ def total_power(self) -> float:
return single_instance_power * self.count

def total_power_max(self) -> float:
"""
Computes the total *peak* (recursive) power usage of this component,
"""Computes the total *peak* (recursive) power usage of this component,
including children, multiplied by this component's count.

Returns:
@@ -83,8 +79,7 @@ def total_power_max(self) -> float:
return single_instance_power_max * self.count

def total_capacity(self) -> float:
"""
Computes the total (recursive) capacity of this component,
"""Computes the total (recursive) capacity of this component,
including children, multiplied by this component's count.

Returns:
@@ -96,8 +91,7 @@ def total_capacity(self) -> float:
return single_instance_capacity * self.count

def as_dict(self, include_children: bool = True) -> Dict[str, Any]:
"""
Returns a dictionary containing all properties of this component.
"""Returns a dictionary containing all properties of this component.

Args:
include_children (bool): If True, recursively includes children.
@@ -127,8 +121,7 @@ def as_dict(self, include_children: bool = True) -> Dict[str, Any]:

@dataclass
class ComponentsLibrary:
"""
Holds a collection of named Components. Each entry is a top-level "template"
"""Holds a collection of named Components. Each entry is a top-level "template"
that can be referenced for cost/power/capacity lookups, possibly with nested children.

Example (YAML-like):
@@ -155,8 +148,7 @@ class ComponentsLibrary:
components: Dict[str, Component] = field(default_factory=dict)

def get(self, name: str) -> Optional[Component]:
"""
Retrieves a Component by its name from the library.
"""Retrieves a Component by its name from the library.

Args:
name (str): Name of the component.
@@ -169,8 +161,7 @@ def get(self, name: str) -> Optional[Component]:
def merge(
self, other: ComponentsLibrary, override: bool = True
) -> ComponentsLibrary:
"""
Merges another ComponentsLibrary into this one. By default (override=True),
"""Merges another ComponentsLibrary into this one. By default (override=True),
duplicate components in `other` overwrite those in the current library.

Args:
@@ -186,8 +177,7 @@ def merge(
return self

def clone(self) -> ComponentsLibrary:
"""
Creates a deep copy of this ComponentsLibrary.
"""Creates a deep copy of this ComponentsLibrary.

Returns:
ComponentsLibrary: A new, cloned library instance.
@@ -196,8 +186,7 @@ def clone(self) -> ComponentsLibrary:

@classmethod
def from_dict(cls, data: Dict[str, Any]) -> ComponentsLibrary:
"""
Constructs a ComponentsLibrary from a dictionary of raw component definitions.
"""Constructs a ComponentsLibrary from a dictionary of raw component definitions.

Args:
data (Dict[str, Any]): Raw component definitions.
@@ -212,8 +201,7 @@ def from_dict(cls, data: Dict[str, Any]) -> ComponentsLibrary:

@classmethod
def _build_component(cls, name: str, definition_data: Dict[str, Any]) -> Component:
"""
Recursively constructs a single Component from a dictionary definition.
"""Recursively constructs a single Component from a dictionary definition.

Args:
name (str): Name of the component.
@@ -269,8 +257,7 @@ def _build_component(cls, name: str, definition_data: Dict[str, Any]) -> Compone

@classmethod
def from_yaml(cls, yaml_str: str) -> ComponentsLibrary:
"""
Constructs a ComponentsLibrary from a YAML string. If the YAML contains
"""Constructs a ComponentsLibrary from a YAML string. If the YAML contains
a top-level 'components' key, that key is used; otherwise the entire
top-level is treated as component definitions.
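The two accepted top-level YAML shapes described here can be illustrated with a small hedged example (the component field name `cost` is inferred from attributes used elsewhere in this file, and the exact schema is not fully shown in this diff):

```python
from ngraph.components import ComponentsLibrary

yaml_with_key = """
components:
  OpticX:        # hypothetical component name
    cost: 100    # field name assumed
"""

yaml_without_key = """
OpticX:
  cost: 100
"""

lib_a = ComponentsLibrary.from_yaml(yaml_with_key)
lib_b = ComponentsLibrary.from_yaml(yaml_without_key)
assert lib_a.get("OpticX") is not None
assert lib_b.get("OpticX") is not None
```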
