Merged
8 changes: 4 additions & 4 deletions jsonargparse/_actions.py
@@ -544,7 +544,7 @@ def _move_parser_actions(parser, args, kwargs):
title = kwargs.pop("title", kwargs.pop("help", None))
description = kwargs.pop("description", subparser.description)
if len(kwargs) > 0:
raise ValueError(f"ActionParser does not accept the following parameters: {set(kwargs.keys())}")
raise ValueError(f"ActionParser does not accept the following parameters: {set(kwargs)}")
if not (len(args) == 1 and args[0].startswith("--")):
raise ValueError(f"ActionParser only accepts a single optional key but got {args}")
prefix = args[0][2:]
@@ -558,7 +558,7 @@ def add_prefix(key):
for key, action in filter_default_actions(subparser._option_string_actions).items():
option_string_actions[add_prefix(key)] = action

isect = set(option_string_actions.keys()).intersection(set(parser._option_string_actions.keys()))
isect = set(option_string_actions).intersection(set(parser._option_string_actions))
if len(isect) > 0:
raise ValueError(f"ActionParser conflicting keys: {isect}")

@@ -705,7 +705,7 @@ def get_subcommands(
require_single = single_subcommand.get()

# Get subcommand settings keys
subcommand_keys = [k for k in action.choices.keys() if isinstance(cfg.get(prefix + k), Namespace)]
subcommand_keys = [k for k in action.choices if isinstance(cfg.get(prefix + k), Namespace)]

# Get subcommand
subcommand = None
@@ -735,7 +735,7 @@ def get_subcommands(
# If subcommand is required and no subcommand is provided,
# present the user with a friendly error message to remind them of
# the available subcommands and to select one.
available_subcommands = list(action._name_parser_map.keys())
available_subcommands = list(action._name_parser_map)
if len(available_subcommands) <= 5:
candidate_subcommands_str = "{" + ",".join(available_subcommands) + "}"
else:
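Most of the changes in this file follow one idiom: a dict (and its views) already iterates over its keys, so the explicit `.keys()` call is redundant. A minimal sketch of the pattern with made-up option names (not from the patch):

```python
# Illustrative only: dicts iterate over their keys by default.
options = {"--foo": 1, "--bar": 2}

assert set(options) == set(options.keys())
assert list(options) == list(options.keys())

# Intersection of two parsers' option strings, as in the change above:
other = {"--bar": 3, "--baz": 4}
conflicts = set(options).intersection(other)  # intersection accepts any iterable of keys
assert conflicts == {"--bar"}
```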
4 changes: 2 additions & 2 deletions jsonargparse/_cli.py
@@ -170,7 +170,7 @@ def _add_subcommands(


def has_parameter(component, name) -> bool:
return name in inspect.signature(component).parameters.keys()
return name in inspect.signature(component).parameters


def _add_component_to_parser(
@@ -180,7 +180,7 @@ def _add_component_to_parser(
fail_untyped: bool,
config_help: str,
):
kwargs: dict = dict(as_positional=as_positional, fail_untyped=fail_untyped, sub_configs=True)
kwargs: dict = {"as_positional": as_positional, "fail_untyped": fail_untyped, "sub_configs": True}
if inspect.isclass(component):
class_methods = [
k for k, v in inspect.getmembers(component) if (callable(v) or isinstance(v, property)) and k[0] != "_"
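Two related idioms appear in this file: `inspect.Signature.parameters` is a mapping, so `name in sig.parameters` is enough for a membership test, and a dict literal replaces the `dict()` constructor call. A small sketch (the `component` function is hypothetical):

```python
import inspect

def component(a, b, fail_untyped=False):
    return a, b

# Membership tests query a mapping's keys directly; no .keys() needed.
params = inspect.signature(component).parameters
assert "fail_untyped" in params
assert "missing" not in params

# A dict literal builds the same dict without the dict() name lookup and call.
kwargs = {"as_positional": True, "fail_untyped": False, "sub_configs": True}
assert kwargs == dict(as_positional=True, fail_untyped=False, sub_configs=True)
```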
34 changes: 17 additions & 17 deletions jsonargparse/_common.py
@@ -65,16 +65,16 @@ def __call__(self, class_type: Type[ClassType], *args, **kwargs) -> ClassType:
applied_instantiation_links: ContextVar[Optional[set]] = ContextVar("applied_instantiation_links", default=None)


parser_context_vars = dict(
parent_parser=parent_parser,
parser_capture=parser_capture,
defaults_cache=defaults_cache,
lenient_check=lenient_check,
load_value_mode=load_value_mode,
class_instantiators=class_instantiators,
nested_links=nested_links,
applied_instantiation_links=applied_instantiation_links,
)
parser_context_vars = {
"parent_parser": parent_parser,
"parser_capture": parser_capture,
"defaults_cache": defaults_cache,
"lenient_check": lenient_check,
"load_value_mode": load_value_mode,
"class_instantiators": class_instantiators,
"nested_links": nested_links,
"applied_instantiation_links": applied_instantiation_links,
}


@contextmanager
@@ -91,10 +91,10 @@ def parser_context(**kwargs):
context_var.reset(token)


parsing_settings = dict(
validate_defaults=False,
parse_optionals_as_positionals=False,
)
parsing_settings = {
"validate_defaults": False,
"parse_optionals_as_positionals": False,
}


def set_parsing_settings(
@@ -270,7 +270,7 @@ def __init__(self, instantiators: InstantiatorsDictType) -> None:
def __call__(self, class_type: Type[ClassType], *args, **kwargs) -> ClassType:
for (cls, subclasses), instantiator in self.instantiators.items():
if class_type is cls or (subclasses and is_subclass(class_type, cls)):
param_names = set(inspect.signature(instantiator).parameters.keys())
param_names = set(inspect.signature(instantiator).parameters)
if "applied_instantiation_links" in param_names:
applied_links = applied_instantiation_links.get() or set()
kwargs["applied_instantiation_links"] = {
@@ -316,7 +316,7 @@ def setup_default_logger(data, level, caller):
def parse_logger(logger: Union[bool, str, dict, logging.Logger], caller):
if not isinstance(logger, (bool, str, dict, logging.Logger)):
raise ValueError(f"Expected logger to be an instance of (bool, str, dict, logging.Logger), but got {logger}.")
if isinstance(logger, dict) and len(set(logger.keys()) - {"name", "level"}) > 0:
if isinstance(logger, dict) and len(set(logger) - {"name", "level"}) > 0:
value = {k: v for k, v in logger.items() if k not in {"name", "level"}}
raise ValueError(f"Unexpected data to configure logger: {value}.")
if logger is False:
@@ -384,6 +384,6 @@ class Action(LoggerProperty, argparse.Action):

def _check_type_(self, value, **kwargs):
if not hasattr(self, "_check_type_kwargs"):
self._check_type_kwargs = set(inspect.signature(self._check_type).parameters.keys())
self._check_type_kwargs = set(inspect.signature(self._check_type).parameters)
kwargs = {k: v for k, v in kwargs.items() if k in self._check_type_kwargs}
return self._check_type(value, **kwargs)
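The logger check above validates a dict by set difference on the dict itself rather than on `logger.keys()`. Roughly the same pattern in isolation, with hypothetical values:

```python
def check_logger_keys(logger_cfg: dict) -> None:
    # set(dict) builds a set of the keys, so the .keys() call is redundant.
    unexpected = set(logger_cfg) - {"name", "level"}
    if unexpected:
        extra = {k: v for k, v in logger_cfg.items() if k in unexpected}
        raise ValueError(f"Unexpected data to configure logger: {extra}.")

check_logger_keys({"name": "app", "level": "DEBUG"})   # passes silently
# check_logger_keys({"name": "app", "color": True})    # would raise ValueError
```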
8 changes: 4 additions & 4 deletions jsonargparse/_core.py
@@ -222,7 +222,7 @@ class ArgumentGroup(ActionsContainer, argparse._ArgumentGroup):
"""Extension of argparse._ArgumentGroup to support additional functionalities."""

dest: Optional[str] = None
parser: Optional[Union["ArgumentParser", "ActionsContainer"]] = None
parser: Optional[Union["ArgumentParser", ActionsContainer]] = None


class ArgumentParser(ParserDeprecations, ActionsContainer, ArgumentLinking, LoggerProperty, argparse.ArgumentParser):
@@ -1352,9 +1352,9 @@ def _apply_actions(
cfg_branch = cfg
cfg = Namespace()
cfg[parent_key] = cfg_branch
keys = [parent_key + "." + k for k in cfg_branch.__dict__.keys()]
keys = [parent_key + "." + k for k in cfg_branch.__dict__]
else:
keys = list(cfg.__dict__.keys())
keys = list(cfg.__dict__)

if prev_cfg:
prev_cfg = prev_cfg.clone()
@@ -1594,7 +1594,7 @@ def parser_mode(self, parser_mode: str):
if parser_mode == "omegaconf":
set_omegaconf_loader()
if parser_mode not in loaders:
raise ValueError(f"The only accepted values for parser_mode are {set(loaders.keys())}.")
raise ValueError(f"The only accepted values for parser_mode are {set(loaders)}.")
if parser_mode == "jsonnet":
import_jsonnet("parser_mode=jsonnet")
self._parser_mode = parser_mode
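Besides the `set(loaders)` change, this file drops the quotes from a forward reference that is no longer needed: `ActionsContainer` is already defined when `ArgumentGroup` is declared, so only the not-yet-defined `ArgumentParser` keeps the string form. A toy illustration of that rule, with invented class names and no `from __future__ import annotations`:

```python
from typing import Optional, Union

class Container:
    pass

class Group:
    # "Parser" must stay quoted: the class is defined further down the module.
    # Container needs no quotes: it is already defined at this point.
    parser: Optional[Union["Parser", Container]] = None

class Parser(Container):
    pass
```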
4 changes: 2 additions & 2 deletions jsonargparse/_deprecated.py
@@ -553,7 +553,7 @@ def __init__(self, *args, error_handler=False, **kwargs):
self.error_handler = error_handler

@property
def error_handler(self) -> Optional[Callable[["ArgumentParser", str], None]]:
def error_handler(self) -> Optional[Callable[[ArgumentParser, str], None]]:
"""Property for the error_handler function that is called when there are parsing errors.

:getter: Returns the current error_handler function.
@@ -601,7 +601,7 @@ def check_config(self, *args, **kwargs):
def deprecated_skip_check(component, kwargs: dict, skip_validation: bool) -> bool:
skip_check = kwargs.pop("skip_check", None)
if kwargs:
raise ValueError(f"Unexpected keyword parameters: {set(kwargs.keys())}")
raise ValueError(f"Unexpected keyword parameters: {set(kwargs)}")
if skip_check is not None:
skip_validation = skip_check
deprecation_warning(
2 changes: 1 addition & 1 deletion jsonargparse/_formatters.py
@@ -288,7 +288,7 @@ def set_yaml_argument_comment(


def get_env_var(
parser_or_formatter: Union["ArgumentParser", DefaultHelpFormatter],
parser_or_formatter: Union[ArgumentParser, DefaultHelpFormatter],
action: Optional[Action] = None,
) -> str:
"""Returns the environment variable name for a given parser or formatter and action."""
6 changes: 3 additions & 3 deletions jsonargparse/_loaders_dumpers.py
@@ -87,11 +87,11 @@ def yaml_load(stream):

value = yaml.load(stream, Loader=get_yaml_default_loader())
if isinstance(value, dict) and value and all(v is None for v in value.values()):
if len(value) == 1 and stream.strip() == next(iter(value.keys())) + ":":
if len(value) == 1 and stream.strip() == next(iter(value)) + ":":
value = stream
else:
keys = set(stream.strip(" {}").replace(" ", "").split(","))
if len(keys) > 0 and keys == set(value.keys()):
if len(keys) > 0 and keys == set(value):
value = stream
return value

@@ -284,7 +284,7 @@ def dump_using_format(parser: ArgumentParser, data: dict, dump_format: str) -> s
def set_loader(
mode: str,
loader_fn: Callable[[str], Any],
exceptions: Tuple[Type[Exception], ...] = tuple(),
exceptions: Tuple[Type[Exception], ...] = (),
json_superset: bool = True,
):
"""Sets the value loader function to be used when parsing with a certain mode.
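Two small touches in this file: `()` is the empty-tuple literal (no `tuple()` call needed), and `next(iter(value))` returns a dict's first key directly. A quick sketch:

```python
# The literal () builds the same empty tuple as tuple(), without a call.
exceptions: tuple = ()
assert exceptions == tuple()

# Iterating a dict yields its keys, so next(iter(value)) is the first key.
value = {"fit": None}
assert next(iter(value)) == "fit"
assert set(value) == {"fit"}
```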
4 changes: 2 additions & 2 deletions jsonargparse/_namespace.py
@@ -332,11 +332,11 @@ def del_clash_mark(key: str) -> str:

def expand_dict(cfg):
for k, v in cfg.items():
if isinstance(v, dict) and all(isinstance(k, str) for k in v.keys()):
if isinstance(v, dict) and all(isinstance(k, str) for k in v):
cfg[k] = expand_dict(v)
elif isinstance(v, list):
for nn, vv in enumerate(v):
if isinstance(vv, dict) and all(isinstance(k, str) for k in vv.keys()):
if isinstance(vv, dict) and all(isinstance(k, str) for k in vv):
cfg[k][nn] = expand_dict(vv)
return Namespace(**cfg)

46 changes: 23 additions & 23 deletions jsonargparse/_parameter_resolvers.py
@@ -172,7 +172,7 @@ def ast_is_call_with_value(node, value_dump) -> bool:


ast_constant_attr = {ast.Constant: "value"}
ast_constant_types = tuple(ast_constant_attr.keys())
ast_constant_types = tuple(ast_constant_attr)


def ast_is_constant(node):
@@ -324,7 +324,7 @@ def unpack_typed_dict_kwargs(params: ParamList, kwargs_idx: int) -> int:
annotation = kwargs.annotation
if is_unpack_typehint(annotation):
params.pop(kwargs_idx)
annotation_args: tuple = getattr(annotation, "__args__", tuple())
annotation_args: tuple = getattr(annotation, "__args__", ())
assert len(annotation_args) == 1, "Unpack requires a single type argument"
dict_annotations = annotation_args[0].__annotations__
new_params = []
@@ -710,7 +710,7 @@ def replace_param_default_subclass_specs(self, params: List[ParamData]) -> None:
subclass_types = get_subclass_types(param.annotation, callable_return=True)
if not (class_type and subclass_types and is_subclass(class_type, subclass_types)):
continue
subclass_spec: dict = dict(class_path=get_import_path(class_type), init_args=dict())
subclass_spec: dict = {"class_path": get_import_path(class_type), "init_args": {}}
for kwarg in node.keywords:
if kwarg.arg and ast_is_constant(kwarg.value):
subclass_spec["init_args"][kwarg.arg] = ast_get_constant_value(kwarg.value)
@@ -890,11 +890,11 @@ def get_field_data_pydantic1_model(field, name, doc_params):
elif field.default_factory:
default = field.default_factory()

return dict(
annotation=field.annotation,
default=default,
doc=field.field_info.description or doc_params.get(name),
)
return {
"annotation": field.annotation,
"default": default,
"doc": field.field_info.description or doc_params.get(name),
}


def get_field_data_pydantic2_dataclass(field, name, doc_params):
@@ -921,11 +921,11 @@ def get_field_data_pydantic2_dataclass(field, name, doc_params):
field_type = get_annotated_base_type(field.type)
else:
field_type = field.type
return dict(
annotation=field_type,
default=default,
doc=doc_params.get(name),
)
return {
"annotation": field_type,
"default": default,
"doc": doc_params.get(name),
}


def get_field_data_pydantic2_model(field, name, doc_params):
@@ -935,11 +935,11 @@ def get_field_data_pydantic2_model(field, name, doc_params):
elif field.default_factory:
default = field.default_factory()

return dict(
annotation=field.rebuild_annotation(),
default=default,
doc=field.description or doc_params.get(name),
)
return {
"annotation": field.rebuild_annotation(),
"default": default,
"doc": field.description or doc_params.get(name),
}


def get_field_data_attrs(field, name, doc_params):
@@ -951,11 +951,11 @@ def get_field_data_attrs(field, name, doc_params):
elif isinstance(default, attrs.Factory):
default = default.factory()

return dict(
annotation=field.type,
default=default,
doc=doc_params.get(name),
)
return {
"annotation": field.type,
"default": default,
"doc": doc_params.get(name),
}


def is_init_field_pydantic2_dataclass(field) -> bool:
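The changes in this file swap `dict(...)` calls for dict literals and build `ast_constant_types` as `tuple(ast_constant_attr)`, which collects the mapping's keys. A sketch with a stand-in class path:

```python
import ast

# tuple(mapping) collects the keys, exactly like tuple(mapping.keys()).
ast_constant_attr = {ast.Constant: "value"}
assert tuple(ast_constant_attr) == (ast.Constant,)

# Dict literals instead of dict(...); "pkg.Model" is an invented class path.
subclass_spec = {"class_path": "pkg.Model", "init_args": {}}
assert subclass_spec == dict(class_path="pkg.Model", init_args=dict())
```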
2 changes: 1 addition & 1 deletion jsonargparse/_signatures.py
@@ -122,7 +122,7 @@ def add_class_arguments(
defaults = default.lazy_get_init_args().as_dict()
elif is_dataclass_like(default.__class__):
defaults = dataclass_to_dict(default)
args = set(k[len(prefix) :] for k in added_args)
args = {k[len(prefix) :] for k in added_args}
skip_not_added = [k for k in defaults if k not in args]
if skip_not_added:
skip.update(skip_not_added) # skip init=False
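Here a `set(...)` call wrapping a generator expression becomes a set comprehension: same result, one fewer call, and arguably clearer. Sketch with invented argument names:

```python
prefix = "model."
added_args = ["model.lr", "model.depth"]

# Set comprehension instead of set(<generator expression>).
args = {k[len(prefix):] for k in added_args}
assert args == {"lr", "depth"}
assert args == set(k[len(prefix):] for k in added_args)
```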
10 changes: 5 additions & 5 deletions jsonargparse/_typehints.py
@@ -680,10 +680,10 @@ def completer(self, prefix, **kwargs):
return ["true", "false", "null"]
elif is_subclass(self._typehint, Enum):
enum = self._typehint
return list(enum.__members__.keys())
return list(enum.__members__)
elif is_optional(self._typehint, Enum):
enum = get_optional_arg(self._typehint)
return list(enum.__members__.keys()) + ["null"]
return list(enum.__members__) + ["null"]
elif is_optional(self._typehint, Path):
files_completer = get_files_completer()
return ["null"] + sorted(files_completer(prefix, **kwargs))
@@ -773,7 +773,7 @@ def adapt_typehints(
# Literal
elif typehint_origin in literal_types:
if val not in subtypehints and isinstance(val, str):
subtypes = Union[tuple({type(v) for v in subtypehints if type(v) is not str})]
subtypes = Union[tuple((type(v) for v in subtypehints if type(v) is not str))]
val = adapt_typehints(val, subtypes, **adapt_kwargs)
if val not in subtypehints:
raise_unexpected_value(f"Expected a {typehint}", val)
@@ -1451,7 +1451,7 @@ def adapt_class_type(
value["init_args"] = load_value(parser.dump(init_args, **dump_kwargs.get()))
else:
if isinstance(dict_kwargs, dict):
for key in list(dict_kwargs.keys()):
for key in list(dict_kwargs):
if _find_action(parser, key):
init_args[key] = dict_kwargs.pop(key)
elif dict_kwargs:
@@ -1576,7 +1576,7 @@ def typehint_metavar(typehint):
metavar = iter_to_set_str(enum.__members__)
elif is_optional(typehint, Enum):
enum = typehint.__args__[0]
metavar = iter_to_set_str(list(enum.__members__.keys()) + ["null"])
metavar = iter_to_set_str(list(enum.__members__) + ["null"])
elif typehint_origin in tuple_set_origin_types:
metavar = "[ITEM,...]"
return metavar
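`Enum.__members__` is a mapping from member names to members, so `list(enum.__members__)` already yields the names used for completion and metavars. A quick sketch with a made-up enum:

```python
from enum import Enum

class Mode(Enum):
    fit = "fit"
    test = "test"

# __members__ is a mapping; iterating it yields the member names,
# so the .keys() call adds nothing.
assert list(Mode.__members__) == ["fit", "test"]
assert list(Mode.__members__) + ["null"] == ["fit", "test", "null"]
```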
2 changes: 1 addition & 1 deletion jsonargparse/_util.py
@@ -353,7 +353,7 @@ def indent_text(text: str, first_line: bool = True) -> str:
def get_private_kwargs(data, **kwargs):
extracted = [data.pop(name, default) for name, default in kwargs.items()]
if data:
raise ValueError(f"Unexpected keyword parameters: {set(data.keys())}")
raise ValueError(f"Unexpected keyword parameters: {set(data)}")
return extracted[0] if len(extracted) == 1 else extracted


2 changes: 1 addition & 1 deletion jsonargparse_tests/test_core.py
@@ -896,7 +896,7 @@ def test_default_config_files_pattern(parser, subtests, tmp_cwd):
def test_named_argument_groups(parser):
parser.add_argument_group("Group 1", name="group1")
parser.add_argument_group("Group 2", name="group2")
assert {"group1", "group2"} == set(parser.groups.keys())
assert {"group1", "group2"} == set(parser.groups)
with pytest.raises(ValueError) as ctx:
parser.add_argument_group("Bad", name="group1")
ctx.match("Group with name group1 already exists")
2 changes: 1 addition & 1 deletion jsonargparse_tests/test_parameter_resolvers.py
@@ -554,7 +554,7 @@ def assert_params(params, expected, origins={}, help=True):
if help:
docs = [f"help for {p.name}" for p in params] if docstring_parser_support else [None] * len(params)
assert docs == [p.doc for p in params]
assert all(isinstance(params[n].default, ConditionalDefault) for n in origins.keys())
assert all(isinstance(params[n].default, ConditionalDefault) for n in origins)
param_origins = {
n: [o.split(f"{__name__}.", 1)[1] for o in p.origin] for n, p in enumerate(params) if p.origin is not None
}