
Commit f4f0f2d

Fix bug in config_parser.py when batch_norm layer is used in RecurrentLayerGroup.
1 parent: 2965df5

File tree: 1 file changed (+6, -2 lines)

python/paddle/trainer/config_parser.py

Lines changed: 6 additions & 2 deletions
@@ -498,9 +498,12 @@ def __init__(
             is_static=None,
             is_shared=None,
             update_hooks=None,
-            input_layer_argument=None, ):
+            input_layer_argument=None,
+            not_make_layer_name_in_submodel=None, ):
         self.add_keys(locals())
         self.input_layer_name = MakeLayerNameInSubmodel(input_layer_name)
+        if not_make_layer_name_in_submodel:
+            self.input_layer_name = input_layer_name
 
 
 # Define a projection for iexed layer
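
The first hunk adds an opt-out flag to Input.__init__: by default the constructor rewrites input_layer_name with MakeLayerNameInSubmodel, and the new not_make_layer_name_in_submodel argument skips that rewrite. Below is a minimal, self-contained sketch of this behavior, assuming MakeLayerNameInSubmodel qualifies a name with an "@<submodel>" suffix; the simplified Input class and the names used are for illustration only, not code from this commit.

    # Minimal sketch (assumption: name qualification appends "@<submodel>").
    g_current_submodel = "rnn_group"

    def make_layer_name_in_submodel(name):
        # Scope a layer name to the current submodel, if any.
        return name + "@" + g_current_submodel if g_current_submodel else name

    class Input(object):
        def __init__(self, input_layer_name, not_make_layer_name_in_submodel=None):
            # Default behavior: the input layer name is scoped to the submodel.
            self.input_layer_name = make_layer_name_in_submodel(input_layer_name)
            # New opt-out: keep the caller's name unchanged.
            if not_make_layer_name_in_submodel:
                self.input_layer_name = input_layer_name

    print(Input("bn_input").input_layer_name)
    # bn_input@rnn_group
    print(Input("bn_input", not_make_layer_name_in_submodel=True).input_layer_name)
    # bn_input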
@@ -1848,7 +1851,8 @@ def __init__(self,
                     initial_std=0.0,
                     initial_mean=0.0,
                     is_static=True,
-                    is_shared=is_shared, ))
+                    is_shared=is_shared,
+                    not_make_layer_name_in_submodel=True, ))
 
         parallel_nn = bool(int(g_command_config_args.get("parallel_nn", 0)))
         cudnn_version = int(g_command_config_args.get("cudnn_version", 0))
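
The second hunk passes the new flag where the batch_norm config appends extra static inputs for its moving mean and variance statistics. Those inputs reuse inputs[0].input_layer_name, which was already qualified by MakeLayerNameInSubmodel when the first input was constructed inside the RecurrentLayerGroup, so qualifying it a second time would produce a layer name that does not exist. This double-qualification mechanism is inferred from the diff, not stated in the commit message; the self-contained sketch below uses simplified stand-in names to show the effect.

    # Demonstration of the presumed bug and fix (stand-in names, not Paddle APIs).
    group = "rnn_group"

    def qualify(name):
        # Qualify a layer name with the enclosing submodel name.
        return name + "@" + group

    # The batch_norm data input was already qualified when it was constructed
    # inside the RecurrentLayerGroup:
    data_input_name = qualify("bn_input")     # bn_input@rnn_group

    # Before the fix: the extra static inputs re-qualified that name, yielding
    # a layer that does not exist in the config.
    broken = qualify(data_input_name)         # bn_input@rnn_group@rnn_group

    # After the fix: not_make_layer_name_in_submodel=True keeps the name as-is.
    fixed = data_input_name                   # bn_input@rnn_group

    print(broken)
    print(fixed)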
