Skip to content

Commit 27139e2

Browse files
authored
Revert "perf: Prefer generator expressions over list comprehensions (#2486)" (#2491)
This reverts commit 37b9aeb.
1 parent 37b9aeb commit 27139e2

34 files changed

+66
-65
lines changed

coremltools/converters/_converters_entry.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -750,14 +750,14 @@ def _validate_outputs_argument(outputs):
750750
'or of types ct.ImageType/ct.TensorType'
751751
if isinstance(outputs[0], str):
752752
# if one of the elements is a string, all elements must be strings
753-
if not all(isinstance(t, str) for t in outputs):
753+
if not all([isinstance(t, str) for t in outputs]):
754754
raise ValueError(msg_inconsistent_types)
755755
return outputs, [TensorType(name=name) for name in outputs]
756756

757757
if isinstance(outputs[0], InputType):
758-
if not all(isinstance(t, TensorType) or isinstance(t, ImageType) for t in outputs):
758+
if not all([isinstance(t, TensorType) or isinstance(t, ImageType) for t in outputs]):
759759
raise ValueError(msg_inconsistent_types)
760-
if any(t.shape is not None for t in outputs):
760+
if any([t.shape is not None for t in outputs]):
761761
msg = "The 'shape' argument must not be specified for the outputs, since it is " \
762762
"automatically inferred from the input shapes and the ops in the model"
763763
raise ValueError(msg)
@@ -777,9 +777,9 @@ def _validate_outputs_argument(outputs):
777777
output_names = [t.name for t in outputs]
778778
# verify that either all of the entries in output_names is "None" or none of them is "None"
779779
msg_consistent_names = 'Either none or all the outputs must have the "name" argument specified'
780-
if output_names[0] is None and not all(name is None for name in output_names):
780+
if output_names[0] is None and not all([name is None for name in output_names]):
781781
raise ValueError(msg_consistent_names)
782-
if output_names[0] is not None and not all(name is not None for name in output_names):
782+
if output_names[0] is not None and not all([name is not None for name in output_names]):
783783
raise ValueError(msg_consistent_names)
784784
if output_names[0] is not None:
785785
if len(set(output_names)) != len(output_names):
@@ -914,7 +914,7 @@ def _flatten_list(_inputs):
914914
if inputs is not None:
915915
raise_if_duplicated(inputs)
916916

917-
if inputs is not None and not all(isinstance(_input, InputType) for _input in inputs):
917+
if inputs is not None and not all([isinstance(_input, InputType) for _input in inputs]):
918918
raise ValueError("Input should be a list of TensorType or ImageType")
919919

920920
elif exact_source == "pytorch":

coremltools/converters/mil/backend/mil/load.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -551,7 +551,7 @@ def remove_output(block, prob_var):
551551
classes = classes.splitlines()
552552
elif isinstance(classes_in, list): # list[int or str]
553553
classes = classes_in
554-
assert all(isinstance(x, (int, str)) for x in classes), message
554+
assert all([isinstance(x, (int, str)) for x in classes]), message
555555
else:
556556
raise ValueError(message)
557557

coremltools/converters/mil/backend/nn/load.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -266,7 +266,7 @@ def load(prog, **kwargs):
266266

267267
proto = builder.spec
268268
# image input
269-
has_image_input = any(isinstance(s, ImageType) for s in input_types)
269+
has_image_input = any([isinstance(s, ImageType) for s in input_types])
270270
if has_image_input:
271271
proto = _convert_to_image_input(proto, input_types,
272272
skip_model_load=kwargs.get("skip_model_load", False))
@@ -284,7 +284,7 @@ def load(prog, **kwargs):
284284
shape = var.sym_type.get_shape()
285285
if any_variadic(shape):
286286
raise ValueError("Variable rank model outputs, that are ImageTypes, are not supported")
287-
if any(is_symbolic(d) for d in shape):
287+
if any([is_symbolic(d) for d in shape]):
288288
raise NotImplementedError("Image output '{}' has symbolic dimensions in its shape".
289289
format(var.name))
290290
_validate_image_input_output_shapes(output_types[i].color_layout, shape, var.name, is_input=False)

coremltools/converters/mil/backend/nn/op_mapping.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -196,7 +196,7 @@ def _try_convert_global_pool(const_context, builder, op, mode):
196196

197197
if tuple(op.outputs[0].shape[:-2]) != tuple(op.inputs["x"].shape[:-2]):
198198
return False
199-
if not all(s == 1 for s in op.outputs[0].shape[-2:]):
199+
if not all([s == 1 for s in op.outputs[0].shape[-2:]]):
200200
return False
201201

202202
builder.add_pooling(
@@ -795,11 +795,11 @@ def _add_elementwise_binary(
795795
# INTERNAL_MUL_XYKN not implemented
796796
continue
797797
if all(shape_x[indices] == shape_y[indices]):
798-
if all(True if i in indices else s == 1 for i, s in enumerate(shape_x)):
798+
if all([True if i in indices else s == 1 for i, s in enumerate(shape_x)]):
799799
internal_y = op.x
800800
internal_x = op.y
801801
break
802-
if all(True if i in indices else s == 1 for i, s in enumerate(shape_y)):
802+
if all([True if i in indices else s == 1 for i, s in enumerate(shape_y)]):
803803
internal_x = op.x
804804
internal_y = op.y
805805
break
@@ -3323,7 +3323,7 @@ def stack(const_context, builder, op):
33233323
def split(const_context, builder, op):
33243324
split = op.sizes
33253325
split = [size for size in split if size != 0]
3326-
has_equal_splits = all(size == split[0] for size in split)
3326+
has_equal_splits = all([size == split[0] for size in split])
33273327
num_splits = len(split)
33283328
output_names = [op.outputs[i].name for i in range(len(op.sizes)) if op.sizes[i] != 0]
33293329

@@ -3545,7 +3545,7 @@ def _realloc_list(const_context, builder, ls_var, index_var, value_var, mode):
35453545

35463546
# check if elem_shape is runtime-determined
35473547
elem_shape = tuple(value_var.shape)
3548-
has_dynamic_shape = any(is_symbolic(i) for i in elem_shape)
3548+
has_dynamic_shape = any([is_symbolic(i) for i in elem_shape])
35493549

35503550
# get the fill shape of the tensor array
35513551
# [length, elem_dim1, elem_dim2, ...]

coremltools/converters/mil/converter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ def __call__(self, model, *args, **kwargs):
5858
type(inputs)
5959
)
6060
)
61-
if not all(isinstance(i, input_types.InputType) for i in inputs):
61+
if not all([isinstance(i, input_types.InputType) for i in inputs]):
6262
raise ValueError(
6363
"Type of inputs should be list or tuple of TensorType or ImageType, got {} instead.".format(
6464
[type(i) for i in inputs]

coremltools/converters/mil/debugging_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ def validate_inputs(func, input_vars):
8484
else:
8585
input_values.append(v)
8686

87-
if all(x in reachable_vars for x in input_values):
87+
if all([x in reachable_vars for x in input_values]):
8888
reachable_vars.update(op.outputs)
8989

9090
for out in func.outputs:

coremltools/converters/mil/frontend/_utils.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -293,7 +293,7 @@ def _swap(a, b):
293293
return b, a
294294

295295
a_var, b_var = vars
296-
is_dynamic = any(any_symbolic(var.shape) for var in vars)
296+
is_dynamic = any([any_symbolic(var.shape) for var in vars])
297297
# list of equations supported for explicit mil translations
298298
vec_bnqd_bnkd_bnqk = (
299299
[0, 1, 2, 3],
@@ -436,10 +436,10 @@ def get_output_names(outputs) -> Optional[List[str]]:
436436

437437
output_names = None
438438
if outputs is not None:
439-
assert all(isinstance(t, InputType) for t in outputs), \
439+
assert all([isinstance(t, InputType) for t in outputs]), \
440440
"outputs must be a list of ct.ImageType or ct.TensorType"
441441
output_names = [t.name for t in outputs]
442-
if all(name is None for name in output_names):
442+
if all([name is None for name in output_names]):
443443
output_names = None
444444
return output_names
445445

coremltools/converters/mil/frontend/tensorflow/converter.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -179,7 +179,7 @@ def __init__(
179179
type(inputs)
180180
)
181181
)
182-
if not all(isinstance(i, InputType) for i in inputs):
182+
if not all([isinstance(i, InputType) for i in inputs]):
183183
raise ValueError(
184184
"Type of inputs should be list or tuple of TensorType or ImageType, got {} instead.".format(
185185
[type(i) for i in inputs]
@@ -238,7 +238,7 @@ def __init__(
238238
for inputtype in self.inputs:
239239
if not isinstance(inputtype.shape, InputShape):
240240
continue
241-
if any(isinstance(s, RangeDim) for s in inputtype.shape.shape):
241+
if any([isinstance(s, RangeDim) for s in inputtype.shape.shape]):
242242
continue
243243
if inputtype.name not in graph:
244244
raise ValueError(

coremltools/converters/mil/frontend/tensorflow/ops.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -633,7 +633,7 @@ def ExtractImagePatches(context, node):
633633
padding = node.attr.get("padding")
634634
if x.rank != 4:
635635
raise ValueError("input for ExtractImagePatches should be a 4D tensor.")
636-
if not all(rate == 1 for rate in rates):
636+
if not all([rate == 1 for rate in rates]):
637637
raise NotImplementedError(
638638
"only rates with all 1s is implemented for ExtractImagePatches."
639639
)
@@ -3022,7 +3022,7 @@ def Pack(context, node):
30223022
else:
30233023
x = mb.expand_dims(x=values[0], axes=[axis], name=node.name)
30243024
else:
3025-
if all(_is_scalar(input.sym_type) for input in values):
3025+
if all([_is_scalar(input.sym_type) for input in values]):
30263026
x = mb.concat(values=values, axis=axis, name=node.name)
30273027
else:
30283028
x = mb.stack(values=values, axis=axis, name=node.name)

coremltools/converters/mil/frontend/tensorflow/ssa_passes/tf_lstm_to_core_lstm.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ def _try_get_last_cell_state_in_tf_lstm_block(op: Operation) -> Var:
106106
return cs
107107
if len(cs.consuming_blocks) > 1:
108108
return None
109-
if not all(child_op.op_type == "slice_by_index" for child_op in cs.child_ops):
109+
if not all([child_op.op_type == "slice_by_index" for child_op in cs.child_ops]):
110110
return None
111111
child_ops = cs.child_ops[:]
112112
block = op.enclosing_block

0 commit comments

Comments (0)