@guilhermeleobas
Created December 12, 2023 04:29
============================================================================================================================== FAILURES ===============================================================================================================================
___________________________________________________________________________________________________ TestControlFlowTraced.test_cond_vmap_multiple_args_with_closure ___________________________________________________________________________________________________
test/functorch/test_control_flow.py:1714: in test_cond_vmap_multiple_args_with_closure
res = torch.vmap(fn)(a, b)
torch/_functorch/apis.py:188: in wrapped
return vmap_impl(func, in_dims, out_dims, randomness, chunk_size, *args, **kwargs)
torch/_functorch/vmap.py:287: in vmap_impl
return wrapper()
torch/_dynamo/eval_frame.py:501: in _fn
return fn(*args, **kwargs)
torch/_dynamo/external_utils.py:17: in inner
return fn(*args, **kwargs)
torch/_functorch/vmap.py:283: in wrapper
return _flat_vmap(
torch/_functorch/vmap.py:44: in fn
return f(*args, **kwargs)
torch/_functorch/vmap.py:398: in _flat_vmap
batched_outputs = func(*batched_inputs, **kwargs)
test/functorch/test_control_flow.py:1707: in fn
return torch.cond(
torch/_higher_order_ops/cond.py:151: in cond
return torch.compile(cond_op, backend="eager", fullgraph=True)(
torch/_dynamo/eval_frame.py:501: in _fn
return fn(*args, **kwargs)
torch/_dynamo/eval_frame.py:667: in catch_errors
return callback(frame, cache_entry, hooks, frame_state)
torch/_dynamo/convert_frame.py:383: in _convert_frame_assert
compiled_product = _compile(
torch/_dynamo/convert_frame.py:665: in _compile
raise InternalTorchDynamoError(str(e)).with_traceback(
torch/_dynamo/convert_frame.py:646: in _compile
guarded_code = compile_inner(code, one_graph, hooks, transform)
torch/_dynamo/utils.py:244: in time_wrapper
r = func(*args, **kwargs)
torch/_dynamo/convert_frame.py:562: in compile_inner
out_code = transform_code_object(code, transform)
torch/_dynamo/bytecode_transformation.py:1033: in transform_code_object
transformations(instructions, code_options)
torch/_dynamo/convert_frame.py:151: in _fn
return fn(*args, **kwargs)
torch/_dynamo/convert_frame.py:527: in transform
tracer.run()
torch/_dynamo/symbolic_convert.py:2096: in run
super().run()
torch/_dynamo/symbolic_convert.py:781: in run
and self.step()
torch/_dynamo/symbolic_convert.py:744: in step
getattr(self, inst.opname)(inst)
torch/_dynamo/symbolic_convert.py:463: in wrapper
return inner_fn(self, inst)
torch/_dynamo/symbolic_convert.py:1205: in CALL_FUNCTION_EX
if not isinstance(
torch/_dynamo/variables/base.py:133: in __instancecheck__
instance = instance.realize()
torch/_dynamo/variables/lazy.py:56: in realize
self._cache.realize(self.parents_tracker)
torch/_dynamo/variables/lazy.py:22: in realize
self.vt = VariableBuilder(tx, self.source)(self.value)
torch/_dynamo/variables/builder.py:245: in __call__
vt = self._wrap(value)
torch/_dynamo/variables/builder.py:387: in _wrap
return type_dispatch(self, value)
torch/_dynamo/variables/builder.py:842: in wrap_listlike
output = [
torch/_dynamo/variables/builder.py:843: in <listcomp>
VariableBuilder(self.tx, GetItemSource(self.get_source(), i))(item)
torch/_dynamo/variables/builder.py:245: in __call__
vt = self._wrap(value)
torch/_dynamo/variables/builder.py:387: in _wrap
return type_dispatch(self, value)
torch/_dynamo/variables/builder.py:842: in wrap_listlike
output = [
torch/_dynamo/variables/builder.py:843: in <listcomp>
VariableBuilder(self.tx, GetItemSource(self.get_source(), i))(item)
torch/_dynamo/variables/builder.py:245: in __call__
vt = self._wrap(value)
torch/_dynamo/variables/builder.py:387: in _wrap
return type_dispatch(self, value)
torch/_dynamo/variables/builder.py:1070: in wrap_tensor
tensor_variable = wrap_fx_proxy(
torch/_dynamo/variables/builder.py:1320: in wrap_fx_proxy
return wrap_fx_proxy_cls(target_cls=TensorVariable, **kwargs)
torch/_dynamo/variables/builder.py:1454: in wrap_fx_proxy_cls
specialized_props = target_cls.specialize(example_value)
torch/_dynamo/variables/tensor.py:162: in specialize
[
torch/_dynamo/variables/tensor.py:165: in <listcomp>
if value.is_contiguous(memory_format=x)
E torch._dynamo.exc.InternalTorchDynamoError: NYI: querying is_contiguous inside of vmap for memory_format other than torch.contiguous_format
E
E from user code:
E File "/home/guilhermeleobas/git/pytorch/torch/_dynamo/external_utils.py", line 17, in inner
E return fn(*args, **kwargs)
E
E Set TORCH_LOGS="+dynamo" and TORCHDYNAMO_VERBOSE=1 for more information
E
E
E You can suppress this exception and fall back to eager by setting:
E import torch._dynamo
E torch._dynamo.config.suppress_errors = True
E
E
E To execute this test, run the following from the base repo dir:
E python test/functorch/test_control_flow.py -k test_cond_vmap_multiple_args_with_closure
E
E This message can be suppressed by setting PYTORCH_PRINT_REPRO_ON_FAILURE=0
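Note on the root cause, as I read the traceback above (my annotation, not part of the original test output): Dynamo's TensorVariable.specialize probes value.is_contiguous(memory_format=...) for several memory formats on the example value, but here the example value is a BatchedTensor (torch.cond is being compiled inside torch.vmap), and batched tensors only answer is_contiguous for torch.contiguous_format. A minimal sketch that should trigger the same NYI error directly, without Dynamo or torch.cond involved:

import torch

def probe(x):
    # Inside vmap, x is a BatchedTensor; querying is_contiguous with any
    # memory_format other than torch.contiguous_format raises:
    # "NYI: querying is_contiguous inside of vmap for memory_format
    #  other than torch.contiguous_format"
    x.is_contiguous(memory_format=torch.channels_last)
    return x

a = torch.randn(4, 3, 8, 8)
try:
    torch.vmap(probe)(a)
except RuntimeError as e:
    print(e)  # the same NYI message as in the failure above

If that reading is right, the failing test does not call is_contiguous itself; the query comes from Dynamo's tensor specialization while it wraps the batched closure inputs (the wrap_tensor / wrap_fx_proxy_cls frames in the traceback).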