@@ -220,6 +220,11 @@ def forward(self, *args):
                 self.graph_module.graph = original_graph
                 self.graph_module.recompile()
                 self.compiled_fn = self.graph_module
+            except (NotImplementedError, AssertionError) as e:
+                warnings.warn(f"torch._inductor.compile failed: {e}. Falling back to eager.")
+                self.graph_module.graph = original_graph
+                self.graph_module.recompile()
+                self.compiled_fn = self.graph_module
 
         return self.compiled_fn(*args)
 
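This hunk adds a second `except` arm to `forward`: when `torch._inductor.compile` raises `NotImplementedError` or `AssertionError`, the wrapper warns, restores the original FX graph, and caches the eager `GraphModule` as the callable. A minimal standalone sketch of the same pattern, assuming only the public `torch._inductor.compile(gm, example_inputs)` entry point (`compile_with_eager_fallback` is a hypothetical helper, not part of the patch):

```python
import warnings

import torch


def compile_with_eager_fallback(graph_module: torch.fx.GraphModule, example_inputs):
    """Try TorchInductor; on failure, fall back to running the graph eagerly."""
    try:
        return torch._inductor.compile(graph_module, example_inputs)
    except (NotImplementedError, AssertionError) as e:
        warnings.warn(f"torch._inductor.compile failed: {e}. Falling back to eager.")
        # A GraphModule is itself callable, so it doubles as the eager fallback.
        return graph_module


gm = torch.fx.symbolic_trace(lambda x: x * 2)
fn = compile_with_eager_fallback(gm, [torch.randn(4)])
print(fn(torch.randn(4)))
```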
@@ -495,6 +500,12 @@ def is_node_supported_by_thunder(
     target = node.target  # Target is the function to call.
     if node.op == "call_method":
         target = getattr(torch.Tensor, node.target, None)
+        if target is None and hasattr(torch.cuda.Stream, node.target):
+            split_reason = SplitReason(
+                SplitReasonType.MISSING_OP_SUPPORT,
+                f"node with name {node.name} and target {node.target} is a `torch.cuda.Stream` method which is not supported by Thunder.",
+            )
+            return False, split_reason
     assert target is not None, f"Failed to find method {node.target}"
 
     # If the operation has automatic registration, we mark it as unsupported as `inductor` might be
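For `call_method` nodes, `is_node_supported_by_thunder` resolves the target by looking the method name up on `torch.Tensor`. Stream methods such as `wait_stream` or `synchronize` exist only on `torch.cuda.Stream`, so the lookup yields `None` and, before this hunk, the assert fired; the new branch instead returns a `SplitReason` so the graph splitter can route the node away from Thunder. A sketch of that lookup order (`resolve_call_method_target` is a hypothetical helper, not the patched function, which returns a `(bool, SplitReason | None)` pair as shown above):

```python
import torch


def resolve_call_method_target(node):
    """Mirror the hunk's lookup order for a `call_method` FX node (sketch)."""
    target = getattr(torch.Tensor, node.target, None)
    if target is None and hasattr(torch.cuda.Stream, node.target):
        # e.g. "wait_stream" is defined on torch.cuda.Stream but not on
        # torch.Tensor, so previously the assert below raised instead of
        # producing a clean split.
        return None  # caller reports SplitReason(MISSING_OP_SUPPORT, ...)
    assert target is not None, f"Failed to find method {node.target}"
    return target


# Why the Tensor lookup fails for stream methods:
assert not hasattr(torch.Tensor, "wait_stream")
assert hasattr(torch.cuda.Stream, "wait_stream")
```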