node.py

# mypy: ignore-errors

# Nodes represent a definition of a value in our graph of operators.
from typing import TYPE_CHECKING, Union, Callable, Any, Tuple, List, Optional, Dict, Set
from ._compatibility import compatibility
from .immutable_collections import immutable_dict, immutable_list
import torch
import builtins
import types
import inspect
import warnings
from torch.fx.operator_schemas import normalize_function, normalize_module, ArgsKwargsPair
from .._ops import ops as _ops

if TYPE_CHECKING:
    from .graph import Graph

__all__ = ['Node', 'map_arg', 'map_aggregate', "has_side_effect"]

BaseArgumentTypes = Union[str, int, float, bool, complex, torch.dtype,
                          torch.Tensor, torch.device, torch.memory_format, torch.layout, torch._ops.OpOverload]
base_types = BaseArgumentTypes.__args__  # type: ignore[attr-defined]

Target = Union[Callable[..., Any], str]

Argument = Optional[Union[
    Tuple[Any, ...],  # actually Argument, but mypy can't represent recursive types
    List[Any],  # actually Argument
    Dict[str, Any],  # actually Argument
    slice,  # Slice[Argument, Argument, Argument], but slice is not a templated type in typing
    range,
    'Node',
    BaseArgumentTypes
]]

_side_effectful_need_to_be_preserved_pre_dispatch: Set[Callable] = {
    torch._C._set_grad_enabled,
    torch.amp._enter_autocast,
    torch.amp._exit_autocast,
}

# TODO: Either refactor this into 2 functions 1 dce for functional graphs and 1 dce for all graphs,
# or add logic to correctly mark all inplace ops as side effectful.
_side_effectful_functions: Set[Callable] = {
    torch._assert,
    torch._assert_async,
    _ops.aten._assert_async.msg,
    _ops.aten._assert_scalar.default,
    _ops.aten.copy_.default,
    _ops.aten.set_.source_Tensor,
    _ops.aten.index_put_.default,
    _ops.aten.sym_constrain_range.default,
    _ops.aten.sym_constrain_range_for_size.default,
    _ops.profiler._record_function_enter,
    _ops.profiler._record_function_enter_new,
    _ops.profiler._record_function_exit,
    _ops.inductor.accumulate_grad_.default,
    _ops.inductor.resize_storage_bytes_.default,
} | _side_effectful_need_to_be_preserved_pre_dispatch


@compatibility(is_backward_compatible=False)
def has_side_effect(fn: Callable) -> Callable:
    _side_effectful_functions.add(fn)
    return fn
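
# Illustrative sketch (not part of the original module): `has_side_effect` can be
# used as a decorator so that dead-code elimination keeps calls whose results are
# unused. `log_tensor` below is a hypothetical helper, not an existing torch API.
#
#     import torch
#     import torch.fx
#     from torch.fx.node import has_side_effect
#
#     @has_side_effect
#     @torch.fx.wrap
#     def log_tensor(t):
#         print("saw tensor with shape", t.shape)
#
#     def f(x):
#         log_tensor(x)          # value unused, but the call is preserved
#         return x + 1
#
#     gm = torch.fx.symbolic_trace(f)
#     gm.graph.eliminate_dead_code()   # the log_tensor node survives DCE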


# this is fixed on master, WAR for 1.5
def _find_module_of_method(orig_method: Callable[..., Any]) -> str:
    name = orig_method.__name__
    module = orig_method.__module__
    if module is not None:
        return module
    for guess in [torch, torch.nn.functional]:
        if getattr(guess, name, None) is orig_method:
            return guess.__name__
    raise RuntimeError(f'cannot find module for {orig_method}')


# Borrowed from CPython typing module
# https://github.com/python/cpython/blob/f90dc36c15d7fee0efaf6d39e97be0bdf2683e93/Lib/typing.py#L156
def _type_repr(obj):
    """Return the repr() of an object, special-casing types (internal helper).

    If obj is a type, we return a shorter version than the default
    type.__repr__, based on the module and qualified name, which is
    typically enough to uniquely identify a type. For everything
    else, we fall back on repr(obj).
    """
    if isinstance(obj, type):
        if obj.__module__ == 'builtins':
            return obj.__qualname__
        return f'{obj.__module__}.{obj.__qualname__}'
    if obj is ...:
        return '...'
    if isinstance(obj, types.FunctionType):
        return obj.__name__
    return repr(obj)


def _get_qualified_name(func: Callable[..., Any]) -> str:
    # things like getattr just appear in builtins
    if getattr(builtins, func.__name__, None) is func:
        return func.__name__
    # torch.Tensor.{fn}
    if (isinstance(func, (types.MethodDescriptorType, types.WrapperDescriptorType))
            and func is getattr(torch.Tensor, func.__name__, None)):
        return f"torch.Tensor.{func.__name__}"
    name = func.__name__
    if name == "<lambda>":
        # For lambdas, try to get their defining name in the module
        try:
            name = inspect.getsource(func).split("=")[0].strip()
        except Exception as e:
            raise RuntimeError("Unable to represent lambda") from e
    module = _find_module_of_method(func)
    module = module.replace('torch._ops', 'torch.ops')  # WAR for bug in how torch.ops assigns module
    # Fixup segment_reduce mismatch
    if module == "torch" and name == "segment_reduce":
        name = "_" + name
    return f'{module}.{name}'


def _format_arg(arg, max_list_len=float('inf')) -> str:
    if hasattr(arg, '_custom_fx_repr_fn'):
        return arg._custom_fx_repr_fn()
    elif isinstance(arg, list):
        items = ', '.join(_format_arg(a) for idx, a in enumerate(arg) if idx < max_list_len)
        maybe_len = '' if len(arg) < max_list_len + 1 else f', ...[total_len={len(arg)}]'
        return f'[{items}{maybe_len}]'
    elif isinstance(arg, tuple):
        items = ', '.join(_format_arg(a) for idx, a in enumerate(arg) if idx < max_list_len)
        maybe_len = '' if len(arg) < max_list_len + 1 else f', ...[total_len={len(arg)}]'
        maybe_comma = ',' if len(arg) == 1 else ''
        return f'({items}{maybe_comma}{maybe_len})'
    elif isinstance(arg, dict):
        items_str = ', '.join(f'{k}: {_format_arg(v)}' for k, v in arg.items())
        return f'{{{items_str}}}'

    if isinstance(arg, Node):
        return '%' + str(arg)
    else:
        return str(arg)


@compatibility(is_backward_compatible=True)
class Node:
    """
    ``Node`` is the data structure that represents individual operations within
    a ``Graph``. For the most part, Nodes represent callsites to various entities,
    such as operators, methods, and Modules (some exceptions include nodes that
    specify function inputs and outputs). Each ``Node`` has a function specified
    by its ``op`` property. The ``Node`` semantics for each value of ``op`` are as follows:

    - ``placeholder`` represents a function input. The ``name`` attribute specifies the name this value will take on.
      ``target`` is similarly the name of the argument. ``args`` holds either: 1) nothing, or 2) a single argument
      denoting the default parameter of the function input. ``kwargs`` is don't-care. Placeholders correspond to
      the function parameters (e.g. ``x``) in the graph printout.
    - ``get_attr`` retrieves a parameter from the module hierarchy. ``name`` is similarly the name the result of the
      fetch is assigned to. ``target`` is the fully-qualified name of the parameter's position in the module hierarchy.
      ``args`` and ``kwargs`` are don't-care.
    - ``call_function`` applies a free function to some values. ``name`` is similarly the name of the value to assign
      to. ``target`` is the function to be applied. ``args`` and ``kwargs`` represent the arguments to the function,
      following the Python calling convention.
    - ``call_module`` applies a module in the module hierarchy's ``forward()`` method to given arguments. ``name`` is
      as previous. ``target`` is the fully-qualified name of the module in the module hierarchy to call.
      ``args`` and ``kwargs`` represent the arguments to invoke the module on, *excluding the self argument*.
    - ``call_method`` calls a method on a value. ``name`` is as previous. ``target`` is the string name of the method
      to apply to the ``self`` argument. ``args`` and ``kwargs`` represent the arguments to invoke the method on,
      *including the self argument*.
    - ``output`` contains the output of the traced function in its ``args[0]`` attribute. This corresponds to the
      "return" statement in the Graph printout.
    """

    @compatibility(is_backward_compatible=True)
    def __init__(self, graph: 'Graph', name: str, op: str, target: 'Target',
                 args: Tuple['Argument', ...], kwargs: Dict[str, 'Argument'],
                 return_type : Optional[Any] = None) -> None:
        """
        Instantiate an instance of ``Node``. Note: most often, you want to use the
        Graph APIs, i.e. ``Graph.call_module``, ``Graph.call_method``, etc. rather
        than instantiating a ``Node`` directly.

        Args:
            graph (Graph): The ``Graph`` to which this ``Node`` should belong.

            name (str): The name to which the output of this ``Node`` should be assigned

            op (str): The opcode for this ``Node``. Can be one of 'placeholder',
                'call_method', 'call_module', 'call_function', 'get_attr',
                'output'

            target ('Target'): The target this op should call. See the broader
                ``Node`` docstring for more details.

            args (Tuple['Argument']): The args to be passed to ``target``

            kwargs (Dict[str, 'Argument']): The kwargs to be passed to ``target``

            return_type (Optional[Any]): The python type expression representing the
                type of the output of this node. This field can be used for
                annotation of values in the generated code or for other types
                of analyses.
        """
        self.graph = graph
        self.name = name  # unique name of value being created
        assert op in ['placeholder', 'call_method', 'call_module', 'call_function', 'get_attr', 'output', 'root']
        self.op = op  # the kind of operation = placeholder|call_method|call_module|call_function|get_attr
        if op == 'call_function':
            if not callable(target):
                raise ValueError(f'Node [graph = {graph}, name = \'{name}\'] target {target} has type {torch.typename(target)} '
                                 'but a Callable is expected')
        else:
            if not isinstance(target, str):
                raise ValueError(f'Node [graph = {graph}, name = \'{name}\'] target {target} has type {torch.typename(target)} '
                                 'but a str is expected')
        self.target = target  # for method/module/function, the name of the method/module/function/attr
        # being invoked, e.g. add, layer1, or torch.add

        # All `Node`-valued inputs. Key is the Node, value is don't-care.
        # The public API for this is `all_input_nodes`, this private attribute
        # should not be accessed directly.
        self._input_nodes : Dict[Node, None] = {}
        self.__update_args_kwargs(map_arg(args, lambda x: x), map_arg(kwargs, lambda x: x))  # type: ignore[arg-type]

        # All of the nodes that use the value produced by this Node.
        # Note one user may correspond to several uses, e.g. the node for ``x + x``
        # would appear once here, but represents two uses.
        #
        # Is a dict to act as an "ordered set". Keys are significant, values are don't-care.
        self.users : Dict[Node, None] = {}

        # Type expression representing the output value of this node.
        # This should contain the same class of Type objects that would appear
        # as type annotations for function inputs/outputs.
        #
        # For placeholder nodes, this value will be used to type-annotate the
        # generated function parameters.
        # For the return node, this value will be used to type-annotate the
        # generated function return type. (Note this is a special case. ``return``
        # does not produce a value, it's more of a notation. Thus, this value
        # describes the type of args[0] in the ``return`` node.)
        self.type : Optional[Any] = return_type
        self._prev = self
        self._next = self
        self._erased = False
        self._sort_key: Any = ()

        # If set, use this fn to print this node
        self._repr_fn : Optional[Callable[[Node], str]] = None

        # Dictionary to store metadata passes need to do their
        # transformations. This metadata is preserved across node copies.
        self.meta : Dict[str, Any] = {}

    @property
    def next(self) -> 'Node':
        """
        Returns the next ``Node`` in the linked list of Nodes.

        Returns:

            The next ``Node`` in the linked list of Nodes.
        """
        return self._next

    @property
    def prev(self) -> 'Node':
        """
        Returns the previous ``Node`` in the linked list of Nodes.

        Returns:

            The previous ``Node`` in the linked list of Nodes.
        """
        return self._prev

    @compatibility(is_backward_compatible=True)
    def prepend(self, x: 'Node') -> None:
        """
        Insert x before this node in the list of nodes in the graph. Example::

            Before: p -> self
                    bx -> x -> ax
            After:  p -> x -> self
                    bx -> ax

        Args:
            x (Node): The node to put before this node. Must be a member of the same graph.
        """
        assert self.graph == x.graph, "Attempting to move a Node into a different Graph"
        if self == x:
            warnings.warn("Trying to prepend a node to itself. This behavior has no effect on the graph.")
            return
        x._remove_from_list()
        p = self._prev
        p._next, x._prev = x, p
        x._next, self._prev = self, x

        # compute x._sort_key
        psk = x._prev._sort_key
        nsk = x._next._sort_key
        if len(psk) > len(nsk):
            idx: int
            *prefix, idx = psk[:len(nsk) + 1]
            x._sort_key = (*prefix, idx + 1)
        elif len(psk) < len(nsk):
            *prefix, idx = nsk[:len(psk) + 1]
            x._sort_key = (*prefix, idx - 1)
        else:  # same length, increase length by 1
            x._sort_key = (*psk, 0)

    def __gt__(self, other: 'Node'):
        return self._sort_key > other._sort_key

    def __lt__(self, other: 'Node'):
        return self._sort_key < other._sort_key

    def __ge__(self, other: 'Node'):
        return self > other or self == other

    def __le__(self, other: 'Node'):
        return self < other or self == other

    @compatibility(is_backward_compatible=True)
    def append(self, x: 'Node') -> None:
        """
        Insert ``x`` after this node in the list of nodes in the graph.
        Equivalent to ``self.next.prepend(x)``

        Args:
            x (Node): The node to put after this node. Must be a member of the same graph.
        """
        self._next.prepend(x)

    def _remove_from_list(self):
        p, n = self._prev, self._next
        p._next, n._prev = n, p
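
    # Illustrative sketch (not part of the original module): `prepend` and `append`
    # move an existing node relative to ``self`` in the graph's node list. Given two
    # nodes ``a`` and ``b`` of the same graph:
    #
    #     a.prepend(b)                       # b now comes immediately before a
    #     assert a.prev is b and b.next is a
    #
    #     a.append(b)                        # b now comes immediately after a
    #     assert a.next is b and b.prev is a
    #
    # Reordering must still respect def-use dependencies; `Graph.lint()` can be used
    # afterwards to verify that every node is defined before it is used.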

    @property
    def args(self) -> Tuple[Argument, ...]:
        """
        The tuple of arguments to this ``Node``. The interpretation of arguments
        depends on the node's opcode. See the :class:`Node` docstring for more
        information.

        Assignment to this property is allowed. All accounting of uses and users
        is updated automatically on assignment.
        """
        return self._args

    @args.setter
    def args(self, a : Tuple[Argument, ...]):
        """
        Set the tuple of arguments to this Node. The interpretation of arguments
        depends on the node's opcode. See the ``fx.Graph`` docstring for more
        information.
        """
        # DO NOT CALL `__update_args_kwargs` directly. The correct way to
        # set `args` is via direct assignment, i.e. `node.args = new_args`
        self.__update_args_kwargs(map_arg(a, lambda x: x), self._kwargs)  # type: ignore[arg-type]

    @property
    def kwargs(self) -> Dict[str, Argument]:
        """
        The dict of keyword arguments to this ``Node``. The interpretation of arguments
        depends on the node's opcode. See the :class:`Node` docstring for more
        information.

        Assignment to this property is allowed. All accounting of uses and users
        is updated automatically on assignment.
        """
        return self._kwargs

    @kwargs.setter
    def kwargs(self, k : Dict[str, Argument]):
        """
        Set the dict of kwargs to this Node. The interpretation of arguments
        depends on the node's opcode. See the ``fx.Graph`` docstring for more
        information.
        """
        # DO NOT CALL `__update_args_kwargs` directly. The correct way to
        # set `kwargs` is via direct assignment, i.e. `node.kwargs = new_kwargs`
        self.__update_args_kwargs(self._args, map_arg(k, lambda x: x))  # type: ignore[arg-type]
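
    # Illustrative sketch (not part of the original module): assigning to `args` or
    # `kwargs` goes through the setters above, which keep `users` and
    # `all_input_nodes` in sync. For nodes ``n`` and ``m`` in the same graph:
    #
    #     n.args = (m, 2)                 # replace n's positional arguments
    #     assert m in n.all_input_nodes   # m is now an input of n
    #     assert n in m.users             # and n is recorded as a user of m
    #
    # The stored containers are immutable; always assign a fresh tuple to
    # `node.args` or a fresh dict to `node.kwargs` rather than mutating in place.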

    @property
    def all_input_nodes(self) -> List['Node']:
        """
        Return all Nodes that are inputs to this Node. This is equivalent to
        iterating over ``args`` and ``kwargs`` and only collecting the values that
        are Nodes.

        Returns:

            List of ``Nodes`` that appear in the ``args`` and ``kwargs`` of this
            ``Node``, in that order.
        """
        return list(self._input_nodes.keys())

    @compatibility(is_backward_compatible=True)
    def update_arg(self, idx : int, arg : Argument) -> None:
        """
        Update an existing positional argument to contain the new value
        ``arg``. After calling, ``self.args[idx] == arg``.

        Args:

            idx (int): The index into ``self.args`` of the element to update
            arg (Argument): The new argument value to write into ``args``
        """
        args = list(self.args)
        args[idx] = arg
        self.args = tuple(args)

    @compatibility(is_backward_compatible=True)
    def insert_arg(self, idx : int, arg : Argument) -> None:
        """
        Insert a positional argument into the argument list at the given index.

        Args:

            idx (int): The index of the element in ``self.args`` to be inserted before.
            arg (Argument): The new argument value to insert into ``args``
        """
        assert 0 <= idx <= len(self.args), "insert_args index must be between 0 and len(self.args)"
        args_left = self.args[:idx]
        args_right = self.args[idx:]

        self._args = args_left + (arg,) + args_right

        _new_input_nodes: Dict[Node, None] = {}
        map_arg(arg, _new_input_nodes.setdefault)

        for new_use in _new_input_nodes.keys():
            if new_use not in self._input_nodes:
                self._input_nodes.setdefault(new_use)
                new_use.users.setdefault(self)

    @compatibility(is_backward_compatible=True)
    def update_kwarg(self, key : str, arg : Argument) -> None:
        """
        Update an existing keyword argument to contain the new value
        ``arg``. After calling, ``self.kwargs[key] == arg``.

        Args:

            key (str): The key in ``self.kwargs`` of the element to update
            arg (Argument): The new argument value to write into ``kwargs``
        """
        kwargs = dict(self.kwargs)
        kwargs[key] = arg
        self.kwargs = kwargs
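
    # Illustrative sketch (not part of the original module): the three helpers above
    # are thin wrappers over argument reassignment. Given a node ``n`` with
    # ``n.args == (x, 1)`` and another node ``y``:
    #
    #     n.update_arg(1, 2)            # n.args == (x, 2)
    #     n.insert_arg(0, y)            # n.args == (y, x, 2)
    #     n.update_kwarg('alpha', 3)    # n.kwargs['alpha'] == 3
    #
    # Use/user bookkeeping is updated on each call, just as with direct assignment
    # to `node.args` / `node.kwargs`.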

    @property
    def stack_trace(self) -> Optional[str]:
        """
        Return the Python stack trace that was recorded during tracing, if any.
        When traced with fx.Tracer, this property is usually populated by
        `Tracer.create_proxy`. To record stack traces during tracing for debug purposes,
        set `record_stack_traces = True` on the `Tracer` instance.
        When traced with dynamo, this property will be populated by default by
        `OutputGraph.create_proxy`.

        stack_trace would have the innermost frame at the end of the string.
        """
        return self.meta.get("stack_trace", None)

    @stack_trace.setter
    def stack_trace(self, trace : Optional[str]):
        self.meta["stack_trace"] = trace

    def __update_args_kwargs(self, new_args : Tuple['Argument', ...], new_kwargs : Dict[str, 'Argument']):
        """
        This API is internal. Do *not* call it directly.
        """
        self._args = new_args
        self._kwargs = new_kwargs

        for old_use in self._input_nodes.keys():
            old_use.users.pop(self)

        self._input_nodes = {}
        map_arg(self._args, self._input_nodes.setdefault)
        map_arg(self._kwargs, self._input_nodes.setdefault)

        for new_use in self._input_nodes.keys():
            new_use.users.setdefault(self)

    def __repr__(self) -> str:
        if self._repr_fn:
            return self._repr_fn(self)
        return self.name

    def _pretty_print_target(self, target):
        """
        Make target printouts more user-friendly.
        1) builtins will be printed as `builtins.xyz`
        2) operators will be printed as `operator.xyz`
        3) other callables will be printed with qualified name, e.g. torch.add
        """
        if isinstance(target, str):
            return target
        if hasattr(target, '__module__'):
            if not hasattr(target, '__name__'):
                # Just to be defensive, if we don't have `__name__`, get the
                # qualname. Not sure if this happens for any members of `operator`
                # or `builtins`. This fallback path is not as good, since e.g.
                # things in `operator` have `_operator` as their __module__.
                return _get_qualified_name(target)
            if target.__module__ == 'builtins':
                return f'builtins.{target.__name__}'
            elif target.__module__ == '_operator':
                return f'operator.{target.__name__}'
        return _get_qualified_name(target)

    @compatibility(is_backward_compatible=True)
    def format_node(self,
                    placeholder_names: Optional[List[str]] = None,
                    maybe_return_typename: Optional[List[str]] = None) -> Optional[str]:
        """
        Return a descriptive string representation of ``self``.

        This method can be used with no arguments as a debugging
        utility.

        This function is also used internally in the ``__str__`` method
        of ``Graph``. Together, the strings in ``placeholder_names``
        and ``maybe_return_typename`` make up the signature of the
        autogenerated ``forward`` function in this Graph's surrounding
        GraphModule. ``placeholder_names`` and ``maybe_return_typename``
        should not be used otherwise.

        Args:
            placeholder_names: A list that will store formatted strings
                representing the placeholders in the generated
                ``forward`` function. Internal use only.
            maybe_return_typename: A single-element list that will store
                a formatted string representing the output of the
                generated ``forward`` function. Internal use only.

        Returns:
            str: If 1) we're using ``format_node`` as an internal helper
                in the ``__str__`` method of ``Graph``, and 2) ``self``
                is a placeholder Node, return ``None``. Otherwise,
                return a descriptive string representation of the
                current Node.
        """
        if self.op == 'placeholder':
            assert isinstance(self.target, str)
            arg_str = self.target
            arg_str += arg_str + f': {_type_repr(self.type)}' if self.type else ''
            if placeholder_names:
                placeholder_names.append(arg_str)
                return None
            maybe_typename = f'{_type_repr(self.type)} ' if self.type else ''
            default_val = '(default=' + str(self.args[0]) + ')' if self.args else ''
            return f'%{self.name} : {maybe_typename}[num_users={len(self.users)}] = {self.op}[target={self.target}]{default_val}'
        elif self.op == 'get_attr':
            maybe_typename = f'{_type_repr(self.type)} ' if self.type is not None else ''
            return f'%{self.name} : {maybe_typename}[num_users={len(self.users)}] = ' \
                   f'{self.op}[target={self._pretty_print_target(self.target)}]'
        elif self.op == 'output':
            if self.type and maybe_return_typename:
                maybe_return_typename[0] = f' -> {_type_repr(self.type)}'
            return f'return {self.args[0]}'
        else:
            maybe_typename = f'{_type_repr(self.type)} ' if self.type is not None else ''
            return f'%{self.name} : {maybe_typename}[num_users={len(self.users)}] = ' \
                   f'{self.op}[target={self._pretty_print_target(self.target)}](' \
                   f'args = {_format_arg(self.args)}, kwargs = {_format_arg(self.kwargs)})'
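
    # Illustrative sketch (not part of the original module): `format_node` is useful
    # as a standalone debugging aid, independent of printing the whole graph:
    #
    #     import torch
    #     import torch.fx
    #
    #     gm = torch.fx.symbolic_trace(lambda x: torch.relu(x) + 1)
    #     for node in gm.graph.nodes:
    #         print(node.format_node())
    #     # Output resembles:
    #     #   %x : [num_users=1] = placeholder[target=x]
    #     #   %relu : [num_users=1] = call_function[target=torch.relu](args = (%x,), kwargs = {})
    #     #   %add : [num_users=1] = call_function[target=operator.add](args = (%relu, 1), kwargs = {})
    #     #   return add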

    @compatibility(is_backward_compatible=True)
    def replace_all_uses_with(self,
                              replace_with : 'Node',
                              delete_user_cb: Callable[['Node'], bool] = lambda user: True,
                              *,
                              propagate_meta=False
                              ) -> List['Node']:
        """
        Replace all uses of ``self`` in the Graph with the Node ``replace_with``.

        Args:

            replace_with (Node): The node to replace all uses of ``self`` with.
            delete_user_cb (Callable): Callback that is called to determine
                whether a given user of the self node should be removed.
            propagate_meta (bool): Whether or not to copy all properties
                on the .meta field of the original node onto the replacement node.
                For safety, this is only valid to do if the replacement node
                doesn't already have an existing .meta field.

        Returns:

            The list of Nodes on which this change was made.
        """
        if propagate_meta:
            assert len(replace_with.meta) == 0, \
                'Called node.replace_all_uses_with(replace_with, propagate_meta=True), ' \
                'but replace_with already has .meta keys'
            for k, v in self.meta.items():
                replace_with.meta[k] = v
        to_process = list(self.users)
        skipped = []
        m = self.graph.owning_module
        for use_node in to_process:
            if not delete_user_cb(use_node):
                skipped.append(use_node)
                continue

            def maybe_replace_node(n : Node) -> Node:
                if n == self:
                    return replace_with
                else:
                    return n

            if getattr(m, "_replace_hook", None):
                m._replace_hook(old=self, new=replace_with.name, user=use_node)

            new_args = map_arg(use_node.args, maybe_replace_node)
            new_kwargs = map_arg(use_node.kwargs, maybe_replace_node)
            assert isinstance(new_args, tuple)
            assert isinstance(new_kwargs, dict)
            use_node.__update_args_kwargs(new_args, new_kwargs)

        assert len(self.users) - len(skipped) == 0
        return [n for n in to_process if n not in skipped]
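
    # Illustrative sketch (not part of the original module): a typical rewrite pass
    # pairs `replace_all_uses_with` with `Graph.erase_node`, e.g. replacing every
    # `torch.add` call with `operator.mul`:
    #
    #     import operator
    #     import torch
    #     import torch.fx
    #
    #     gm = torch.fx.symbolic_trace(lambda x: torch.add(x, x))
    #     for node in gm.graph.nodes:
    #         if node.op == 'call_function' and node.target is torch.add:
    #             with gm.graph.inserting_after(node):
    #                 new_node = gm.graph.call_function(operator.mul, node.args, node.kwargs)
    #             node.replace_all_uses_with(new_node)
    #             gm.graph.erase_node(node)
    #     gm.recompile()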

    @compatibility(is_backward_compatible=False)
    def is_impure(self):
        """
        Returns whether this op is impure, i.e. if its op is a placeholder or
        output, or a call_function or call_module which is impure.

        Returns:

            bool: If the op is impure or not.
        """
        if self.op in {"placeholder", "output"}:
            return True

        # Check if an impure function.
        if self.op == "call_function":
            return self.target in _side_effectful_functions

        # Check if an impure module.
        if self.op == "call_module":
            assert (
                self.graph.owning_module is not None
            ), "self.graph.owning_module not set for purity check"
            target_mod = self.graph.owning_module.get_submodule(self.target)
            assert (
                target_mod is not None
            ), f"Did not find expected submodule target {self.target}"
            return getattr(target_mod, "_is_impure", False)

        return False

    @compatibility(is_backward_compatible=False)
    def normalized_arguments(
            self, root : torch.nn.Module, arg_types : Optional[Tuple[Any]] = None,
            kwarg_types : Optional[Dict[str, Any]] = None,
            normalize_to_only_use_kwargs : bool = False) -> Optional[ArgsKwargsPair]:
        """
        Returns normalized arguments to Python targets. This means that
        `args/kwargs` will be matched up to the module/functional's
        signature and return exclusively kwargs in positional order
        if `normalize_to_only_use_kwargs` is true.
        Also populates default values. Does not support positional-only
        parameters or varargs parameters.

        Supports module calls.

        May require `arg_types` and `kwarg_types` in order to disambiguate overloads.

        Args:
            root (torch.nn.Module): Module upon which to resolve module targets.
            arg_types (Optional[Tuple[Any]]): Tuple of arg types for the args
            kwarg_types (Optional[Dict[str, Any]]): Dict of arg types for the kwargs
            normalize_to_only_use_kwargs (bool): Whether to normalize to only use kwargs.

        Returns:

            Returns NamedTuple ArgsKwargsPair, or `None` if not successful.
        """
        if self.op == 'call_function':
            assert callable(self.target)
            return normalize_function(self.target, self.args, self.kwargs, arg_types, kwarg_types)  # type: ignore[arg-type]
        elif self.op == 'call_module':
            assert isinstance(self.target, str)
            return normalize_module(root, self.target, self.args, self.kwargs)  # type: ignore[arg-type]

        return None
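
    # Illustrative sketch (not part of the original module): `normalized_arguments`
    # resolves a call's arguments against the target's signature, which can be used
    # to rewrite calls into a keyword form (exact behavior may vary by target):
    #
    #     import torch
    #     import torch.fx
    #
    #     gm = torch.fx.symbolic_trace(lambda x: torch.nn.functional.relu(x, inplace=False))
    #     for node in gm.graph.nodes:
    #         if node.op == 'call_function':
    #             pair = node.normalized_arguments(gm)
    #             if pair is not None:
    #                 node.args, node.kwargs = pair.args, pair.kwargs
    #     gm.recompile()
    #     # after normalization the relu call refers to its parameters by name,
    #     # e.g. input=... and inplace=...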

    @compatibility(is_backward_compatible=True)
    def replace_input_with(self, old_input: 'Node', new_input: 'Node'):
        """
        Loop through input nodes of ``self``, and replace all instances of
        ``old_input`` with ``new_input``.

        Args:

            old_input (Node): The old input node to be replaced.
            new_input (Node): The new input node to replace ``old_input``.
        """
        def maybe_replace_node(n : Node) -> Node:
            return new_input if n == old_input else n

        m = self.graph.owning_module
        if getattr(m, "_replace_hook", None):
            m._replace_hook(old=old_input, new=new_input.name, user=self)

        new_args = map_arg(self.args, maybe_replace_node)
        new_kwargs = map_arg(self.kwargs, maybe_replace_node)
        assert isinstance(new_args, tuple)
        assert isinstance(new_kwargs, dict)
        self.__update_args_kwargs(new_args, new_kwargs)
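
    # Illustrative sketch (not part of the original module): `replace_input_with`
    # rewrites the inputs of a single node, rather than every user of a value.
    # If node ``n`` currently consumes ``a`` but should consume ``b`` instead:
    #
    #     n.replace_input_with(a, b)
    #     assert b in n.all_input_nodes and a not in n.all_input_nodes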

    def _rename(self, candidate: str):
        if candidate == self.name:
            return
        name = self.graph._graph_namespace.create_name(candidate, None)
        self.name = name
        self.graph._graph_namespace._rename_object(self, name)

    def __setattr__(self, name: str, value: Any) -> None:
        if name == 'name' and hasattr(self, "name"):
            m = self.graph.owning_module
            if getattr(m, "_replace_hook", None):
                assert isinstance(value, str)
                for user in self.users:
                    m._replace_hook(old=self, new=value, user=user)
        update = False
        if (
                hasattr(self, name) and
                hasattr(self.graph, "_find_nodes_lookup_table") and
                self in self.graph._find_nodes_lookup_table
        ):
            update = True
            self.graph._find_nodes_lookup_table.remove(self)
        object.__setattr__(self, name, value)
        if update:
            self.graph._find_nodes_lookup_table.insert(self)


@compatibility(is_backward_compatible=True)
def map_arg(a: Argument, fn: Callable[[Node], Argument]) -> Argument:
    """
    Apply fn to each Node appearing in arg. arg may be a list, tuple, slice, or dict with string keys.
    """
    assert callable(fn), "torch.fx.map_arg(a, fn): fn must be a callable"
    return map_aggregate(a, lambda x: fn(x) if isinstance(x, Node) else x)


@compatibility(is_backward_compatible=True)
def map_aggregate(a: Argument, fn: Callable[[Argument], Argument]) -> Argument:
    """
    Apply fn to each leaf value appearing in arg. arg may be a list, tuple, slice, or dict with string keys.
    """
    if isinstance(a, tuple):
        t = tuple(map_aggregate(elem, fn) for elem in a)
        # Support NamedTuple (if it has `_fields`) by repacking into original type.
        return t if not hasattr(a, '_fields') else type(a)(*t)
    elif isinstance(a, list):
        return immutable_list(map_aggregate(elem, fn) for elem in a)
    elif isinstance(a, dict):
        return immutable_dict((k, map_aggregate(v, fn)) for k, v in a.items())
    elif isinstance(a, slice):
        return slice(map_aggregate(a.start, fn), map_aggregate(a.stop, fn), map_aggregate(a.step, fn))
    else:
        return fn(a)
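
# Illustrative sketch (not part of the original module): `map_arg` applies a
# function to every Node found inside an arbitrarily nested args/kwargs
# structure, while `map_aggregate` applies it to every leaf value. The nodes
# `node_a`, `node_b`, `node_c` below are hypothetical placeholders.
#
#     from torch.fx.node import map_arg, map_aggregate
#
#     # collect all Node inputs hidden inside a nested structure
#     found = []
#     map_arg((node_a, [node_b, 3], {'k': node_c}), found.append)
#
#     # double every integer leaf, keeping the nesting intact
#     doubled = map_aggregate((1, [2, 3], {'k': 4}),
#                             lambda x: x * 2 if isinstance(x, int) else x)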